From 706ccbd8b5a7fef7c6836e27d457cc33458aef35 Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Wed, 25 Aug 2021 05:39:33 +0200 Subject: [PATCH 001/128] Remove Needless Sleeps on Node Configuration Changes in Internal Cluster Tests (#76884) I noticed this recently when trying to reproduce a test failure. We're doing a lot of sleeping when validating that the cluster formed if that process is randomly slow (which it tends to be, due to disk interaction on node starts and such). By reusing the approach for waiting on a cluster state we rarely, if ever, need to get into the busy assert loop and can remove all these sleeps, shaving off a few seconds here and there from running internal cluster tests. --- .../AbstractSnapshotIntegTestCase.java | 29 +-------------- .../test/ClusterServiceUtils.java | 37 +++++++++++++++++++ .../test/InternalTestCluster.java | 10 +++++ 3 files changed, 49 insertions(+), 27 deletions(-) diff --git a/test/framework/src/main/java/org/elasticsearch/snapshots/AbstractSnapshotIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/snapshots/AbstractSnapshotIntegTestCase.java index 89e2d1419d556..d5d6abfdb8c45 100644 --- a/test/framework/src/main/java/org/elasticsearch/snapshots/AbstractSnapshotIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/snapshots/AbstractSnapshotIntegTestCase.java @@ -20,7 +20,6 @@ import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.ClusterStateObserver; import org.elasticsearch.cluster.ClusterStateUpdateTask; import org.elasticsearch.cluster.SnapshotDeletionsInProgress; import org.elasticsearch.cluster.SnapshotsInProgress; @@ -36,14 +35,12 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.core.Nullable; -import org.elasticsearch.core.TimeValue; import org.elasticsearch.common.xcontent.DeprecationHandler; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.core.Tuple; -import org.elasticsearch.node.NodeClosedException; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.repositories.FinalizeSnapshotContext; import org.elasticsearch.repositories.RepositoriesService; @@ -56,6 +53,7 @@ import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.snapshots.mockstore.MockRepository; +import org.elasticsearch.test.ClusterServiceUtils; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.VersionUtils; import org.elasticsearch.threadpool.ThreadPool; @@ -78,7 +76,6 @@ import java.util.Locale; import java.util.Map; import java.util.concurrent.TimeUnit; -import java.util.concurrent.TimeoutException; import java.util.concurrent.atomic.AtomicInteger; import java.util.function.BiConsumer; import java.util.function.Function; @@ -524,29 +521,7 @@ public static void awaitClusterState(Logger logger, Predicate stat } public static void awaitClusterState(Logger logger, String viaNode, Predicate statePredicate) throws Exception { - final ClusterService clusterService = internalCluster().getInstance(ClusterService.class, viaNode); - final ThreadPool threadPool =
internalCluster().getInstance(ThreadPool.class, viaNode); - final ClusterStateObserver observer = new ClusterStateObserver(clusterService, logger, threadPool.getThreadContext()); - if (statePredicate.test(observer.setAndGetObservedState()) == false) { - final PlainActionFuture future = PlainActionFuture.newFuture(); - observer.waitForNextChange(new ClusterStateObserver.Listener() { - @Override - public void onNewClusterState(ClusterState state) { - future.onResponse(null); - } - - @Override - public void onClusterServiceClose() { - future.onFailure(new NodeClosedException(clusterService.localNode())); - } - - @Override - public void onTimeout(TimeValue timeout) { - future.onFailure(new TimeoutException()); - } - }, statePredicate); - future.get(30L, TimeUnit.SECONDS); - } + ClusterServiceUtils.awaitClusterState(logger, statePredicate, internalCluster().getInstance(ClusterService.class, viaNode)); } protected ActionFuture startFullSnapshotBlockedOnDataNode(String snapshotName, String repoName, diff --git a/test/framework/src/main/java/org/elasticsearch/test/ClusterServiceUtils.java b/test/framework/src/main/java/org/elasticsearch/test/ClusterServiceUtils.java index 656f8dbc96c10..7d5cb754844e8 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ClusterServiceUtils.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ClusterServiceUtils.java @@ -7,11 +7,14 @@ */ package org.elasticsearch.test; +import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.core.util.Throwables; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.Version; +import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.ClusterStateObserver; import org.elasticsearch.cluster.ClusterStatePublicationEvent; import org.elasticsearch.cluster.ClusterStateUpdateTask; import org.elasticsearch.cluster.NodeConnectionsService; @@ -27,11 +30,15 @@ import org.elasticsearch.cluster.service.MasterService; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.node.NodeClosedException; import org.elasticsearch.threadpool.ThreadPool; import java.util.Collections; import java.util.concurrent.CountDownLatch; +import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicReference; +import java.util.function.Predicate; import static junit.framework.TestCase.fail; @@ -179,4 +186,34 @@ public static void setAllElapsedMillis(ClusterStatePublicationEvent clusterState clusterStatePublicationEvent.setMasterApplyElapsedMillis(0L); } + public static void awaitClusterState(Logger logger, + Predicate statePredicate, + ClusterService clusterService) throws Exception { + final ClusterStateObserver observer = new ClusterStateObserver( + clusterService, + null, + logger, + clusterService.getClusterApplierService().threadPool().getThreadContext() + ); + if (statePredicate.test(observer.setAndGetObservedState()) == false) { + final PlainActionFuture future = PlainActionFuture.newFuture(); + observer.waitForNextChange(new ClusterStateObserver.Listener() { + @Override + public void onNewClusterState(ClusterState state) { + future.onResponse(null); + } + + @Override + public void onClusterServiceClose() { + future.onFailure(new NodeClosedException(clusterService.localNode())); + } + + @Override + public void onTimeout(TimeValue timeout) { + 
assert false : "onTimeout called with no timeout set"; + } + }, statePredicate); + future.get(30L, TimeUnit.SECONDS); + } + } } diff --git a/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java b/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java index 8b117c325fbde..29ece3c35791c 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java +++ b/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java @@ -1118,6 +1118,16 @@ public synchronized void validateClusterFormed() { } logger.trace("validating cluster formed, expecting {}", expectedNodes); + try { + // use waiting via the cluster service first to save on some busy-waiting and sleeping before entering the busy assert below + ClusterServiceUtils.awaitClusterState( + logger, + state -> state.nodes().getMasterNodeId() != null && state.nodes().getSize() == expectedNodes.size(), + getInstance(ClusterService.class) + ); + } catch (Exception e) { + throw new IllegalStateException(e); + } try { assertBusy(() -> { final List states = nodes.values().stream() From deac0030bf6c1675b5b55fec090dec3467995268 Mon Sep 17 00:00:00 2001 From: Hendrik Muhs Date: Wed, 25 Aug 2021 08:04:33 +0200 Subject: [PATCH 002/128] [Test] TransformIndexerStateTests testStopAtCheckpoint fix listener count (#76880) change check for open listeners, avoiding failures due to execution timing fixes #76555 --- .../transforms/TransformIndexerStateTests.java | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformIndexerStateTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformIndexerStateTests.java index 3091c4f62c089..0895aa1b43ccd 100644 --- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformIndexerStateTests.java +++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformIndexerStateTests.java @@ -216,11 +216,7 @@ protected void doSaveState(IndexerState state, TransformIndexerPosition position protected IterationResult doProcess(SearchResponse searchResponse) { // pretend that we processed 10k documents for each call getStats().incrementNumDocuments(10_000); - return new IterationResult<>( - Stream.of(new IndexRequest()), - new TransformIndexerPosition(null, null), - false - ); + return new IterationResult<>(Stream.of(new IndexRequest()), new TransformIndexerPosition(null, null), false); } public boolean waitingForNextSearch() { @@ -231,6 +227,11 @@ public int getSaveStateListenerCallCount() { return saveStateListenerCallCount; } + public int getSaveStateListenerCount() { + Collection> saveStateListenersAtTheMomentOfCalling = saveStateListeners.get(); + return (saveStateListenersAtTheMomentOfCalling != null) ? 
saveStateListenersAtTheMomentOfCalling.size() : 0; + } + public TransformState getPersistedState() { return persistedState; } @@ -455,14 +456,12 @@ public void testStopAtCheckpoint() throws Exception { CountDownLatch searchLatch = indexer.createAwaitForSearchLatch(1); List responseLatches = new ArrayList<>(); - int timesStopAtCheckpointChanged = 0; // default stopAtCheckpoint is false boolean previousStopAtCheckpoint = false; for (int i = 0; i < 3; ++i) { CountDownLatch latch = new CountDownLatch(1); boolean stopAtCheckpoint = randomBoolean(); - timesStopAtCheckpointChanged += (stopAtCheckpoint == previousStopAtCheckpoint ? 0 : 1); previousStopAtCheckpoint = stopAtCheckpoint; countResponse(listener -> setStopAtCheckpoint(indexer, stopAtCheckpoint, listener), latch); responseLatches.add(latch); @@ -474,7 +473,6 @@ public void testStopAtCheckpoint() throws Exception { // call it 3 times again for (int i = 0; i < 3; ++i) { boolean stopAtCheckpoint = randomBoolean(); - timesStopAtCheckpointChanged += (stopAtCheckpoint == previousStopAtCheckpoint ? 0 : 1); previousStopAtCheckpoint = stopAtCheckpoint; assertResponse(listener -> setStopAtCheckpoint(indexer, stopAtCheckpoint, listener)); } @@ -487,9 +485,11 @@ public void testStopAtCheckpoint() throws Exception { assertTrue("timed out after 5s", l.await(5, TimeUnit.SECONDS)); } + // there should be no listeners waiting + assertEquals(0, indexer.getSaveStateListenerCount()); + // listener must have been called by the indexing thread between timesStopAtCheckpointChanged and 6 times // this is not exact, because we do not know _when_ the other thread persisted the flag - assertThat(indexer.getSaveStateListenerCallCount(), greaterThanOrEqualTo(timesStopAtCheckpointChanged)); assertThat(indexer.getSaveStateListenerCallCount(), lessThanOrEqualTo(6)); } } From 9fa8babb4a4fbe9eb7f8f96a5f8dbd2ce64b6201 Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Wed, 25 Aug 2021 09:43:22 +0200 Subject: [PATCH 003/128] Fix Unexpected DS Name in CCR Test around Midnight (#76890) While `verifyDataStream` correctly accounts for data changes during rollover the rest of the test did not. Since we have the concrete correct index names available in `verifyDataStream` already we might as well just return them here and use them. 
closes #76163 --- .../elasticsearch/xpack/ccr/AutoFollowIT.java | 32 +++++++++++++++---- .../xpack/ccr/ESCCRRestTestCase.java | 10 ++++-- 2 files changed, 32 insertions(+), 10 deletions(-) diff --git a/x-pack/plugin/ccr/qa/multi-cluster/src/test/java/org/elasticsearch/xpack/ccr/AutoFollowIT.java b/x-pack/plugin/ccr/qa/multi-cluster/src/test/java/org/elasticsearch/xpack/ccr/AutoFollowIT.java index 33c6a65924e9e..6d150866200d0 100644 --- a/x-pack/plugin/ccr/qa/multi-cluster/src/test/java/org/elasticsearch/xpack/ccr/AutoFollowIT.java +++ b/x-pack/plugin/ccr/qa/multi-cluster/src/test/java/org/elasticsearch/xpack/ccr/AutoFollowIT.java @@ -400,6 +400,7 @@ public void testRolloverDataStreamInFollowClusterForbidden() throws Exception { final String autoFollowPatternName = getTestName().toLowerCase(Locale.ROOT); int initialNumberOfSuccessfulFollowedIndices = getNumberOfSuccessfulFollowedIndices(); + List backingIndexNames = null; try { // Create auto follow pattern createAutoFollowPattern(client(), autoFollowPatternName, "logs-tomcat-*", "leader_cluster"); @@ -468,27 +469,44 @@ public void testRolloverDataStreamInFollowClusterForbidden() throws Exception { // Try again and now the rollover should be successful because local data stream is now : var rolloverRequest3 = new Request("POST", "/" + dataStreamName + "/_rollover"); assertOK(client().performRequest(rolloverRequest3)); - verifyDataStream(client(), dataStreamName, backingIndexName(dataStreamName, 1), backingIndexName(dataStreamName, 2), - backingIndexName(dataStreamName, 3)); + backingIndexNames = verifyDataStream( + client(), + dataStreamName, + backingIndexName(dataStreamName, 1), + backingIndexName(dataStreamName, 2), + backingIndexName(dataStreamName, 3) + ); // TODO: verify that following a backing index for logs-tomcat-prod data stream in remote cluster fails, // because local data stream isn't a replicated data stream anymore. // Unfollow .ds-logs-tomcat-prod-000002, // which is now possible because this index can now be closed as it is no longer the write index. 
- pauseFollow(backingIndexName(dataStreamName, 2)); - closeIndex(backingIndexName(dataStreamName, 2)); - unfollow(backingIndexName(dataStreamName, 2)); + // use the backing index name returned from the verify call so we are guaranteed to use the correct index name even if the + // date rolled over + final String backingIndexNameGen2 = backingIndexNames.get(1); + pauseFollow(backingIndexNameGen2); + closeIndex(backingIndexNameGen2); + unfollow(backingIndexNameGen2); } } finally { + if (backingIndexNames == null) { + // we failed to compute the actual backing index names in the test because we failed earlier on, guessing them on a + // best-effort basis + backingIndexNames = List.of( + backingIndexName(dataStreamName, 1), + backingIndexName(dataStreamName, 2), + backingIndexName(dataStreamName, 3) + ); + } cleanUpFollower( - List.of(backingIndexName(dataStreamName, 1), backingIndexName(dataStreamName, 2), backingIndexName(dataStreamName, 3)), + backingIndexNames, List.of(dataStreamName), List.of(autoFollowPatternName) ); cleanUpLeader( - List.of(backingIndexName(dataStreamName, 1), backingIndexName(dataStreamName, 2)), + backingIndexNames.subList(0, 2), List.of(dataStreamName), List.of() ); diff --git a/x-pack/plugin/ccr/qa/src/main/java/org/elasticsearch/xpack/ccr/ESCCRRestTestCase.java b/x-pack/plugin/ccr/qa/src/main/java/org/elasticsearch/xpack/ccr/ESCCRRestTestCase.java index d5074dc2c642d..9145a4968595f 100644 --- a/x-pack/plugin/ccr/qa/src/main/java/org/elasticsearch/xpack/ccr/ESCCRRestTestCase.java +++ b/x-pack/plugin/ccr/qa/src/main/java/org/elasticsearch/xpack/ccr/ESCCRRestTestCase.java @@ -24,6 +24,7 @@ import org.elasticsearch.test.rest.ESRestTestCase; import java.io.IOException; +import java.util.ArrayList; import java.util.List; import java.util.Map; @@ -304,15 +305,16 @@ protected static boolean indexExists(String index) throws IOException { return RestStatus.OK.getStatus() == response.getStatusLine().getStatusCode(); } - protected static void verifyDataStream(final RestClient client, - final String name, - final String... expectedBackingIndices) throws IOException { + protected static List verifyDataStream(final RestClient client, + final String name, + final String... 
expectedBackingIndices) throws IOException { Request request = new Request("GET", "/_data_stream/" + name); Map response = toMap(client.performRequest(request)); List retrievedDataStreams = (List) response.get("data_streams"); assertThat(retrievedDataStreams, hasSize(1)); List actualBackingIndexItems = (List) ((Map) retrievedDataStreams.get(0)).get("indices"); assertThat(actualBackingIndexItems, hasSize(expectedBackingIndices.length)); + final List actualBackingIndices = new ArrayList<>(); for (int i = 0; i < expectedBackingIndices.length; i++) { Map actualBackingIndexItem = (Map) actualBackingIndexItems.get(i); String actualBackingIndex = (String) actualBackingIndexItem.get("index_name"); @@ -325,7 +327,9 @@ protected static void verifyDataStream(final RestClient client, int actualGeneration = Integer.parseInt(actualBackingIndex.substring(actualBackingIndex.lastIndexOf('-'))); int expectedGeneration = Integer.parseInt(expectedBackingIndex.substring(expectedBackingIndex.lastIndexOf('-'))); assertThat(actualGeneration, equalTo(expectedGeneration)); + actualBackingIndices.add(actualBackingIndex); } + return List.copyOf(actualBackingIndices); } protected static void createAutoFollowPattern(RestClient client, String name, String pattern, String remoteCluster) throws IOException { From 79ae3865061b31131c3714af25c2089572dd508e Mon Sep 17 00:00:00 2001 From: Bogdan Pintea Date: Wed, 25 Aug 2021 10:22:31 +0200 Subject: [PATCH 004/128] Fix join keys ordering in a sequence (#76699) A sequence payload is constructed by providing one list of sequences and one for the hits. When fetching the list of hits though, the list of sequences can be iterated in reverse order to build the hit references, meaning that the original list of sequences provided to the sequence payload needs reversing too.
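To illustrate, the snippet below is copied from the TumblingWindow change further down in this patch: the completed sequences only need flipping back when the first criterion was iterated in descending order.

    if (criteria.get(0).descending()) {
        Collections.reverse(completed);
    }

The new YAML test below relies on this ordering: its two matching sequences must be reported keyed by ADMIN first and then SYSTEM, i.e. in the original, non-reversed order.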
--- .../rest-api-spec/test/eql/10_basic.yml | 46 +++++++++++++++++++ .../execution/sequence/TumblingWindow.java | 4 ++ 2 files changed, 50 insertions(+) diff --git a/x-pack/plugin/eql/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/eql/10_basic.yml b/x-pack/plugin/eql/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/eql/10_basic.yml index 1ff0f7a2c9cf4..3bf26028bb84b 100644 --- a/x-pack/plugin/eql/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/eql/10_basic.yml +++ b/x-pack/plugin/eql/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/eql/10_basic.yml @@ -41,6 +41,42 @@ setup: user: SYSTEM id: 123 valid: true + - index: + _index: eql_test + _id: 4 + - event: + - category: network + "@timestamp": 2020-02-06T12:34:56Z + user: ADMIN + id: 123 + valid: true + - index: + _index: eql_test + _id: 5 + - event: + - category: network + "@timestamp": 2020-02-07T12:34:56Z + user: SYSTEM + id: 123 + valid: true + - index: + _index: eql_test + _id: 6 + - event: + - category: network + "@timestamp": 2020-02-08T12:34:56Z + user: ADMIN + id: 123 + valid: true + - index: + _index: eql_test + _id: 7 + - event: + - category: network + "@timestamp": 2020-02-09T12:34:56Z + user: SYSTEM + id: 123 + valid: true --- # Testing round-trip and the basic shape of the response @@ -382,3 +418,13 @@ setup: catch: missing eql.get_status: id: $id +--- +"Sequence checking correct join key ordering.": + + - do: + eql.search: + index: eql_test + body: + query: 'sequence by user [network where valid == true] [network where true]' + - match: {hits.sequences.0.join_keys.0: "ADMIN"} + - match: {hits.sequences.1.join_keys.0: "SYSTEM"} diff --git a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/sequence/TumblingWindow.java b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/sequence/TumblingWindow.java index 06de8582bfd1c..bd95f8b34307c 100644 --- a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/sequence/TumblingWindow.java +++ b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/sequence/TumblingWindow.java @@ -26,6 +26,7 @@ import org.elasticsearch.xpack.eql.util.ReversedIterator; import org.elasticsearch.xpack.ql.util.ActionListeners; +import java.util.Collections; import java.util.Iterator; import java.util.LinkedHashMap; import java.util.List; @@ -548,6 +549,9 @@ private void payload(ActionListener listener) { // get results through search (to keep using PIT) client.fetchHits(hits(completed), ActionListeners.map(listener, listOfHits -> { + if (criteria.get(0).descending()) { + Collections.reverse(completed); + } SequencePayload payload = new SequencePayload(completed, listOfHits, false, timeTook()); close(listener); return payload; From 283e6b1d4a48dec294e9fd88a8b6c2228a9f6cde Mon Sep 17 00:00:00 2001 From: David Kyle Date: Wed, 25 Aug 2021 10:55:11 +0100 Subject: [PATCH 005/128] [ML] Prefer American English spellings (#76915) organisation -> organization --- .../xpack/ml/inference/nlp/NerProcessor.java | 6 +++--- .../xpack/ml/inference/nlp/NerProcessorTests.java | 8 ++++---- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/NerProcessor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/NerProcessor.java index 4ccace4fc5950..1a70c7bda9141 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/NerProcessor.java +++ 
b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/NerProcessor.java @@ -28,7 +28,7 @@ public class NerProcessor implements NlpTask.Processor { public enum Entity implements Writeable { - NONE, MISC, PERSON, ORGANISATION, LOCATION; + NONE, MISC, PERSON, ORGANIZATION, LOCATION; @Override public void writeTo(StreamOutput out) throws IOException { @@ -48,8 +48,8 @@ enum IobTag { I_MISC(Entity.MISC), // Miscellaneous entity B_PER(Entity.PERSON), // Beginning of a person's name right after another person's name I_PER(Entity.PERSON), // Person's name - B_ORG(Entity.ORGANISATION), // Beginning of an organisation right after another organisation - I_ORG(Entity.ORGANISATION), // Organisation + B_ORG(Entity.ORGANIZATION), // Beginning of an organisation right after another organisation + I_ORG(Entity.ORGANIZATION), // Organisation B_LOC(Entity.LOCATION), // Beginning of a location right after another location I_LOC(Entity.LOCATION); // Location diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/NerProcessorTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/NerProcessorTests.java index a27ae10c7b810..f9547aa35c3e8 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/NerProcessorTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/NerProcessorTests.java @@ -108,7 +108,7 @@ public void testProcessResults() { assertThat(result.getEntityGroups().size(), equalTo(2)); assertThat(result.getEntityGroups().get(0).getWord(), equalTo("elasticsearch")); - assertThat(result.getEntityGroups().get(0).getLabel(), equalTo(NerProcessor.Entity.ORGANISATION.toString())); + assertThat(result.getEntityGroups().get(0).getLabel(), equalTo(NerProcessor.Entity.ORGANIZATION.toString())); assertThat(result.getEntityGroups().get(1).getWord(), equalTo("london")); assertThat(result.getEntityGroups().get(1).getLabel(), equalTo(NerProcessor.Entity.LOCATION.toString())); } @@ -144,7 +144,7 @@ public void testProcessResults_withIobMap() { assertThat(result.getEntityGroups().size(), equalTo(2)); assertThat(result.getEntityGroups().get(0).getWord(), equalTo("elasticsearch")); - assertThat(result.getEntityGroups().get(0).getLabel(), equalTo(NerProcessor.Entity.ORGANISATION.toString())); + assertThat(result.getEntityGroups().get(0).getLabel(), equalTo(NerProcessor.Entity.ORGANIZATION.toString())); assertThat(result.getEntityGroups().get(1).getWord(), equalTo("london")); assertThat(result.getEntityGroups().get(1).getLabel(), equalTo(NerProcessor.Entity.LOCATION.toString())); } @@ -169,7 +169,7 @@ public void testGroupTaggedTokens() { assertThat(entityGroups.get(0).getWord(), equalTo("Sarah Jessica")); assertThat(entityGroups.get(1).getLabel(), equalTo("location")); assertThat(entityGroups.get(1).getWord(), equalTo("Manchester")); - assertThat(entityGroups.get(2).getLabel(), equalTo("organisation")); + assertThat(entityGroups.get(2).getLabel(), equalTo("organization")); assertThat(entityGroups.get(2).getWord(), equalTo("Elastic")); } @@ -214,7 +214,7 @@ public void testGroupTaggedTokens_GivenConsecutiveContinuingEntities() { assertThat(entityGroups.get(0).getWord(), equalTo("FirstName SecondName")); assertThat(entityGroups.get(1).getLabel(), equalTo("person")); assertThat(entityGroups.get(1).getWord(), equalTo("NextPerson NextPersonSecondName")); - assertThat(entityGroups.get(2).getLabel(), equalTo("organisation")); + assertThat(entityGroups.get(2).getLabel(), 
equalTo("organization")); } private static TokenizationResult tokenize(List vocab, String input) { From 789368b38fc8d9da60d81f4748a17de5479754d9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Istv=C3=A1n=20Zolt=C3=A1n=20Szab=C3=B3?= Date: Wed, 25 Aug 2021 12:04:32 +0200 Subject: [PATCH 006/128] [DOCS] Fixes a syntax error in datafeed runtime field example. (#76917) --- .../ml/anomaly-detection/ml-configuring-transform.asciidoc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/reference/ml/anomaly-detection/ml-configuring-transform.asciidoc b/docs/reference/ml/anomaly-detection/ml-configuring-transform.asciidoc index 9d02c3d011eac..ef442b4b92913 100644 --- a/docs/reference/ml/anomaly-detection/ml-configuring-transform.asciidoc +++ b/docs/reference/ml/anomaly-detection/ml-configuring-transform.asciidoc @@ -392,7 +392,7 @@ POST _ml/datafeeds/datafeed-test2/_update "my_runtime_field": { "type": "keyword", "script": { - "source": "emit(def m = /(.*)-bar-([0-9][0-9])/.matcher(doc['tokenstring3'].value); return m.find() ? m.group(1) + '_' + m.group(2) : '';)" <1> + "source": "def m = /(.*)-bar-([0-9][0-9])/.matcher(doc['tokenstring3'].value); emit(m.find() ? m.group(1) + '_' + m.group(2) : '');" <1> } } } From 7b618a01d96a919dbb87d45ea562121a731239ae Mon Sep 17 00:00:00 2001 From: Nhat Nguyen Date: Wed, 25 Aug 2021 09:41:17 -0400 Subject: [PATCH 007/128] Relax disk usage test assertions (#76895) This commit allows up to 2K data structures that are loaded before we can track bytes read. Closes #76643 --- .../indices/diskusage/IndexDiskUsageAnalyzerTests.java | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzerTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzerTests.java index 8a9f2534f7002..e6f70bf1d1a04 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzerTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzerTests.java @@ -520,15 +520,15 @@ private static void assertStats(IndexDiskUsageStats actualStats, IndexDiskUsageS assertFieldStats(field, "doc values", actualField.getDocValuesBytes(), expectedField.getDocValuesBytes(), 0.01, 2560); assertFieldStats(field, "inverted index", - actualField.getInvertedIndexBytes(), expectedField.getInvertedIndexBytes(), 0.01, 1024); + actualField.getInvertedIndexBytes(), expectedField.getInvertedIndexBytes(), 0.01, 2048); } // We are not able to collect per field stats for stored, vector, points, and norms IndexDiskUsageStats.PerFieldDiskUsage actualTotal = actualStats.total(); IndexDiskUsageStats.PerFieldDiskUsage expectedTotal = perFieldStats.total(); - assertFieldStats("total", "stored fields", actualTotal.getStoredFieldBytes(), expectedTotal.getStoredFieldBytes(), 0.01, 1024); - assertFieldStats("total", "points", actualTotal.getPointsBytes(), expectedTotal.getPointsBytes(), 0.01, 1024); - assertFieldStats("total", "term vectors", actualTotal.getTermVectorsBytes(), expectedTotal.getTermVectorsBytes(), 0.01, 1024); - assertFieldStats("total", "norms", actualTotal.getNormsBytes(), expectedTotal.getNormsBytes(), 0.01, 1024); + assertFieldStats("total", "stored fields", actualTotal.getStoredFieldBytes(), expectedTotal.getStoredFieldBytes(), 0.01, 2048); + assertFieldStats("total", "points", actualTotal.getPointsBytes(), expectedTotal.getPointsBytes(), 0.01, 2048); + 
assertFieldStats("total", "term vectors", actualTotal.getTermVectorsBytes(), expectedTotal.getTermVectorsBytes(), 0.01, 2048); + assertFieldStats("total", "norms", actualTotal.getNormsBytes(), expectedTotal.getNormsBytes(), 0.01, 2048); } private static void assertFieldStats(String fieldName, String fieldType, From 25984d159d2809ef8b61f94c8d37c13b792c2949 Mon Sep 17 00:00:00 2001 From: Nhat Nguyen Date: Wed, 25 Aug 2021 09:43:37 -0400 Subject: [PATCH 008/128] Ensure norms is non-zero in IndexDiskUsageAnalyzerIT (#76894) The test fails when norms have a singleton value and is loaded eagerly. This commit makes sure that we index more than one norm value. Closes #76173 --- .../diskusage/IndexDiskUsageAnalyzerIT.java | 16 ++++++++++++++-- 1 file changed, 14 insertions(+), 2 deletions(-) diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzerIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzerIT.java index 8a4f8762801ee..05330a64356c8 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzerIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzerIT.java @@ -63,6 +63,17 @@ public void testSimple() throws Exception { .setSource(doc) .get(); } + final boolean forceNorms = randomBoolean(); + if (forceNorms) { + final XContentBuilder doc = XContentFactory.jsonBuilder() + .startObject() + .field("english_text", "A long sentence to make sure that norms is non-zero") + .endObject(); + client().prepareIndex(index) + .setId("id") + .setSource(doc) + .get(); + } PlainActionFuture future = PlainActionFuture.newFuture(); client().execute(AnalyzeIndexDiskUsageAction.INSTANCE, new AnalyzeIndexDiskUsageRequest(new String[] {index}, AnalyzeIndexDiskUsageRequest.DEFAULT_INDICES_OPTIONS, true), @@ -77,8 +88,9 @@ public void testSimple() throws Exception { final IndexDiskUsageStats.PerFieldDiskUsage englishField = stats.getFields().get("english_text"); assertThat(englishField.getInvertedIndexBytes(), greaterThan(0L)); assertThat(englishField.getStoredFieldBytes(), equalTo(0L)); - assertThat(englishField.getNormsBytes(), greaterThan(0L)); - + if (forceNorms) { + assertThat(englishField.getNormsBytes(), greaterThan(0L)); + } final IndexDiskUsageStats.PerFieldDiskUsage valueField = stats.getFields().get("value"); assertThat(valueField.getInvertedIndexBytes(), equalTo(0L)); assertThat(valueField.getStoredFieldBytes(), equalTo(0L)); From a50a8f9eb92ae170b3e665271b26d1f1d0f66495 Mon Sep 17 00:00:00 2001 From: weensykim Date: Wed, 25 Aug 2021 23:03:51 +0900 Subject: [PATCH 009/128] [DOCS] Fix typo (#76911) --- docs/java-rest/high-level/migration.asciidoc | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/java-rest/high-level/migration.asciidoc b/docs/java-rest/high-level/migration.asciidoc index c8db57f52598d..babc7681be636 100644 --- a/docs/java-rest/high-level/migration.asciidoc +++ b/docs/java-rest/high-level/migration.asciidoc @@ -219,7 +219,7 @@ transportClient.delete(request, new ActionListener() { // <2> }); -------------------------------------------------- <1> Create the `DeleteRequest` using its constructor -<2> Execute the `DeleteRequest` by passing the request and a +<2> Execute the `DeleteRequest` by passing the request and an `ActionListener` that gets called on execution completion or failure. 
This method does not block and returns immediately. <3> The `onResponse()` method is called when the response is @@ -234,7 +234,7 @@ The same request asynchronously executed using the high-level client is: include-tagged::{doc-tests}/MigrationDocumentationIT.java[migration-request-async-execution] -------------------------------------------------- <1> Create the `DeleteRequest` using its constructor -<2> Execute the `DeleteRequest` by passing the request and a +<2> Execute the `DeleteRequest` by passing the request and an `ActionListener` that gets called on execution completion or failure. This method does not block and returns immediately. <3> The `onResponse()` method is called when the response is From 9389396183e6cccfd34641d1b7f515ad94c95fe1 Mon Sep 17 00:00:00 2001 From: Mark Vieira Date: Wed, 25 Aug 2021 09:01:21 -0700 Subject: [PATCH 010/128] Remove unnecessary test task dependency (#76938) We auto-wire the bwcTestSnapshots task to the check task. This dependOn means we run the full wire compatible test suite on every build which is not what we want since we run those explicitly in parallel CI jobs. Running check should only test unreleased versions. Co-authored-by: Elastic Machine --- x-pack/plugin/sql/qa/mixed-node/build.gradle | 5 ----- 1 file changed, 5 deletions(-) diff --git a/x-pack/plugin/sql/qa/mixed-node/build.gradle b/x-pack/plugin/sql/qa/mixed-node/build.gradle index 0c0869cd4d57d..9965f433031a6 100644 --- a/x-pack/plugin/sql/qa/mixed-node/build.gradle +++ b/x-pack/plugin/sql/qa/mixed-node/build.gradle @@ -62,9 +62,4 @@ for (Version bwcVersion : BuildParams.bwcVersions.wireCompatible.findAll { it.on tasks.register(bwcTaskName(bwcVersion)) { dependsOn "${baseName}#mixedClusterTest" } - - // run these bwc tests as part of the "check" task - tasks.named("check").configure { - dependsOn "${baseName}#mixedClusterTest" - } } From 5c00587edf70f30ffb4a22c19d9212643d351ea9 Mon Sep 17 00:00:00 2001 From: Rory Hunter Date: Wed, 25 Aug 2021 17:07:04 +0100 Subject: [PATCH 011/128] Update Azul JVM on Apple M1 (#76923) Closes #76901. Bump the Azul JVM version for aarch64 M1 i.e. Apple silicon. --- .../gradle/internal/JdkDownloadPluginFuncTest.groovy | 4 ++-- .../org/elasticsearch/gradle/internal/JdkDownloadPlugin.java | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/JdkDownloadPluginFuncTest.groovy b/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/JdkDownloadPluginFuncTest.groovy index 62bb3f917df8b..f92931f862921 100644 --- a/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/JdkDownloadPluginFuncTest.groovy +++ b/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/JdkDownloadPluginFuncTest.groovy @@ -214,8 +214,8 @@ class JdkDownloadPluginFuncTest extends AbstractGradleFuncTest { return "/java/GA/" + versionPath + "/GPL/" + filename; } else if (vendor.equals(VENDOR_AZUL)) { final String module = isMac(platform) ? "macosx" : platform; - // we only test zulu 15 darwin aarch64 for now - return "/zulu${module.equals('linux') ? '-embedded' : ''}/bin/zulu16.28.11-ca-jdk16.0.0-${module}_${arch}.tar.gz"; + // we only test zulu 16 darwin aarch64 for now + return "/zulu${module.equals('linux') ? 
'-embedded' : ''}/bin/zulu16.32.15-ca-jdk16.0.2-${module}_${arch}.tar.gz"; } } diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/JdkDownloadPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/JdkDownloadPlugin.java index 310703082edda..c2043f15a5773 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/JdkDownloadPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/JdkDownloadPlugin.java @@ -138,7 +138,7 @@ private void setupRepository(Project project, Jdk jdk) { + zuluPathSuffix + "/bin/zulu" + jdk.getMajor() - + ".28.11-ca-jdk16.0.0-" + + ".32.15-ca-jdk16.0.2-" + azulPlatform(jdk) + "_[classifier].[ext]"; break; From 8a9ea8565745cb811a45b87fbd4bd14cd6516f87 Mon Sep 17 00:00:00 2001 From: Mark Vieira Date: Wed, 25 Aug 2021 09:52:15 -0700 Subject: [PATCH 012/128] Remove no-jdk distributions (#76896) --- .../internal/test/DistroTestPlugin.java | 34 +--- distribution/archives/build.gradle | 56 ++----- .../no-jdk-darwin-aarch64-tar/build.gradle | 2 - .../archives/no-jdk-darwin-tar/build.gradle | 2 - .../archives/no-jdk-linux-tar/build.gradle | 2 - .../archives/no-jdk-windows-zip/build.gradle | 2 - distribution/build.gradle | 33 ++-- distribution/packages/build.gradle | 155 +++++------------- distribution/packages/no-jdk-deb/build.gradle | 2 - distribution/packages/no-jdk-rpm/build.gradle | 2 - .../src/main/resources/logo/elastic.gif | Bin 1865 -> 0 bytes settings.gradle | 6 - 12 files changed, 72 insertions(+), 224 deletions(-) delete mode 100644 distribution/archives/no-jdk-darwin-aarch64-tar/build.gradle delete mode 100644 distribution/archives/no-jdk-darwin-tar/build.gradle delete mode 100644 distribution/archives/no-jdk-linux-tar/build.gradle delete mode 100644 distribution/archives/no-jdk-windows-zip/build.gradle delete mode 100644 distribution/packages/no-jdk-deb/build.gradle delete mode 100644 distribution/packages/no-jdk-rpm/build.gradle delete mode 100755 distribution/packages/no-jdk-rpm/src/main/resources/logo/elastic.gif diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/DistroTestPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/DistroTestPlugin.java index cdc9907dd4cfa..8458ffe3e7fe4 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/DistroTestPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/DistroTestPlugin.java @@ -364,38 +364,16 @@ private List configureDistributions(Project project) List currentDistros = new ArrayList<>(); for (Architecture architecture : Architecture.values()) { - ALL_INTERNAL.stream().forEach(type -> { - for (boolean bundledJdk : Arrays.asList(true, false)) { - if (bundledJdk == false) { - // We'll never publish an ARM (aarch64) build without a bundled JDK. - if (architecture == Architecture.AARCH64) { - continue; - } - // All our Docker images include a bundled JDK so it doesn't make sense to test without one. 
- if (type.isDocker()) { - continue; - } - } - currentDistros.add( - createDistro(distributions, architecture, type, null, bundledJdk, VersionProperties.getElasticsearch()) - ); - } - }); + ALL_INTERNAL.stream().forEach(type -> currentDistros.add( + createDistro(distributions, architecture, type, null, true, VersionProperties.getElasticsearch()) + )); } for (Architecture architecture : Architecture.values()) { for (Platform platform : Arrays.asList(Platform.LINUX, Platform.WINDOWS)) { - for (boolean bundledJdk : Arrays.asList(true, false)) { - if (bundledJdk == false && architecture != Architecture.X64) { - // We will never publish distributions for non-x86 (amd64) platforms - // without a bundled JDK - continue; - } - - currentDistros.add( - createDistro(distributions, architecture, ARCHIVE, platform, bundledJdk, VersionProperties.getElasticsearch()) - ); - } + currentDistros.add( + createDistro(distributions, architecture, ARCHIVE, platform, true, VersionProperties.getElasticsearch()) + ); } } diff --git a/distribution/archives/build.gradle b/distribution/archives/build.gradle index 0c56b8e02a021..c0d575cdbc2cf 100644 --- a/distribution/archives/build.gradle +++ b/distribution/archives/build.gradle @@ -11,26 +11,26 @@ import java.nio.file.Path apply plugin: 'elasticsearch.internal-distribution-archive-setup' -CopySpec archiveFiles(CopySpec modulesFiles, String distributionType, String platform, String architecture, boolean oss, boolean jdk) { +CopySpec archiveFiles(CopySpec modulesFiles, String distributionType, String platform, String architecture, boolean isTestDistro) { return copySpec { into("elasticsearch-${version}") { into('lib') { - with libFiles(oss) + with libFiles(isTestDistro) } into('config') { dirMode 0750 fileMode 0660 - with configFiles(distributionType, oss, jdk) + with configFiles(distributionType, isTestDistro) from { dirMode 0750 jvmOptionsDir.getParent() } } into('bin') { - with binFiles(distributionType, oss, jdk) + with binFiles(distributionType, isTestDistro) } - if (jdk) { - into("darwin".equals(platform) ? 'jdk.app' : 'jdk') { + into("darwin".equals(platform) ? 'jdk.app' : 'jdk') { + if (isTestDistro == false) { with jdkFiles(project, platform, architecture) } } @@ -50,11 +50,11 @@ CopySpec archiveFiles(CopySpec modulesFiles, String distributionType, String pla include 'README.asciidoc' } from(rootProject.file('licenses')) { - include oss ? 'SSPL-1.0+ELASTIC-LICENSE-2.0.txt' : 'ELASTIC-LICENSE-2.0.txt' + include isTestDistro ? 
'SSPL-1.0+ELASTIC-LICENSE-2.0.txt' : 'ELASTIC-LICENSE-2.0.txt' rename { 'LICENSE.txt' } } - with noticeFile(oss, jdk) + with noticeFile(isTestDistro) into('modules') { with modulesFiles } @@ -65,70 +65,42 @@ CopySpec archiveFiles(CopySpec modulesFiles, String distributionType, String pla distribution_archives { integTestZip { content { - archiveFiles(transportModulesFiles, 'zip', null, 'x64', true, false) + archiveFiles(transportModulesFiles, 'zip', null, 'x64', true) } } windowsZip { archiveClassifier = 'windows-x86_64' content { - archiveFiles(modulesFiles('windows-x86_64'), 'zip', 'windows', 'x64', false, true) - } - } - - noJdkWindowsZip { - archiveClassifier = 'no-jdk-windows-x86_64' - content { - archiveFiles(modulesFiles('windows-x86_64'), 'zip', 'windows', 'x64', false, false) + archiveFiles(modulesFiles('windows-x86_64'), 'zip', 'windows', 'x64', false) } } darwinTar { archiveClassifier = 'darwin-x86_64' content { - archiveFiles(modulesFiles('darwin-x86_64'), 'tar', 'darwin', 'x64', false, true) + archiveFiles(modulesFiles('darwin-x86_64'), 'tar', 'darwin', 'x64', false) } } darwinAarch64Tar { archiveClassifier = 'darwin-aarch64' content { - archiveFiles(modulesFiles('darwin-aarch64'), 'tar', 'darwin', 'aarch64', false, true) - } - } - - noJdkDarwinTar { - archiveClassifier = 'no-jdk-darwin-x86_64' - content { - archiveFiles(modulesFiles('darwin-x86_64'), 'tar', 'darwin', 'x64', false, false) - } - } - - noJdkDarwinAarch64Tar { - archiveClassifier = 'no-jdk-darwin-aarch64' - content { - archiveFiles(modulesFiles('darwin-aarch64'), 'tar', 'darwin', 'aarch64', false, false) + archiveFiles(modulesFiles('darwin-aarch64'), 'tar', 'darwin', 'aarch64', false) } } linuxAarch64Tar { archiveClassifier = 'linux-aarch64' content { - archiveFiles(modulesFiles('linux-aarch64'), 'tar', 'linux', 'aarch64', false, true) + archiveFiles(modulesFiles('linux-aarch64'), 'tar', 'linux', 'aarch64', false) } } linuxTar { archiveClassifier = 'linux-x86_64' content { - archiveFiles(modulesFiles('linux-x86_64'), 'tar', 'linux', 'x64', false, true) - } - } - - noJdkLinuxTar { - archiveClassifier = 'no-jdk-linux-x86_64' - content { - archiveFiles(modulesFiles('linux-x86_64'), 'tar', 'linux', 'x64', false, false) + archiveFiles(modulesFiles('linux-x86_64'), 'tar', 'linux', 'x64', false) } } } diff --git a/distribution/archives/no-jdk-darwin-aarch64-tar/build.gradle b/distribution/archives/no-jdk-darwin-aarch64-tar/build.gradle deleted file mode 100644 index 4f7400c7eaa0e..0000000000000 --- a/distribution/archives/no-jdk-darwin-aarch64-tar/build.gradle +++ /dev/null @@ -1,2 +0,0 @@ -// This file is intentionally blank. All configuration of the -// distribution is done in the parent project. \ No newline at end of file diff --git a/distribution/archives/no-jdk-darwin-tar/build.gradle b/distribution/archives/no-jdk-darwin-tar/build.gradle deleted file mode 100644 index 4a6dde5fc0c92..0000000000000 --- a/distribution/archives/no-jdk-darwin-tar/build.gradle +++ /dev/null @@ -1,2 +0,0 @@ -// This file is intentionally blank. All configuration of the -// distribution is done in the parent project. diff --git a/distribution/archives/no-jdk-linux-tar/build.gradle b/distribution/archives/no-jdk-linux-tar/build.gradle deleted file mode 100644 index 4a6dde5fc0c92..0000000000000 --- a/distribution/archives/no-jdk-linux-tar/build.gradle +++ /dev/null @@ -1,2 +0,0 @@ -// This file is intentionally blank. All configuration of the -// distribution is done in the parent project. 
diff --git a/distribution/archives/no-jdk-windows-zip/build.gradle b/distribution/archives/no-jdk-windows-zip/build.gradle deleted file mode 100644 index 4a6dde5fc0c92..0000000000000 --- a/distribution/archives/no-jdk-windows-zip/build.gradle +++ /dev/null @@ -1,2 +0,0 @@ -// This file is intentionally blank. All configuration of the -// distribution is done in the parent project. diff --git a/distribution/build.gradle b/distribution/build.gradle index 942a718acea96..26093832ae17b 100644 --- a/distribution/build.gradle +++ b/distribution/build.gradle @@ -69,10 +69,6 @@ def buildDefaultNoticeTaskProvider = tasks.register("buildDefaultNotice", Notice licensesDir new File(project(':distribution').projectDir, 'licenses') } -def buildDefaultNoJdkNoticeTaskProvider = tasks.register("buildDefaultNoJdkNotice", NoticeTask) - -def buildOssNoJdkNoticeTaskProvider = tasks.register("buildOssNoJdkNotice", NoticeTask) - // The :server and :libs projects belong to all distributions tasks.withType(NoticeTask).configureEach { licensesDir project(':server').file('licenses') @@ -86,7 +82,6 @@ tasks.withType(NoticeTask).configureEach { /***************************************************************************** * Modules * *****************************************************************************/ -String ossOutputs = 'build/outputs/oss' String defaultOutputs = 'build/outputs/default' String systemdOutputs = 'build/outputs/systemd' String transportOutputs = 'build/outputs/transport-only' @@ -354,20 +349,20 @@ configure(subprojects.findAll { ['archives', 'packages'].contains(it.name) }) { from buildTransportModulesTaskProvider } - configFiles = { distributionType, testDistro, jdk -> + configFiles = { distributionType, isTestDistro -> copySpec { with copySpec { // main config files, processed with distribution specific substitutions from '../src/config' exclude 'log4j2.properties' // this is handled separately below - filter("tokens" : expansionsForDistribution(distributionType, testDistro, jdk), ReplaceTokens.class) + filter("tokens" : expansionsForDistribution(distributionType, isTestDistro), ReplaceTokens.class) } from buildDefaultLog4jConfigTaskProvider from defaultConfigFiles } } - binFiles = { distributionType, testDistro, jdk -> + binFiles = { distributionType, testDistro -> copySpec { // non-windows files, for all distributions with copySpec { @@ -375,7 +370,7 @@ configure(subprojects.findAll { ['archives', 'packages'].contains(it.name) }) { exclude '*.exe' exclude '*.bat' eachFile { it.setMode(0755) } - filter("tokens" : expansionsForDistribution(distributionType, testDistro, jdk), ReplaceTokens.class) + filter("tokens" : expansionsForDistribution(distributionType, testDistro), ReplaceTokens.class) } // windows files, only for zip if (distributionType == 'zip') { @@ -383,7 +378,7 @@ configure(subprojects.findAll { ['archives', 'packages'].contains(it.name) }) { from '../src/bin' include '*.bat' filter(FixCrLfFilter, eol: FixCrLfFilter.CrLf.newInstance('crlf')) - filter("tokens" : expansionsForDistribution(distributionType, testDistro, jdk), ReplaceTokens.class) + filter("tokens" : expansionsForDistribution(distributionType, testDistro), ReplaceTokens.class) } with copySpec { from '../src/bin' @@ -403,16 +398,12 @@ configure(subprojects.findAll { ['archives', 'packages'].contains(it.name) }) { } } - noticeFile = { testDistro, jdk -> + noticeFile = { testDistro -> copySpec { if (testDistro) { from buildServerNoticeTaskProvider } else { - if (jdk) { - from buildDefaultNoticeTaskProvider - } else { - 
from buildDefaultNoJdkNoticeTaskProvider - } + from buildDefaultNoticeTaskProvider } } } @@ -466,14 +457,14 @@ configure(subprojects.findAll { ['archives', 'packages'].contains(it.name) }) { * */ subprojects { - ext.expansionsForDistribution = { distributionType, testDistro, jdk -> + ext.expansionsForDistribution = { distributionType, isTestDistro -> final String packagingPathData = "path.data: /var/lib/elasticsearch" final String pathLogs = "/var/log/elasticsearch" final String packagingPathLogs = "path.logs: ${pathLogs}" final String packagingLoggc = "${pathLogs}/gc.log" String licenseText - if (testDistro) { + if (isTestDistro) { licenseText = rootProject.file('licenses/SSPL-1.0+ELASTIC-LICENSE-2.0.txt').getText('UTF-8') } else { licenseText = rootProject.file('licenses/ELASTIC-LICENSE-2.0.txt').getText('UTF-8') @@ -541,7 +532,7 @@ subprojects { ], 'es.distribution.flavor': [ - 'def': testDistro ? 'oss' : 'default' + 'def': 'default' ], 'es.distribution.type': [ @@ -552,11 +543,11 @@ subprojects { ], 'es.bundled_jdk': [ - 'def': jdk ? 'true' : 'false' + 'def': 'true' ], 'license.name': [ - 'deb': testDistro ? 'ASL-2.0' : 'Elastic-License' + 'deb': 'Elastic-License' ], 'license.text': [ diff --git a/distribution/packages/build.gradle b/distribution/packages/build.gradle index abae2aae9d032..9815e1cabdc8b 100644 --- a/distribution/packages/build.gradle +++ b/distribution/packages/build.gradle @@ -46,24 +46,24 @@ plugins { id "nebula.ospackage-base" version "8.6.1" } -void addProcessFilesTask(String type, boolean oss, boolean jdk) { - String packagingFiles = "build/packaging/${oss ? 'oss-' : ''}${jdk ? '' : 'no-jdk-'}${type}" +['deb', 'rpm'].each { type -> + String packagingFiles = "build/packaging/${type}" - String taskName = "process${oss ? 'Oss' : ''}${jdk ? '' : 'NoJdk'}${type.capitalize()}Files" + String taskName = "process${type.capitalize()}Files" tasks.register(taskName, Copy) { into packagingFiles with copySpec { from 'src/common' from "src/${type}" - filter("tokens" : expansionsForDistribution(type, oss, jdk), ReplaceTokens.class) + filter("tokens" : expansionsForDistribution(type, false), ReplaceTokens.class) } into('etc/elasticsearch') { - with configFiles(type, oss, jdk) + with configFiles(type, false) } - filter("tokens" : expansionsForDistribution(type, oss, jdk), ReplaceTokens.class) + filter("tokens" : expansionsForDistribution(type, false), ReplaceTokens.class) doLast { // create empty dirs, we set the permissions when configuring the packages @@ -78,25 +78,16 @@ void addProcessFilesTask(String type, boolean oss, boolean jdk) { } } -addProcessFilesTask('deb', true, true) -addProcessFilesTask('deb', true, false) -addProcessFilesTask('deb', false, true) -addProcessFilesTask('deb', false, false) -addProcessFilesTask('rpm', true, true) -addProcessFilesTask('rpm', true, false) -addProcessFilesTask('rpm', false, true) -addProcessFilesTask('rpm', false, false) - // Common configuration that is package dependent. This can't go in ospackage // since we have different templated files that need to be consumed, but the structure // is the same -Closure commonPackageConfig(String type, boolean oss, boolean jdk, String architecture) { +def commonPackageConfig(String type, String architecture) { return { onlyIf { OS.current().equals(OS.WINDOWS) == false } - dependsOn "process${oss ? 'Oss' : ''}${jdk ? '' : 'NoJdk'}${type.capitalize()}Files" - packageName "elasticsearch${oss ? 
'-oss' : ''}" + dependsOn "process${type.capitalize()}Files" + packageName "elasticsearch" if (type == 'deb') { if (architecture == 'x64') { arch('amd64') @@ -114,11 +105,10 @@ Closure commonPackageConfig(String type, boolean oss, boolean jdk, String archit } } // Follow elasticsearch's file naming convention - String jdkString = jdk ? "" : "no-jdk-" - String prefix = "${architecture == 'aarch64' ? 'aarch64-' : ''}${oss ? 'oss-' : ''}${jdk ? '' : 'no-jdk-'}${type}" + String prefix = "${architecture == 'aarch64' ? 'aarch64-' : ''}${type}" destinationDirectory = file("${prefix}/build/distributions") - archiveFileName.value(project.provider({ "${packageName}-${project.version}-${jdkString}${archString}.${type}" } )) - String packagingFiles = "build/packaging/${oss ? 'oss-' : ''}${jdk ? '' : 'no-jdk-'}${type}" + archiveFileName.value(project.provider({ "${packageName}-${project.version}-${archString}.${type}" } )) + String packagingFiles = "build/packaging/${type}" String scripts = "${packagingFiles}/scripts" preInstall file("${scripts}/preinst") @@ -133,22 +123,20 @@ Closure commonPackageConfig(String type, boolean oss, boolean jdk, String archit // specify it again explicitly for copying common files into('/usr/share/elasticsearch') { into('bin') { - with binFiles(type, oss, jdk) + with binFiles(type, false) } from(rootProject.projectDir) { include 'README.asciidoc' fileMode 0644 } into('lib') { - with libFiles(oss) + with libFiles(false) } into('modules') { with modulesFiles('linux-' + ((architecture == 'x64') ? 'x86_64' : architecture)) } - if (jdk) { - into('jdk') { - with jdkFiles(project, 'linux', architecture) - } + into('jdk') { + with jdkFiles(project, 'linux', architecture) } // we need to specify every intermediate directory in these paths so the package managers know they are explicitly // intended to manage them; otherwise they may be left behind on uninstallation. duplicate calls of the same @@ -178,7 +166,7 @@ Closure commonPackageConfig(String type, boolean oss, boolean jdk, String archit assert type == 'rpm' into('/usr/share/elasticsearch') { from(rootProject.file('licenses')) { - include oss ? 
'SSPL-1.0+ELASTIC-LICENSE-2.0.txt' : 'ELASTIC-LICENSE-2.0.txt' + include 'ELASTIC-LICENSE-2.0.txt' rename { 'LICENSE.txt' } } fileMode 0644 @@ -189,12 +177,10 @@ Closure commonPackageConfig(String type, boolean oss, boolean jdk, String archit configurationFile '/etc/elasticsearch/elasticsearch.yml' configurationFile '/etc/elasticsearch/jvm.options' configurationFile '/etc/elasticsearch/log4j2.properties' - if (oss == false) { - configurationFile '/etc/elasticsearch/role_mapping.yml' - configurationFile '/etc/elasticsearch/roles.yml' - configurationFile '/etc/elasticsearch/users' - configurationFile '/etc/elasticsearch/users_roles' - } + configurationFile '/etc/elasticsearch/role_mapping.yml' + configurationFile '/etc/elasticsearch/roles.yml' + configurationFile '/etc/elasticsearch/users' + configurationFile '/etc/elasticsearch/users_roles' from("${packagingFiles}") { dirMode 02750 into('/etc') @@ -213,7 +199,7 @@ Closure commonPackageConfig(String type, boolean oss, boolean jdk, String archit createDirectoryEntry true fileType CONFIG | NOREPLACE } - String envFile = expansionsForDistribution(type, oss, jdk)['path.env'] + String envFile = expansionsForDistribution(type, false)['path.env'] configurationFile envFile into(new File(envFile).getParent()) { fileType CONFIG | NOREPLACE @@ -261,10 +247,10 @@ Closure commonPackageConfig(String type, boolean oss, boolean jdk, String archit copyEmptyDir('/usr/share/elasticsearch/plugins', 'root', 'root', 0755) // the oss package conflicts with the default distribution and vice versa - conflicts('elasticsearch' + (oss ? '' : '-oss')) + conflicts('elasticsearch-oss') into '/usr/share/elasticsearch' - with noticeFile(oss, jdk) + with noticeFile(false) } } @@ -300,17 +286,13 @@ ospackage { into '/usr/share/elasticsearch' } -Closure commonDebConfig(boolean oss, boolean jdk, String architecture) { +Closure commonDebConfig(String architecture) { return { - configure(commonPackageConfig('deb', oss, jdk, architecture)) + configure(commonPackageConfig('deb', architecture)) // jdeb does not provide a way to set the License control attribute, and ospackage // silently ignores setting it. 
Instead, we set the license as "custom field" - if (oss) { - customFields['License'] = 'ASL-2.0' - } else { - customFields['License'] = 'Elastic-License' - } + customFields['License'] = 'Elastic-License' archiveVersion = project.version.replace('-', '~') packageGroup 'web' @@ -323,46 +305,23 @@ Closure commonDebConfig(boolean oss, boolean jdk, String architecture) { into('/usr/share/lintian/overrides') { from('src/deb/lintian/elasticsearch') - if (oss) { - rename('elasticsearch', 'elasticsearch-oss') - } } } } tasks.register('buildAarch64Deb', Deb) { - configure(commonDebConfig(false, true, 'aarch64')) + configure(commonDebConfig('aarch64')) } tasks.register('buildDeb', Deb) { - configure(commonDebConfig(false, true, 'x64')) -} - -tasks.register('buildAarch64OssDeb', Deb) { - configure(commonDebConfig(true, true, 'aarch64')) + configure(commonDebConfig('x64')) } -tasks.register('buildOssDeb', Deb) { - configure(commonDebConfig(true, true, 'x64')) -} - -tasks.register('buildNoJdkDeb', Deb) { - configure(commonDebConfig(false, false, 'x64')) -} - -tasks.register('buildOssNoJdkDeb', Deb) { - configure(commonDebConfig(true, false, 'x64')) -} - -Closure commonRpmConfig(boolean oss, boolean jdk, String architecture) { +Closure commonRpmConfig(String architecture) { return { - configure(commonPackageConfig('rpm', oss, jdk, architecture)) + configure(commonPackageConfig('rpm', architecture)) - if (oss) { - license 'ASL 2.0' - } else { - license 'Elastic License' - } + license 'Elastic License' packageGroup 'Application/Internet' requires '/bin/bash' @@ -384,27 +343,11 @@ Closure commonRpmConfig(boolean oss, boolean jdk, String architecture) { } tasks.register('buildAarch64Rpm', Rpm) { - configure(commonRpmConfig(false, true, 'aarch64')) + configure(commonRpmConfig('aarch64')) } tasks.register('buildRpm', Rpm) { - configure(commonRpmConfig(false, true, 'x64')) -} - -tasks.register('buildAarch64OssRpm', Rpm) { - configure(commonRpmConfig(true, true, 'aarch64')) -} - -tasks.register('buildOssRpm', Rpm) { - configure(commonRpmConfig(true, true, 'x64')) -} - -tasks.register('buildNoJdkRpm', Rpm) { - configure(commonRpmConfig(false, false, 'x64')) -} - -tasks.register('buildOssNoJdkRpm', Rpm) { - configure(commonRpmConfig(true, false, 'x64')) + configure(commonRpmConfig('x64')) } Closure dpkgExists = { it -> new File('/bin/dpkg-deb').exists() || new File('/usr/bin/dpkg-deb').exists() || new File('/usr/local/bin/dpkg-deb').exists() } @@ -481,15 +424,9 @@ subprojects { Path copyrightPath String expectedLicense String licenseFilename - if (project.name.contains('oss-')) { - copyrightPath = packageExtractionDir.toPath().resolve("usr/share/doc/elasticsearch-oss/copyright") - expectedLicense = "ASL-2.0" - licenseFilename = "SSPL-1.0+ELASTIC-LICENSE-2.0.txt" - } else { - copyrightPath = packageExtractionDir.toPath().resolve("usr/share/doc/elasticsearch/copyright") - expectedLicense = "Elastic-License" - licenseFilename = "ELASTIC-LICENSE-2.0.txt" - } + copyrightPath = packageExtractionDir.toPath().resolve("usr/share/doc/elasticsearch/copyright") + expectedLicense = "Elastic-License" + licenseFilename = "ELASTIC-LICENSE-2.0.txt" final List header = Arrays.asList("Format: https://www.debian.org/doc/packaging-manuals/copyright-format/1.0/", "Copyright: Elasticsearch B.V. 
", "License: " + expectedLicense) @@ -504,11 +441,7 @@ subprojects { onlyIf rpmExists doLast { String licenseFilename - if (project.name.contains('oss-')) { - licenseFilename = "SSPL-1.0+ELASTIC-LICENSE-2.0.txt" - } else { - licenseFilename = "ELASTIC-LICENSE-2.0.txt" - } + licenseFilename = "ELASTIC-LICENSE-2.0.txt" final List licenseLines = Files.readAllLines(rootDir.toPath().resolve("licenses/" + licenseFilename)) final Path licensePath = packageExtractionDir.toPath().resolve("usr/share/elasticsearch/LICENSE.txt") assertLinesInFile(licensePath, licenseLines) @@ -544,11 +477,7 @@ subprojects { exec.standardOutput = output doLast { String expectedLicense - if (project.name.contains('oss-')) { - expectedLicense = "ASL-2.0" - } else { - expectedLicense = "Elastic-License" - } + expectedLicense = "Elastic-License" final Pattern pattern = Pattern.compile("\\s*License: (.+)") final String info = output.toString('UTF-8') final String[] actualLines = info.split("\n") @@ -581,11 +510,7 @@ subprojects { doLast { String license = output.toString('UTF-8') String expectedLicense - if (project.name.contains('oss-')) { - expectedLicense = "ASL 2.0" - } else { - expectedLicense = "Elastic License" - } + expectedLicense = "Elastic License" if (license != expectedLicense) { throw new GradleException("expected license [${expectedLicense}] for [${-> buildDist.get().outputs.files.singleFile}] but was [${license}]") } diff --git a/distribution/packages/no-jdk-deb/build.gradle b/distribution/packages/no-jdk-deb/build.gradle deleted file mode 100644 index 4a6dde5fc0c92..0000000000000 --- a/distribution/packages/no-jdk-deb/build.gradle +++ /dev/null @@ -1,2 +0,0 @@ -// This file is intentionally blank. All configuration of the -// distribution is done in the parent project. diff --git a/distribution/packages/no-jdk-rpm/build.gradle b/distribution/packages/no-jdk-rpm/build.gradle deleted file mode 100644 index 4a6dde5fc0c92..0000000000000 --- a/distribution/packages/no-jdk-rpm/build.gradle +++ /dev/null @@ -1,2 +0,0 @@ -// This file is intentionally blank. All configuration of the -// distribution is done in the parent project. diff --git a/distribution/packages/no-jdk-rpm/src/main/resources/logo/elastic.gif b/distribution/packages/no-jdk-rpm/src/main/resources/logo/elastic.gif deleted file mode 100755 index e3b20998d5300fdb08716ab6ac8005d3e1b41e33..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1865 zcmV-P2e$Y}Nk%w1VL1Rf0O$VzEk%zmMvyN@kugb=HB6Q+PnbGUmOE0H6K7Z)W>OVs zSwL2s7;0NXS)N2%pB!yoA8%kGabinip(k}_O=P1gcxhB@rZ9bO>qCoNY^YXlsp?CO zL4$PePLM-|c4v97M~QlAd$H_SnM#a(Zh*H~kc04JpmT`0UY3Y=jJ)w{q+ywiVw#Qb zaHfHgylE*!R8za?{NA zt<3ko9IDV(s9`?f@f7Q>p!`}DAA*|+S8@v+tSwN_}Fd7 z+}rutZur@CuI1g-+3C#Vp!wQ%$===0-`KI|-_GTtx#r;c-G$BPs@dJ`y6EBj--p%c zti0;v`{0lH*yY*d-qq=}zU=1K>b2YD;QQmA#_Z?E?dbjGpycK6$nNR>=AruG>;31b z{O7LV>gCGt>*wh5{p0S^@a_KUvD5MH$vLc_W$j=*7WiJ?!oBt?C$UR<@MYC z@X6ry^#1Ji^YHcX^6&og(e3u#;`jFQ^zr-g_x1Dl{`A@E_xSVm^5psW@b>ug_vZfd z`uO$w>-qWe_xbhr^#Anx?E3rj`1}9${Pg*j2b+R5-W;C`0!pbN^QQVRCz{FFpwi-3e=eF&dQrN+u*wAPo_wJ_blNY zdecwFo<3*#llSc#KBWIN>LE&1WgBw$l142jjSUhcbF_UcIkjrlHt+EH#22r~1`Kd9 zY`B7Pli9m|c*gV>uSMLyD$4mgbN8;ScO(6k1M>GTkZ-#w$ZqQ7Nt8^d*jFPP-J zwX#;M7e;ejTlYeJHmfIN#czyFZ>aZ9(CVsUDo=!M8iR>={!-qiN)2kmdR%(z(DrV4 z)t8pFWr5M5ahQ6=oKwYRe4)Q~=9vEC;}vFx2n?zGq0&y&S9g->+Ev*x3H&yEPM67h z*NFX4XYU#0PdIxx0KkC*Xdpu#-Nb{9RQ}yWhi)W=vqS&~W|(0B6u2OaKe$Awl2Puc zLt#PTq=BJ^EFPG`Ho-+m$vx^Av%(0<%myd%q%pPf;Rkz^>~rI#<91C>9! 
znCYVg%xLruDr!j91~oJWG3TM3IOz`|da`H%Htn>dk2@LNLWfy&taFb$FC2O(3f>^) zPachC_(6Q!FwsLF>A3UIF#cuXWYj$8l;G*149p|yD5U~$LOl9FLqrY4;sD1r?EvMB zZ7V@SV>2Aw>ZvNCwj)5JF|=b3F+C9LZ4EbcL5w{bk#mU{p%|G<1=4yth^XO&U}gYE zq+<^)Iee>c4Nt_Pk3H`M98M(=wCiakP~F3blmd8RjyhQsU!JYzh zXE-OcNWdQ9%!A0pC=)vbG4m+G@u3q?s8|LX9*{{m`qYu~&Q#c=&Im7i`9n7b-4hN8 z2R1Ov!)Wa6v@Y{-5_AF)SQDT=>BK?8G3eNXh0{(q;j^tABOu2)4n^8dI!}NNHZ1q# z^1=n}A~1!W-nCbMh$5L=WO zpn#q@?sOmaAOnRKf2@*?Kf0ddDn%!vX5nki9jWsC)VVANA1SDosRTc!oG2>#E^{ zO6}Py>fB*nH Dw&FU# diff --git a/settings.gradle b/settings.gradle index 1d8db586bee04..60917b297fd4d 100644 --- a/settings.gradle +++ b/settings.gradle @@ -22,14 +22,10 @@ List projects = [ 'benchmarks', 'distribution:archives:integ-test-zip', 'distribution:archives:windows-zip', - 'distribution:archives:no-jdk-windows-zip', 'distribution:archives:darwin-tar', 'distribution:archives:darwin-aarch64-tar', - 'distribution:archives:no-jdk-darwin-tar', - 'distribution:archives:no-jdk-darwin-aarch64-tar', 'distribution:archives:linux-aarch64-tar', 'distribution:archives:linux-tar', - 'distribution:archives:no-jdk-linux-tar', 'distribution:docker', 'distribution:docker:cloud-docker-export', 'distribution:docker:cloud-docker-aarch64-export', @@ -43,10 +39,8 @@ List projects = [ 'distribution:docker:ubi-docker-export', 'distribution:packages:aarch64-deb', 'distribution:packages:deb', - 'distribution:packages:no-jdk-deb', 'distribution:packages:aarch64-rpm', 'distribution:packages:rpm', - 'distribution:packages:no-jdk-rpm', 'distribution:bwc:bugfix', 'distribution:bwc:maintenance', 'distribution:bwc:minor', From d36f24fbc367c3c51be81ba1c8481d0bd7b5a95e Mon Sep 17 00:00:00 2001 From: Lisa Cawley Date: Wed, 25 Aug 2021 11:35:21 -0700 Subject: [PATCH 013/128] [DOCS] Update datafeed details in ML docs (#76854) --- .../anomaly-detection/apis/close-job.asciidoc | 16 +- .../apis/delete-job.asciidoc | 7 +- .../apis/put-datafeed.asciidoc | 8 +- .../anomaly-detection/apis/put-job.asciidoc | 17 +- .../functions/ml-geo-functions.asciidoc | 6 +- .../ml-configuring-aggregations.asciidoc | 174 ++++++++---------- .../ml-configuring-transform.asciidoc | 92 +++++---- 7 files changed, 152 insertions(+), 168 deletions(-) diff --git a/docs/reference/ml/anomaly-detection/apis/close-job.asciidoc b/docs/reference/ml/anomaly-detection/apis/close-job.asciidoc index d10a83a5972aa..c9687b559b56c 100644 --- a/docs/reference/ml/anomaly-detection/apis/close-job.asciidoc +++ b/docs/reference/ml/anomaly-detection/apis/close-job.asciidoc @@ -7,10 +7,6 @@ ++++ Closes one or more {anomaly-jobs}. -A job can be opened and closed multiple times throughout its lifecycle. - -A closed job cannot receive data or perform analysis -operations, but you can still explore and navigate results. [[ml-close-job-request]] == {api-request-title} @@ -30,13 +26,19 @@ operations, but you can still explore and navigate results. [[ml-close-job-desc]] == {api-description-title} +A job can be opened and closed multiple times throughout its lifecycle. + +A closed job cannot receive data or perform analysis operations, but you can +still explore and navigate results. + You can close multiple {anomaly-jobs} in a single API request by using a group name, a comma-separated list of jobs, or a wildcard expression. You can close all jobs by using `_all` or by specifying `*` as the ``. -If you close an {anomaly-job} whose {dfeed} is running, the request will first -attempt to stop the {dfeed}, as though <> was called with -the same `timeout` and `force` parameters as the close request. 
+If you close an {anomaly-job} whose {dfeed} is running, the request first tries +to stop the {dfeed}. This behavior is equivalent to calling +<> with the same `timeout` and `force` parameters +as the close job request. When you close a job, it runs housekeeping tasks such as pruning the model history, flushing buffers, calculating final results and persisting the model snapshots. diff --git a/docs/reference/ml/anomaly-detection/apis/delete-job.asciidoc b/docs/reference/ml/anomaly-detection/apis/delete-job.asciidoc index 316bbd287a9d9..585463a409457 100644 --- a/docs/reference/ml/anomaly-detection/apis/delete-job.asciidoc +++ b/docs/reference/ml/anomaly-detection/apis/delete-job.asciidoc @@ -34,9 +34,10 @@ are granted to anyone over the `.ml-*` indices. It is not currently possible to delete multiple jobs using wildcards or a comma separated list. -If you delete a job that has a {dfeed}, the request will first attempt to -delete the {dfeed}, as though <> was called with the same -`timeout` and `force` parameters as this delete request. +If you delete a job that has a {dfeed}, the request first tries to delete the +{dfeed}. This behavior is equivalent to calling +<> with the same `timeout` and `force` +parameters as the delete job request. [[ml-delete-job-path-parms]] == {api-path-parms-title} diff --git a/docs/reference/ml/anomaly-detection/apis/put-datafeed.asciidoc b/docs/reference/ml/anomaly-detection/apis/put-datafeed.asciidoc index 4786dfcef2300..d57c31b8c7c36 100644 --- a/docs/reference/ml/anomaly-detection/apis/put-datafeed.asciidoc +++ b/docs/reference/ml/anomaly-detection/apis/put-datafeed.asciidoc @@ -36,10 +36,10 @@ each interval. See {ml-docs}/ml-delayed-data-detection.html[Handling delayed dat [IMPORTANT] ==== -* You must use {kib} or this API to create a {dfeed}. Do not add a -{dfeed} directly to the `.ml-config` index using the {es} index API. If {es} -{security-features} are enabled, do not give users `write` privileges on the -`.ml-config` index. +* You must use {kib}, this API, or the <> +to create a {dfeed}. Do not add a {dfeed} directly to the `.ml-config` index +using the {es} index API. If {es} {security-features} are enabled, do not give +users `write` privileges on the `.ml-config` index. * When {es} {security-features} are enabled, your {dfeed} remembers which roles the user who created it had at the time of creation and runs the query using those same roles. If you provide diff --git a/docs/reference/ml/anomaly-detection/apis/put-job.asciidoc b/docs/reference/ml/anomaly-detection/apis/put-job.asciidoc index 87a1cc6f8f36f..dad8471ced53f 100644 --- a/docs/reference/ml/anomaly-detection/apis/put-job.asciidoc +++ b/docs/reference/ml/anomaly-detection/apis/put-job.asciidoc @@ -19,13 +19,24 @@ Instantiates an {anomaly-job}. Requires the `manage_ml` cluster privilege. This privilege is included in the `machine_learning_admin` built-in role. +If you include a `datafeed_config`, you must also have `read` index privileges +on the source index. + [[ml-put-job-desc]] == {api-description-title} -IMPORTANT: You must use {kib} or this API to create an {anomaly-job}. Do not put +[IMPORTANT] +==== +* You must use {kib} or this API to create an {anomaly-job}. Do not put a job directly to the `.ml-config` index using the {es} index API. If {es} {security-features} are enabled, do not give users `write` privileges on the `.ml-config` index. 
+* If you include a `datafeed_config` and {es} {security-features} are enabled, +your {dfeed} remembers which roles the user who created it had at the time of +creation and runs the query using those same roles. If you provide +<>, those +credentials are used instead. +==== [[ml-put-job-path-parms]] == {api-path-parms-title} @@ -250,7 +261,9 @@ include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=data-description] //End data_description `datafeed_config`:: -(object) The {dfeed} configured for the current {anomaly-job}. +(Optional, object) The {ml-docs}/ml-dfeeds.html[{dfeed}], which retrieves data +from {es} for analysis by the job. You can associate only one {dfeed} with each +{anomaly-job}. + .Properties of `datafeed` [%collapsible%open] diff --git a/docs/reference/ml/anomaly-detection/functions/ml-geo-functions.asciidoc b/docs/reference/ml/anomaly-detection/functions/ml-geo-functions.asciidoc index 2dba8a32f75b3..362f697ad3956 100644 --- a/docs/reference/ml/anomaly-detection/functions/ml-geo-functions.asciidoc +++ b/docs/reference/ml/anomaly-detection/functions/ml-geo-functions.asciidoc @@ -71,8 +71,8 @@ For example, JSON data might contain the following transaction coordinates: In {es}, location data is likely to be stored in `geo_point` fields. For more information, see {ref}/geo-point.html[`geo_point` data type]. This data type is -supported natively in {ml-features}. Specifically, {dfeed} when pulling data from -a `geo_point` field, will transform the data into the appropriate `lat,lon` string -format before sending to the {anomaly-job}. +supported natively in {ml-features}. Specifically, when pulling data from a +`geo_point` field, a {dfeed} will transform the data into the appropriate +`lat,lon` string format before sending to the {anomaly-job}. For more information, see <>. diff --git a/docs/reference/ml/anomaly-detection/ml-configuring-aggregations.asciidoc b/docs/reference/ml/anomaly-detection/ml-configuring-aggregations.asciidoc index 85de5d1254133..702fb10b2f4f8 100644 --- a/docs/reference/ml/anomaly-detection/ml-configuring-aggregations.asciidoc +++ b/docs/reference/ml/anomaly-detection/ml-configuring-aggregations.asciidoc @@ -78,48 +78,30 @@ PUT _ml/anomaly_detectors/farequote }, "data_description": { "time_field":"time" <1> - } -} ----------------------------------- -// TEST[skip:setup:farequote_data] - -<1> The `airline`, `responsetime`, and `time` fields are aggregations. Only the -aggregated fields defined in the `analysis_config` object are analyzed by the -{anomaly-job}. - -NOTE: When the `summary_count_field_name` property is set to a non-null value, -the job expects to receive aggregated input. The property must be set to the -name of the field that contains the count of raw data points that have been -aggregated. It applies to all detectors in the job. 
- -The aggregations are defined in the {dfeed} as follows: - -[source,console] ----------------------------------- -PUT _ml/datafeeds/datafeed-farequote -{ - "job_id":"farequote", - "indices": ["farequote"], - "aggregations": { - "buckets": { - "date_histogram": { - "field": "time", - "fixed_interval": "360s", - "time_zone": "UTC" - }, - "aggregations": { - "time": { <1> - "max": {"field": "time"} + }, + "datafeed_config":{ + "indices": ["farequote"], + "aggregations": { + "buckets": { + "date_histogram": { + "field": "time", + "fixed_interval": "360s", + "time_zone": "UTC" }, - "airline": { <2> - "terms": { - "field": "airline", - "size": 100 + "aggregations": { + "time": { <2> + "max": {"field": "time"} }, - "aggregations": { - "responsetime": { <3> - "avg": { - "field": "responsetime" + "airline": { <3> + "terms": { + "field": "airline", + "size": 100 + }, + "aggregations": { + "responsetime": { <4> + "avg": { + "field": "responsetime" + } } } } @@ -129,19 +111,27 @@ PUT _ml/datafeeds/datafeed-farequote } } ---------------------------------- -// TEST[skip:setup:farequote_job] +// TEST[skip:setup:farequote_data] -<1> The aggregations have names that match the fields that they operate on. The +<1> The `airline`, `responsetime`, and `time` fields are aggregations. Only the +aggregated fields defined in the `analysis_config` object are analyzed by the +{anomaly-job}. +<2> The aggregations have names that match the fields that they operate on. The `max` aggregation is named `time` and its field also needs to be `time`. -<2> The `term` aggregation is named `airline` and its field is also named +<3> The `term` aggregation is named `airline` and its field is also named `airline`. -<3> The `avg` aggregation is named `responsetime` and its field is also named +<4> The `avg` aggregation is named `responsetime` and its field is also named `responsetime`. +When the `summary_count_field_name` property is set to a non-null value, the job +expects to receive aggregated input. The property must be set to the name of the +field that contains the count of raw data points that have been aggregated. It +applies to all detectors in the job. + TIP: If you are using a `term` aggregation to gather influencer or partition field information, consider using a `composite` aggregation. It performs -better than a `date_histogram` with a nested `term` aggregation and also includes -all the values of the field instead of the top values per bucket. +better than a `date_histogram` with a nested `term` aggregation and also +includes all the values of the field instead of the top values per bucket. [discrete] [[aggs-using-composite]] @@ -153,15 +143,17 @@ For `composite` aggregation support, there must be exactly one `date_histogram` source. That value source must not be sorted in descending order. Additional `composite` aggregation value sources are allowed, such as `terms`. -NOTE: A {dfeed} that uses composite aggregations may not be as performant as datafeeds that use scrolling or -date histogram aggregations. Composite aggregations are optimized -for queries that are either `match_all` or `range` filters. Other types of +NOTE: A {dfeed} that uses composite aggregations may not be as performant as +{dfeeds} that use scrolling or date histogram aggregations. Composite +aggregations are optimized for queries that are either `match_all` or `range` +filters. Other types of queries may cause the `composite` aggregation to be ineffecient. Here is an example that uses a `composite` aggregation instead of a `date_histogram`. 
-Assuming the same job configuration as above. +This is an example of a job with a {dfeed} that uses a `composite` aggregation +to bucket the metrics based on time and terms: [source,console] ---------------------------------- @@ -178,54 +170,42 @@ PUT _ml/anomaly_detectors/farequote-composite }, "data_description": { "time_field":"time" - } -} ----------------------------------- -// TEST[skip:setup:farequote_data] - -This is an example of a datafeed that uses a `composite` aggregation to bucket -the metrics based on time and terms: - -[source,console] ----------------------------------- -PUT _ml/datafeeds/datafeed-farequote-composite -{ - "job_id": "farequote-composite", - "indices": [ - "farequote" - ], - "aggregations": { - "buckets": { - "composite": { - "size": 1000, <1> - "sources": [ - { - "time_bucket": { <2> - "date_histogram": { - "field": "time", - "fixed_interval": "360s", - "time_zone": "UTC" + }, + "datafeed_config":{ + "indices": ["farequote"], + "aggregations": { + "buckets": { + "composite": { + "size": 1000, <1> + "sources": [ + { + "time_bucket": { <2> + "date_histogram": { + "field": "time", + "fixed_interval": "360s", + "time_zone": "UTC" + } } - } - }, - { - "airline": { <3> - "terms": { - "field": "airline" + }, + { + "airline": { <3> + "terms": { + "field": "airline" + } } } - } - ] - }, - "aggregations": { - "time": { <4> - "max": { - "field": "time" - } + ] }, - "responsetime": { <5> - "avg": { - "field": "responsetime" + "aggregations": { + "time": { <4> + "max": { + "field": "time" + } + }, + "responsetime": { <5> + "avg": { + "field": "responsetime" + } } } } @@ -233,10 +213,8 @@ PUT _ml/datafeeds/datafeed-farequote-composite } } ---------------------------------- -// TEST[skip:setup:farequote_job] - <1> Provide the `size` to the composite agg to control how many resources -are used when aggregating the data. A larger `size` means a faster datafeed but +are used when aggregating the data. A larger `size` means a faster {dfeed} but more cluster resources are used when searching. <2> The required `date_histogram` composite aggregation source. Make sure it is named differently than your desired time field. @@ -364,7 +342,7 @@ When using a `date_histogram` aggregation to bucket by time: "bucket_agg": { ... 
}, - "aggregations": {] + "aggregations": { "data_histogram_aggregation": { "date_histogram": { "field": "time", diff --git a/docs/reference/ml/anomaly-detection/ml-configuring-transform.asciidoc b/docs/reference/ml/anomaly-detection/ml-configuring-transform.asciidoc index ef442b4b92913..fdaffa92bae2e 100644 --- a/docs/reference/ml/anomaly-detection/ml-configuring-transform.asciidoc +++ b/docs/reference/ml/anomaly-detection/ml-configuring-transform.asciidoc @@ -107,20 +107,16 @@ PUT _ml/anomaly_detectors/test1 }, "data_description": { "time_field":"@timestamp" - } -} - -PUT _ml/datafeeds/datafeed-test1 -{ - "job_id": "test1", - "indices": [ - "my-index-000001" - ], - "runtime_mappings": { - "total_error_count": { <2> - "type": "long", - "script": { - "source": "emit(doc['error_count'].value + doc['aborted_count'].value)" + }, + "datafeed_config":{ + "datafeed_id": "datafeed-test1", + "indices": ["my-index-000001"], + "runtime_mappings": { + "total_error_count": { <2> + "type": "long", + "script": { + "source": "emit(doc['error_count'].value + doc['aborted_count'].value)" + } } } } @@ -193,18 +189,16 @@ PUT _ml/anomaly_detectors/test2 }, "data_description": { "time_field":"@timestamp" - } -} - -PUT _ml/datafeeds/datafeed-test2 -{ - "job_id": "test2", - "indices": ["my-index-000001"], - "runtime_mappings": { - "my_runtime_field": { - "type": "keyword", - "script": { - "source": "emit(doc['some_field'].value + '_' + doc['another_field'].value)" <2> + }, + "datafeed_config":{ + "datafeed_id": "datafeed-test2", + "indices": ["my-index-000001"], + "runtime_mappings": { + "my_runtime_field": { + "type": "keyword", + "script": { + "source": "emit(doc['some_field'].value + '_' + doc['another_field'].value)" <2> + } } } } @@ -438,18 +432,16 @@ PUT _ml/anomaly_detectors/test3 }, "data_description": { "time_field":"@timestamp" - } -} - -PUT _ml/datafeeds/datafeed-test3 -{ - "job_id": "test3", - "indices": ["my-index-000001"], - "runtime_mappings": { - "my_coordinates": { - "type": "keyword", - "script": { - "source": "emit(doc['coords.lat'].value + ',' + doc['coords.lon'].value)" + }, + "datafeed_config":{ + "datafeed_id": "datafeed-test3", + "indices": ["my-index-000001"], + "runtime_mappings": { + "my_coordinates": { + "type": "keyword", + "script": { + "source": "emit(doc['coords.lat'].value + ',' + doc['coords.lon'].value)" + } } } } @@ -501,19 +493,17 @@ PUT _ml/anomaly_detectors/test4 }, "data_description": { "time_field":"@timestamp" - } -} - -PUT _ml/datafeeds/datafeed-test4 -{ - "job_id": "test4", - "indices": ["my-index-000001"], - "script_fields":{ - "sub":{ - "script":"return domainSplit(doc['query'].value).get(0);" - }, - "hrd":{ - "script":"return domainSplit(doc['query'].value).get(1);" + }, + "datafeed_config":{ + "datafeed_id": "datafeed-test4", + "indices": ["my-index-000001"], + "script_fields":{ + "sub":{ + "script":"return domainSplit(doc['query'].value).get(0);" + }, + "hrd":{ + "script":"return domainSplit(doc['query'].value).get(1);" + } } } } From 21230cb4bd6a496883c01ea2d6d86f52af43cd52 Mon Sep 17 00:00:00 2001 From: Yang Wang Date: Thu, 26 Aug 2021 14:45:33 +1000 Subject: [PATCH 014/128] Handle a edge case for validation of API key role descriptors (#76959) This PR fixes a BWC edge case: In a mixed cluster, e.g. rolling upgrade, API keys can sometimes fail to validate due to mismatch of role descriptors depending on where the request is initially authenticated. 
--- .../authc/service/ServiceAccountIT.java | 25 +++- .../authc/apikey/ApiKeySingleNodeTests.java | 111 ++++++++++++++++++ .../xpack/security/authc/ApiKeyService.java | 25 +++- .../authc/service/ElasticServiceAccounts.java | 2 +- .../authc/support/ApiKeyGenerator.java | 41 +++++-- .../authz/store/CompositeRolesStore.java | 2 +- .../xpack/restart/FullClusterRestartIT.java | 40 +++++++ 7 files changed, 226 insertions(+), 20 deletions(-) diff --git a/x-pack/plugin/security/qa/service-account/src/javaRestTest/java/org/elasticsearch/xpack/security/authc/service/ServiceAccountIT.java b/x-pack/plugin/security/qa/service-account/src/javaRestTest/java/org/elasticsearch/xpack/security/authc/service/ServiceAccountIT.java index e2645ec3226cb..1624c25610a7a 100644 --- a/x-pack/plugin/security/qa/service-account/src/javaRestTest/java/org/elasticsearch/xpack/security/authc/service/ServiceAccountIT.java +++ b/x-pack/plugin/security/qa/service-account/src/javaRestTest/java/org/elasticsearch/xpack/security/authc/service/ServiceAccountIT.java @@ -15,7 +15,6 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.xcontent.ToXContent; -import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.core.PathUtils; import org.elasticsearch.common.settings.SecureString; @@ -32,7 +31,9 @@ import java.io.IOException; import java.net.URISyntaxException; import java.net.URL; +import java.nio.charset.StandardCharsets; import java.nio.file.Path; +import java.util.Base64; import java.util.List; import java.util.Locale; import java.util.Map; @@ -398,10 +399,30 @@ public void testManageOwnApiKey() throws IOException { createApiKeyRequest1.setOptions(requestOptions); final Response createApiKeyResponse1 = client().performRequest(createApiKeyRequest1); assertOK(createApiKeyResponse1); - final String apiKeyId1 = (String) responseAsMap(createApiKeyResponse1).get("id"); + final Map createApiKeyResponseMap1 = responseAsMap(createApiKeyResponse1); + final String apiKeyId1 = (String) createApiKeyResponseMap1.get("id"); assertApiKeys(apiKeyId1, "key-1", false, requestOptions); + final String base64ApiKeyKeyValue = Base64.getEncoder().encodeToString( + (apiKeyId1 + ":" + createApiKeyResponseMap1.get("api_key")).getBytes(StandardCharsets.UTF_8)); + + // API key can monitor cluster + final Request mainRequest = new Request("GET", "/"); + mainRequest.setOptions(mainRequest.getOptions().toBuilder().addHeader( + "Authorization", "ApiKey " + base64ApiKeyKeyValue + )); + assertOK(client().performRequest(mainRequest)); + + // API key cannot get user + final Request getUserRequest = new Request("GET", "_security/user"); + getUserRequest.setOptions(getUserRequest.getOptions().toBuilder().addHeader( + "Authorization", "ApiKey " + base64ApiKeyKeyValue + )); + final ResponseException e = expectThrows(ResponseException.class, () -> client().performRequest(getUserRequest)); + assertThat(e.getResponse().getStatusLine().getStatusCode(), equalTo(403)); + assertThat(e.getMessage(), containsString("is unauthorized for API key")); + final Request invalidateApiKeysRequest = new Request("DELETE", "_security/api_key"); invalidateApiKeysRequest.setJsonEntity("{\"ids\":[\"" + apiKeyId1 + "\"],\"owner\":true}"); invalidateApiKeysRequest.setOptions(requestOptions); diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/apikey/ApiKeySingleNodeTests.java 
b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/apikey/ApiKeySingleNodeTests.java index 02e3b0bc2290a..29d594232a358 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/apikey/ApiKeySingleNodeTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/apikey/ApiKeySingleNodeTests.java @@ -7,21 +7,52 @@ package org.elasticsearch.xpack.security.authc.apikey; +import org.elasticsearch.ElasticsearchSecurityException; +import org.elasticsearch.action.admin.indices.create.CreateIndexAction; +import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; +import org.elasticsearch.action.get.GetAction; +import org.elasticsearch.action.get.GetRequest; +import org.elasticsearch.action.get.GetResponse; +import org.elasticsearch.action.main.MainAction; +import org.elasticsearch.action.main.MainRequest; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.test.SecuritySingleNodeTestCase; +import org.elasticsearch.test.XContentTestUtils; import org.elasticsearch.xpack.core.XPackSettings; import org.elasticsearch.xpack.core.security.action.CreateApiKeyAction; import org.elasticsearch.xpack.core.security.action.CreateApiKeyRequest; +import org.elasticsearch.xpack.core.security.action.CreateApiKeyResponse; +import org.elasticsearch.xpack.core.security.action.GrantApiKeyAction; +import org.elasticsearch.xpack.core.security.action.GrantApiKeyRequest; import org.elasticsearch.xpack.core.security.action.apikey.QueryApiKeyAction; import org.elasticsearch.xpack.core.security.action.apikey.QueryApiKeyRequest; import org.elasticsearch.xpack.core.security.action.apikey.QueryApiKeyResponse; +import org.elasticsearch.xpack.core.security.action.service.CreateServiceAccountTokenAction; +import org.elasticsearch.xpack.core.security.action.service.CreateServiceAccountTokenRequest; +import org.elasticsearch.xpack.core.security.action.service.CreateServiceAccountTokenResponse; +import org.elasticsearch.xpack.core.security.action.user.PutUserAction; +import org.elasticsearch.xpack.core.security.action.user.PutUserRequest; +import org.elasticsearch.xpack.core.security.authc.support.Hasher; +import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; +import org.elasticsearch.xpack.security.authc.service.ServiceAccountService; +import java.io.IOException; +import java.nio.charset.StandardCharsets; import java.time.Instant; +import java.util.Base64; +import java.util.List; +import java.util.Map; +import static org.elasticsearch.xpack.core.security.index.RestrictedIndicesNames.SECURITY_MAIN_ALIAS; +import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.emptyArray; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasKey; public class ApiKeySingleNodeTests extends SecuritySingleNodeTestCase { @@ -53,4 +84,84 @@ public void testQueryWithExpiredKeys() throws InterruptedException { assertThat(queryApiKeyResponse.getItems()[0].getApiKey().getName(), equalTo("long-lived")); assertThat(queryApiKeyResponse.getItems()[0].getSortValues(), emptyArray()); } + + public void testCreatingApiKeyWithNoAccess() { + final PutUserRequest putUserRequest = new 
PutUserRequest(); + final String username = randomAlphaOfLength(8); + putUserRequest.username(username); + final SecureString password = new SecureString("super-strong-password".toCharArray()); + putUserRequest.passwordHash(Hasher.PBKDF2.hash(password)); + putUserRequest.roles(Strings.EMPTY_ARRAY); + client().execute(PutUserAction.INSTANCE, putUserRequest).actionGet(); + + final GrantApiKeyRequest grantApiKeyRequest = new GrantApiKeyRequest(); + grantApiKeyRequest.getGrant().setType("password"); + grantApiKeyRequest.getGrant().setUsername(username); + grantApiKeyRequest.getGrant().setPassword(password); + grantApiKeyRequest.getApiKeyRequest().setName(randomAlphaOfLength(8)); + grantApiKeyRequest.getApiKeyRequest().setRoleDescriptors(List.of( + new RoleDescriptor("x", new String[] { "all" }, + new RoleDescriptor.IndicesPrivileges[]{ + RoleDescriptor.IndicesPrivileges.builder().indices("*").privileges("all").allowRestrictedIndices(true).build() + }, + null, null, null, null, null))); + final CreateApiKeyResponse createApiKeyResponse = client().execute(GrantApiKeyAction.INSTANCE, grantApiKeyRequest).actionGet(); + + final String base64ApiKeyKeyValue = Base64.getEncoder().encodeToString( + (createApiKeyResponse.getId() + ":" + createApiKeyResponse.getKey().toString()).getBytes(StandardCharsets.UTF_8)); + + // No cluster access + final ElasticsearchSecurityException e1 = expectThrows( + ElasticsearchSecurityException.class, + () -> client().filterWithHeader(Map.of("Authorization", "ApiKey " + base64ApiKeyKeyValue)) + .execute(MainAction.INSTANCE, new MainRequest()) + .actionGet()); + assertThat(e1.status().getStatus(), equalTo(403)); + assertThat(e1.getMessage(), containsString("is unauthorized for API key")); + + // No index access + final ElasticsearchSecurityException e2 = expectThrows( + ElasticsearchSecurityException.class, + () -> client().filterWithHeader(Map.of("Authorization", "ApiKey " + base64ApiKeyKeyValue)) + .execute(CreateIndexAction.INSTANCE, new CreateIndexRequest( + randomFrom(randomAlphaOfLengthBetween(3, 8), SECURITY_MAIN_ALIAS))) + .actionGet()); + assertThat(e2.status().getStatus(), equalTo(403)); + assertThat(e2.getMessage(), containsString("is unauthorized for API key")); + } + + public void testServiceAccountApiKey() throws IOException { + final CreateServiceAccountTokenRequest createServiceAccountTokenRequest = + new CreateServiceAccountTokenRequest("elastic", "fleet-server", randomAlphaOfLength(8)); + final CreateServiceAccountTokenResponse createServiceAccountTokenResponse = + client().execute(CreateServiceAccountTokenAction.INSTANCE, createServiceAccountTokenRequest).actionGet(); + + final CreateApiKeyResponse createApiKeyResponse = + client().filterWithHeader(Map.of("Authorization", "Bearer " + createServiceAccountTokenResponse.getValue())) + .execute(CreateApiKeyAction.INSTANCE, new CreateApiKeyRequest(randomAlphaOfLength(8), null, null)) + .actionGet(); + + final Map apiKeyDocument = getApiKeyDocument(createApiKeyResponse.getId()); + + @SuppressWarnings("unchecked") + final Map fleetServerRoleDescriptor = + (Map) apiKeyDocument.get("limited_by_role_descriptors"); + assertThat(fleetServerRoleDescriptor.size(), equalTo(1)); + assertThat(fleetServerRoleDescriptor, hasKey("elastic/fleet-server")); + + @SuppressWarnings("unchecked") + final Map descriptor = (Map) fleetServerRoleDescriptor.get("elastic/fleet-server"); + + final RoleDescriptor roleDescriptor = RoleDescriptor.parse("elastic/fleet-server", + XContentTestUtils.convertToXContent(descriptor, 
XContentType.JSON), + false, + XContentType.JSON); + assertThat(roleDescriptor, equalTo(ServiceAccountService.getServiceAccounts().get("elastic/fleet-server").roleDescriptor())); + } + + private Map getApiKeyDocument(String apiKeyId) { + final GetResponse getResponse = + client().execute(GetAction.INSTANCE, new GetRequest(".security-7", apiKeyId)).actionGet(); + return getResponse.getSource(); + } } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ApiKeyService.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ApiKeyService.java index 2b13cbe997471..94cefa6f37428 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ApiKeyService.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ApiKeyService.java @@ -88,6 +88,7 @@ import org.elasticsearch.xpack.core.security.authc.Authentication; import org.elasticsearch.xpack.core.security.authc.Authentication.RealmRef; import org.elasticsearch.xpack.core.security.authc.AuthenticationResult; +import org.elasticsearch.xpack.core.security.authc.service.ServiceAccountSettings; import org.elasticsearch.xpack.core.security.authc.support.Hasher; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; import org.elasticsearch.xpack.core.security.user.User; @@ -179,6 +180,18 @@ public class ApiKeyService { public static final Setting DOC_CACHE_TTL_SETTING = Setting.timeSetting("xpack.security.authc.api_key.doc_cache.ttl", TimeValue.timeValueMinutes(5), TimeValue.timeValueMinutes(0), TimeValue.timeValueMinutes(15), Property.NodeScope); + // This following fixed role descriptor is for fleet-server BWC on and before 7.14. + // It is fixed and must NOT be updated when the fleet-server service account updates. + private static final BytesArray FLEET_SERVER_ROLE_DESCRIPTOR_BYTES_V_7_14 = new BytesArray( + "{\"elastic/fleet-server\":{\"cluster\":[\"monitor\",\"manage_own_api_key\"]," + + "\"indices\":[{\"names\":[\"logs-*\",\"metrics-*\",\"traces-*\",\"synthetics-*\"," + + "\".logs-endpoint.diagnostic.collection-*\"]," + + "\"privileges\":[\"write\",\"create_index\",\"auto_configure\"],\"allow_restricted_indices\":false}," + + "{\"names\":[\".fleet-*\"],\"privileges\":[\"read\",\"write\",\"monitor\",\"create_index\",\"auto_configure\"]," + + "\"allow_restricted_indices\":false}],\"applications\":[],\"run_as\":[],\"metadata\":{}," + + "\"transient_metadata\":{\"enabled\":true}}}" + ); + private final Clock clock; private final Client client; private final SecurityIndexManager securityIndex; @@ -508,9 +521,15 @@ public Tuple getApiKeyIdAndRoleBytes(Authentication auth .onOrAfter(VERSION_API_KEY_ROLES_AS_BYTES) : "This method only applies to authentication objects created on or after v7.9.0"; final Map metadata = authentication.getMetadata(); - return new Tuple<>( - (String) metadata.get(API_KEY_ID_KEY), - (BytesReference) metadata.get(limitedBy ? API_KEY_LIMITED_ROLE_DESCRIPTORS_KEY : API_KEY_ROLE_DESCRIPTORS_KEY)); + final BytesReference bytesReference = + (BytesReference) metadata.get(limitedBy ? 
API_KEY_LIMITED_ROLE_DESCRIPTORS_KEY : API_KEY_ROLE_DESCRIPTORS_KEY); + if (limitedBy && bytesReference.length() == 2 && "{}".equals(bytesReference.utf8ToString())) { + if (ServiceAccountSettings.REALM_NAME.equals(metadata.get(API_KEY_CREATOR_REALM_NAME)) + && "elastic/fleet-server".equals(authentication.getUser().principal())) { + return new Tuple<>((String) metadata.get(API_KEY_ID_KEY), FLEET_SERVER_ROLE_DESCRIPTOR_BYTES_V_7_14); + } + } + return new Tuple<>((String) metadata.get(API_KEY_ID_KEY), bytesReference); } public static class ApiKeyRoleDescriptors { diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/service/ElasticServiceAccounts.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/service/ElasticServiceAccounts.java index 96ce558cae9ea..b548adef183dc 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/service/ElasticServiceAccounts.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/service/ElasticServiceAccounts.java @@ -48,7 +48,7 @@ final class ElasticServiceAccounts { new ElasticServiceAccount("kibana", ReservedRolesStore.kibanaSystemRoleDescriptor(NAMESPACE + "/kibana")); static final Map ACCOUNTS = List.of(FLEET_ACCOUNT, KIBANA_SYSTEM_ACCOUNT).stream() - .collect(Collectors.toMap(a -> a.id().asPrincipal(), Function.identity()));; + .collect(Collectors.toMap(a -> a.id().asPrincipal(), Function.identity())); private ElasticServiceAccounts() {} diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/ApiKeyGenerator.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/ApiKeyGenerator.java index 71afd11c21696..cf7223381bd3a 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/ApiKeyGenerator.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/ApiKeyGenerator.java @@ -14,13 +14,17 @@ import org.elasticsearch.xpack.core.security.action.CreateApiKeyRequest; import org.elasticsearch.xpack.core.security.action.CreateApiKeyResponse; import org.elasticsearch.xpack.core.security.authc.Authentication; +import org.elasticsearch.xpack.core.security.authc.service.ServiceAccountSettings; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; import org.elasticsearch.xpack.core.security.authz.support.DLSRoleQueryValidator; import org.elasticsearch.xpack.security.authc.ApiKeyService; +import org.elasticsearch.xpack.security.authc.service.ServiceAccount; +import org.elasticsearch.xpack.security.authc.service.ServiceAccountService; import org.elasticsearch.xpack.security.authz.store.CompositeRolesStore; import java.util.Arrays; import java.util.HashSet; +import java.util.Set; public class ApiKeyGenerator { @@ -40,20 +44,31 @@ public void generateApiKey(Authentication authentication, CreateApiKeyRequest re return; } apiKeyService.ensureEnabled(); - rolesStore.getRoleDescriptors(new HashSet<>(Arrays.asList(authentication.getUser().roles())), - ActionListener.wrap(roleDescriptors -> { - for (RoleDescriptor rd : roleDescriptors) { - try { - DLSRoleQueryValidator.validateQueryField(rd.getIndicesPrivileges(), xContentRegistry); - } catch (ElasticsearchException | IllegalArgumentException e) { - listener.onFailure(e); - return; - } - } - apiKeyService.createApiKey(authentication, request, roleDescriptors, listener); - }, - listener::onFailure)); + final ActionListener> 
roleDescriptorsListener = ActionListener.wrap(roleDescriptors -> { + for (RoleDescriptor rd : roleDescriptors) { + try { + DLSRoleQueryValidator.validateQueryField(rd.getIndicesPrivileges(), xContentRegistry); + } catch (ElasticsearchException | IllegalArgumentException e) { + listener.onFailure(e); + return; + } + } + apiKeyService.createApiKey(authentication, request, roleDescriptors, listener); + }, listener::onFailure); + + if (ServiceAccountSettings.REALM_NAME.equals(authentication.getSourceRealm().getName())) { + final ServiceAccount serviceAccount = ServiceAccountService.getServiceAccounts().get(authentication.getUser().principal()); + if (serviceAccount == null) { + roleDescriptorsListener.onFailure(new ElasticsearchSecurityException( + "the authentication is created by a service account that does not exist: [" + + authentication.getUser().principal() + "]")); + } else { + roleDescriptorsListener.onResponse(Set.of(serviceAccount.roleDescriptor())); + } + } else { + rolesStore.getRoleDescriptors(new HashSet<>(Arrays.asList(authentication.getUser().roles())), roleDescriptorsListener); + } } } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStore.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStore.java index c77e42ed45e96..223390fde0749 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStore.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStore.java @@ -299,7 +299,7 @@ private void getRolesForApiKey(Authentication authentication, ActionListener roleActionListener.onResponse( - limitedByRole == Role.EMPTY ? role : LimitedRole.createLimitedRole(role, limitedByRole)), + LimitedRole.createLimitedRole(role, limitedByRole)), roleActionListener::onFailure )); } diff --git a/x-pack/qa/full-cluster-restart/src/test/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java b/x-pack/qa/full-cluster-restart/src/test/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java index f35755c596d78..d01d4ba23c7f6 100644 --- a/x-pack/qa/full-cluster-restart/src/test/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java +++ b/x-pack/qa/full-cluster-restart/src/test/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java @@ -259,6 +259,46 @@ public void testWatcherWithApiKey() throws Exception { } } + public void testServiceAccountApiKey() throws IOException { + assumeTrue("no service accounts in versions before " + Version.V_7_13_0, getOldClusterVersion().onOrAfter(Version.V_7_13_0)); + if (isRunningAgainstOldCluster()) { + final Request createServiceTokenRequest = new Request("POST", "/_security/service/elastic/fleet-server/credential/token"); + final Response createServiceTokenResponse = client().performRequest(createServiceTokenRequest); + assertOK(createServiceTokenResponse); + @SuppressWarnings("unchecked") + final String serviceToken = ((Map) responseAsMap(createServiceTokenResponse).get("token")).get("value"); + final Request createApiKeyRequest = new Request("PUT", "/_security/api_key"); + createApiKeyRequest.setOptions(RequestOptions.DEFAULT.toBuilder().addHeader("Authorization", "Bearer " + serviceToken)); + createApiKeyRequest.setJsonEntity("{\"name\":\"key-1\"}"); + final Response createApiKeyResponse = client().performRequest(createApiKeyRequest); + final Map createApiKeyResponseMap = entityAsMap(createApiKeyResponse); + final String 
authHeader = "ApiKey " + Base64.getEncoder().encodeToString( + (createApiKeyResponseMap.get("id") + ":" + createApiKeyResponseMap.get("api_key")).getBytes(StandardCharsets.UTF_8)); + + final Request indexRequest = new Request("PUT", "/api_keys/_doc/key-1"); + indexRequest.setJsonEntity("{\"auth_header\":\"" + authHeader + "\"}"); + assertOK(client().performRequest(indexRequest)); + } else { + final Request getRequest = new Request("GET", "/api_keys/_doc/key-1"); + final Response getResponse = client().performRequest(getRequest); + assertOK(getResponse); + final Map getResponseMap = responseAsMap(getResponse); + @SuppressWarnings("unchecked") + final String authHeader = ((Map) getResponseMap.get("_source")).get("auth_header"); + + final Request mainRequest = new Request("GET", "/"); + mainRequest.setOptions(RequestOptions.DEFAULT.toBuilder().addHeader("Authorization", authHeader)); + assertOK(client().performRequest(mainRequest)); + + final Request getUserRequest = new Request("GET", "/_security/user"); + getUserRequest.setOptions(RequestOptions.DEFAULT.toBuilder().addHeader("Authorization", authHeader)); + final ResponseException e = + expectThrows(ResponseException.class, () -> client().performRequest(getUserRequest)); + assertThat(e.getResponse().getStatusLine().getStatusCode(), equalTo(403)); + assertThat(e.getMessage(), containsString("is unauthorized")); + } + } + /** * Tests that a RollUp job created on a old cluster is correctly restarted after the upgrade. */ From 99c85f8e70216e4d3750903313a88e093cec4bc4 Mon Sep 17 00:00:00 2001 From: Przemko Robakowski Date: Thu, 26 Aug 2021 08:47:37 +0200 Subject: [PATCH 015/128] Fix disabling GeoIP downloader through elasticsearch.yml (#76924) This change allows user to disable GeoIP downloader using elasticsearch.yml and it deletes .geoip_databases index if downloader is disabled. 
Closes #76586 --- .../ingest/geoip/GeoIpDownloader.java | 4 +- .../geoip/GeoIpDownloaderTaskExecutor.java | 55 +++++++++++++++---- 2 files changed, 44 insertions(+), 15 deletions(-) diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpDownloader.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpDownloader.java index b8ab92c3c65ee..21513c3433a04 100644 --- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpDownloader.java +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpDownloader.java @@ -16,7 +16,6 @@ import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.client.Client; -import org.elasticsearch.client.OriginSettingClient; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.hash.MessageDigests; import org.elasticsearch.common.settings.Setting; @@ -32,7 +31,6 @@ import org.elasticsearch.index.query.RangeQueryBuilder; import org.elasticsearch.index.reindex.DeleteByQueryAction; import org.elasticsearch.index.reindex.DeleteByQueryRequest; -import org.elasticsearch.ingest.IngestService; import org.elasticsearch.ingest.geoip.GeoIpTaskState.Metadata; import org.elasticsearch.ingest.geoip.stats.GeoIpDownloaderStats; import org.elasticsearch.persistent.AllocatedPersistentTask; @@ -84,7 +82,7 @@ public class GeoIpDownloader extends AllocatedPersistentTask { long id, String type, String action, String description, TaskId parentTask, Map headers) { super(id, type, action, description, parentTask, headers); this.httpClient = httpClient; - this.client = new OriginSettingClient(client, IngestService.INGEST_ORIGIN); + this.client = client; this.clusterService = clusterService; this.threadPool = threadPool; endpoint = ENDPOINT_SETTING.get(settings); diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderTaskExecutor.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderTaskExecutor.java index 02e04bccb3e65..5ef7077406b1e 100644 --- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderTaskExecutor.java +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderTaskExecutor.java @@ -11,13 +11,17 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.ResourceAlreadyExistsException; +import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.client.Client; +import org.elasticsearch.client.OriginSettingClient; import org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.ClusterStateListener; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.gateway.GatewayService; +import org.elasticsearch.ingest.IngestService; import org.elasticsearch.persistent.AllocatedPersistentTask; import org.elasticsearch.persistent.PersistentTaskState; import org.elasticsearch.persistent.PersistentTasksCustomMetadata; @@ -29,6 +33,7 @@ import java.util.Map; import java.util.concurrent.atomic.AtomicReference; +import static org.elasticsearch.ingest.geoip.GeoIpDownloader.DATABASES_INDEX; import static org.elasticsearch.ingest.geoip.GeoIpDownloader.GEOIP_DOWNLOADER; /** @@ -54,15 +59,14 @@ public final class 
GeoIpDownloaderTaskExecutor extends PersistentTasksExecutor { }); } else { - persistentTasksService.sendRemoveRequest(GEOIP_DOWNLOADER, ActionListener.wrap(r -> { - }, e -> logger.error("failed to remove geoip task", e))); + stopTask(() -> { + }); } } @@ -86,23 +90,33 @@ protected void nodeOperation(AllocatedPersistentTask task, GeoIpTaskParams param currentTask.set(downloader); GeoIpTaskState geoIpTaskState = state == null ? GeoIpTaskState.EMPTY : (GeoIpTaskState) state; downloader.setState(geoIpTaskState); - downloader.runDownloader(); + if (ENABLED_SETTING.get(clusterService.state().metadata().settings(), settings)) { + downloader.runDownloader(); + } } @Override protected GeoIpDownloader createTask(long id, String type, String action, TaskId parentTaskId, - PersistentTasksCustomMetadata.PersistentTask taskInProgress, - Map headers) { + PersistentTasksCustomMetadata.PersistentTask taskInProgress, + Map headers) { return new GeoIpDownloader(client, httpClient, clusterService, threadPool, settings, id, type, action, getDescription(taskInProgress), parentTaskId, headers); } @Override public void clusterChanged(ClusterChangedEvent event) { + if(event.state().blocks().hasGlobalBlock(GatewayService.STATE_NOT_RECOVERED_BLOCK)){ + //wait for state recovered + return; + } //bootstrap downloader after first cluster start clusterService.removeListener(this); - if (event.localNodeMaster() && ENABLED_SETTING.get(event.state().getMetadata().settings())) { - startTask(() -> clusterService.addListener(this)); + if (event.localNodeMaster()) { + if (ENABLED_SETTING.get(event.state().getMetadata().settings(), settings)) { + startTask(() -> clusterService.addListener(this)); + } else { + stopTask(() -> clusterService.addListener(this)); + } } } @@ -116,7 +130,24 @@ private void startTask(Runnable onFailure) { })); } - public GeoIpDownloader getCurrentTask(){ + private void stopTask(Runnable onFailure) { + ActionListener> listener = ActionListener.wrap(r -> { + }, e -> { + if (e instanceof ResourceNotFoundException == false) { + logger.error("failed to remove geoip downloader task", e); + onFailure.run(); + } + }); + persistentTasksService.sendRemoveRequest(GEOIP_DOWNLOADER, ActionListener.runAfter(listener, () -> + client.admin().indices().prepareDelete(DATABASES_INDEX).execute(ActionListener.wrap(rr -> { + }, e -> { + if (e instanceof ResourceNotFoundException == false) { + logger.warn("failed to remove " + DATABASES_INDEX, e); + } + })))); + } + + public GeoIpDownloader getCurrentTask() { return currentTask.get(); } } From 0e50d2041ce18567881c0f93a14f0ca05b1f7e6e Mon Sep 17 00:00:00 2001 From: Przemyslaw Gomulka Date: Thu, 26 Aug 2021 09:05:19 +0200 Subject: [PATCH 016/128] Expand the minimum utc timestamp used in fetching timezone transitions (#75584) when rounding UTC timestamps we convert a timestamp from UTC to local, round this to closest midnight, then we convert back to UTC. This means, that for a timestamp close to a DST we need to make sure we collected a transition that will be needed when converting back to UTC. 
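For example, consider rounding down to the start of the local day in a zone that had a DST overlap shortly before the rounded timestamp. A small java.time sketch of the UTC -> local -> UTC round trip described above (the zone and instant match the test added in this patch):

    import java.time.Instant;
    import java.time.LocalDate;
    import java.time.LocalDateTime;
    import java.time.ZoneId;

    public class DstRoundTripExample {
        public static void main(String[] args) {
            ZoneId tz = ZoneId.of("Canada/Newfoundland");
            Instant utc = Instant.parse("1991-10-28T02:46:41Z");

            LocalDateTime local = LocalDateTime.ofInstant(utc, tz); // UTC -> local: 1991-10-27T23:16:41
            LocalDate day = local.toLocalDate();                    // round down to the local midnight
            Instant rounded = day.atStartOfDay(tz).toInstant();     // local -> UTC again

            // The local midnight falls in the DST overlap of 1991-10-27, so converting it
            // back to UTC needs a transition that lies well before the input instant.
            System.out.println(rounded);
        }
    }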
To do this, we decrease the minUtcMillis by 2 * unit to make sure that the additional transition that could affect the timestamp is also fetched and the correct minimum is used in further lookups. Closes #73995 --- .../org/elasticsearch/common/LocalTimeOffset.java | 7 +++++-- .../java/org/elasticsearch/common/Rounding.java | 9 ++++++++- .../org/elasticsearch/common/RoundingTests.java | 14 ++++++++++++++ 3 files changed, 27 insertions(+), 3 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/common/LocalTimeOffset.java b/server/src/main/java/org/elasticsearch/common/LocalTimeOffset.java index ebf777b6f3b2f..2cee860a1e790 100644 --- a/server/src/main/java/org/elasticsearch/common/LocalTimeOffset.java +++ b/server/src/main/java/org/elasticsearch/common/LocalTimeOffset.java @@ -29,8 +29,9 @@ * utc. So converting from utc is as simple as adding the offset. *

* Getting from local time back to utc is harder. Most local times happen once. - * But some local times happen twice. And some don't happen at all. Take, for - * example, the time in my house. Most days I don't touch my clocks and I'm a + * But some local times happen twice (DST overlap). + * And some don't happen at all (DST gap). Take, for example, + * the time in my house. Most days I don't touch my clocks and I'm a * constant offset from UTC. But once in the fall at 2am I roll my clock back. * So at 5am utc my clocks say 1am. Then at 6am utc my clocks say 1am AGAIN. * I do similarly terrifying things again in the spring when I skip my clocks @@ -38,6 +39,8 @@ *

* So there are two methods to convert from local time back to utc, * {@link #localToUtc(long, Strategy)} and {@link #localToUtcInThisOffset(long)}. + * @see ZoneOffsetTransition#isGap() + * @see ZoneOffsetTransition#isOverlap() */ public abstract class LocalTimeOffset { /** diff --git a/server/src/main/java/org/elasticsearch/common/Rounding.java b/server/src/main/java/org/elasticsearch/common/Rounding.java index 55b720a682ef7..c62fabd8a58c8 100644 --- a/server/src/main/java/org/elasticsearch/common/Rounding.java +++ b/server/src/main/java/org/elasticsearch/common/Rounding.java @@ -515,7 +515,14 @@ public Prepared prepare(long minUtcMillis, long maxUtcMillis) { } private TimeUnitPreparedRounding prepareOffsetOrJavaTimeRounding(long minUtcMillis, long maxUtcMillis) { - long minLookup = minUtcMillis - unit.extraLocalOffsetLookup(); + /* + minUtcMillis has to be decreased by 2 units. + This is because minUtcMillis can be rounded down by up to unit.extraLocalOffsetLookup, + and that rounded-down value might still fall within a DST gap/overlap. + This means that minUtcMillis has to be decreased by an additional unit + so that the transition just before the minUtcMillis is applied + */ + long minLookup = minUtcMillis - 2 * unit.extraLocalOffsetLookup(); long maxLookup = maxUtcMillis; long unitMillis = 0; diff --git a/server/src/test/java/org/elasticsearch/common/RoundingTests.java b/server/src/test/java/org/elasticsearch/common/RoundingTests.java index 32cc823e79292..c5591b2cc2ebf 100644 --- a/server/src/test/java/org/elasticsearch/common/RoundingTests.java +++ b/server/src/test/java/org/elasticsearch/common/RoundingTests.java @@ -232,6 +232,20 @@ public void testRandomTimeUnitRounding() { } } + /** + * This test chooses a date in the middle of the transition, so that we can test + * that the transition which is before the minLookup, but still should be applied, + * is not skipped + */ + public void testRoundingAroundDST() { + Rounding.DateTimeUnit unit = Rounding.DateTimeUnit.DAY_OF_MONTH; + ZoneId tz = ZoneId.of("Canada/Newfoundland"); + long minLookup = 688618001000L; // 1991-10-28T02:46:41.527Z + long maxLookup = 688618001001L; // +1sec + // there is a Transition[Overlap at 1991-10-27T00:01-02:30 to -03:30] + assertUnitRoundingSameAsJavaUtilTimeImplementation(unit, tz, minLookup, maxLookup); + } + private void assertUnitRoundingSameAsJavaUtilTimeImplementation(Rounding.DateTimeUnit unit, ZoneId tz, long start, long end) { Rounding rounding = new Rounding.TimeUnitRounding(unit, tz); Rounding.Prepared prepared = rounding.prepare(start, end); From 4974a7cd7bb47f3d1921e77a835d30cc6b1539ff Mon Sep 17 00:00:00 2001 From: Hendrik Muhs Date: Thu, 26 Aug 2021 09:08:10 +0200 Subject: [PATCH 017/128] [Transform] Reduce indexes to query based on checkpoints (#75839) Continuous transforms reduce the amount of data to query for by detecting what has changed since the last checkpoint. This information is used to inject queries that narrow the scope. The query is sent to all configured indices. This change reduces the indices to query by using the checkpoint information. The number of network calls goes down, which in addition to improving performance reduces the probability of a failure. This change mainly helps transforms of type latest; pivot transforms require additional changes planned for later.
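The heart of the change is a comparison of per-index shard checkpoints between the last and the new checkpoint: only indices whose shard sequence numbers moved need to be queried again. A simplified sketch of that comparison, mirroring the getChangedIndices helper added below (index name mapped to its per-shard checkpoints):

    import java.util.Arrays;
    import java.util.HashSet;
    import java.util.Map;
    import java.util.Set;

    final class ChangedIndicesSketch {
        // Returns the indices whose shard checkpoints differ between two checkpoints.
        // Indices that are new (absent from the old checkpoint) also count as changed.
        static Set<String> changedIndices(Map<String, long[]> oldByIndex, Map<String, long[]> newByIndex) {
            Set<String> changed = new HashSet<>();
            for (Map.Entry<String, long[]> entry : newByIndex.entrySet()) {
                if (Arrays.equals(entry.getValue(), oldByIndex.get(entry.getKey())) == false) {
                    changed.add(entry.getKey());
                }
            }
            return changed;
        }
    }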
--- .../transforms/TransformCheckpoint.java | 20 ++ .../transforms/TransformCheckpointTests.java | 72 ++++++ .../transforms/ClientTransformIndexer.java | 76 ++++-- .../xpack/transform/transforms/Function.java | 19 +- .../transforms/TransformIndexer.java | 45 ++-- .../latest/LatestChangeCollector.java | 14 +- .../CompositeBucketsChangeCollector.java | 16 +- .../ClientTransformIndexerTests.java | 9 +- .../TransformIndexerFailureHandlingTests.java | 2 +- .../latest/LatestChangeCollectorTests.java | 225 +++++++++++++++++- .../CompositeBucketsChangeCollectorTests.java | 34 ++- 11 files changed, 461 insertions(+), 71 deletions(-) diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/transforms/TransformCheckpoint.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/transforms/TransformCheckpoint.java index 337ab43037c1a..f3adabfacb250 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/transforms/TransformCheckpoint.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/transforms/TransformCheckpoint.java @@ -21,10 +21,13 @@ import java.io.IOException; import java.util.Arrays; +import java.util.Collection; import java.util.Collections; +import java.util.HashSet; import java.util.Map; import java.util.Map.Entry; import java.util.Objects; +import java.util.Set; import java.util.TreeMap; import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg; @@ -312,6 +315,23 @@ public static long getBehind(TransformCheckpoint oldCheckpoint, TransformCheckpo return newCheckPointOperationsSum - oldCheckPointOperationsSum; } + public static Collection getChangedIndices(TransformCheckpoint oldCheckpoint, TransformCheckpoint newCheckpoint) { + if (oldCheckpoint.isEmpty()) { + return newCheckpoint.indicesCheckpoints.keySet(); + } + + Set indices = new HashSet<>(); + + for (Entry entry : newCheckpoint.indicesCheckpoints.entrySet()) { + // compare against the old checkpoint + if (Arrays.equals(entry.getValue(), oldCheckpoint.indicesCheckpoints.get(entry.getKey())) == false) { + indices.add(entry.getKey()); + } + } + + return indices; + } + private static Map readCheckpoints(Map readMap) { Map checkpoints = new TreeMap<>(); for (Map.Entry e : readMap.entrySet()) { diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/transforms/TransformCheckpointTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/transforms/TransformCheckpointTests.java index ee14d27d57b6f..69669ae758a78 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/transforms/TransformCheckpointTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/transforms/TransformCheckpointTests.java @@ -19,9 +19,13 @@ import java.util.Collections; import java.util.List; import java.util.Map; +import java.util.Set; import java.util.TreeMap; +import java.util.stream.Collectors; import static org.elasticsearch.test.TestMatchers.matchesPattern; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.lessThan; public class TransformCheckpointTests extends AbstractSerializingTransformTestCase { @@ -191,6 +195,74 @@ public void testGetBehind() { assertEquals((indices - 2) * shards * 10L, TransformCheckpoint.getBehind(checkpointOld, checkpointTransientNew)); } + public void testGetChangedIndices() { + String baseIndexName = randomAlphaOfLength(8); + String id = 
randomAlphaOfLengthBetween(1, 10); + long timestamp = randomNonNegativeLong(); + + TreeMap checkpointsByIndexOld = new TreeMap<>(); + TreeMap checkpointsByIndexNew = new TreeMap<>(); + + int indices = randomIntBetween(5, 20); + int shards = randomIntBetween(1, 20); + + for (int i = 0; i < indices; ++i) { + List checkpoints1 = new ArrayList<>(); + List checkpoints2 = new ArrayList<>(); + + for (int j = 0; j < shards; ++j) { + long shardCheckpoint = randomLongBetween(-1, 1_000_000); + checkpoints1.add(shardCheckpoint); + if (i % 3 == 0) { + checkpoints2.add(shardCheckpoint + 10); + } else { + checkpoints2.add(shardCheckpoint); + } + } + + String indexName = baseIndexName + i; + + if (i < 15) { + checkpointsByIndexOld.put(indexName, checkpoints1.stream().mapToLong(l -> l).toArray()); + } + if (i % 5 != 0) { + checkpointsByIndexNew.put(indexName, checkpoints2.stream().mapToLong(l -> l).toArray()); + } + } + long checkpoint = randomLongBetween(10, 100); + TransformCheckpoint checkpointOld = new TransformCheckpoint(id, timestamp, checkpoint, checkpointsByIndexOld, 0L); + TransformCheckpoint checkpointNew = new TransformCheckpoint(id, timestamp, checkpoint + 1, checkpointsByIndexNew, 0L); + + Set changedIndexes = TransformCheckpoint.getChangedIndices(checkpointOld, checkpointNew) + .stream() + .map(x -> Integer.parseInt(x.substring(baseIndexName.length()))) + .collect(Collectors.toSet()); + + assertThat(changedIndexes.size(), lessThan(indices)); + + for (int i = 0; i < indices; ++i) { + if (i >= 15) { + if (i % 5 == 0) { + assertFalse(changedIndexes.contains(i)); + } else { + assertTrue(changedIndexes.contains(i)); + } + } else if (i % 5 == 0) { + assertFalse(changedIndexes.contains(i)); + } else if (i % 3 == 0) { + assertTrue(changedIndexes.contains(i)); + } else { + assertFalse(changedIndexes.contains(i)); + } + } + + // check against empty + assertThat( + TransformCheckpoint.getChangedIndices(TransformCheckpoint.EMPTY, checkpointNew), + equalTo(checkpointNew.getIndicesCheckpoints().keySet()) + ); + } + private static Map randomCheckpointsByIndex() { Map checkpointsByIndex = new TreeMap<>(); int indices = randomIntBetween(1, 10); diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/ClientTransformIndexer.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/ClientTransformIndexer.java index 413b07b7b7d15..c78dc0760074a 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/ClientTransformIndexer.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/ClientTransformIndexer.java @@ -30,6 +30,7 @@ import org.elasticsearch.common.logging.LoggerMessageFormat; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.core.Tuple; import org.elasticsearch.index.engine.VersionConflictEngineException; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.reindex.BulkByScrollResponse; @@ -59,6 +60,7 @@ import java.util.LinkedHashMap; import java.util.Map; import java.util.Map.Entry; +import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicReference; @@ -71,7 +73,7 @@ class ClientTransformIndexer extends TransformIndexer { private final AtomicBoolean oldStatsCleanedUp = new AtomicBoolean(false); private final AtomicReference seqNoPrimaryTermAndIndex; - private volatile 
PointInTimeBuilder pit; + private final ConcurrentHashMap namedPits = new ConcurrentHashMap<>(); private volatile long pitCheckpoint; private volatile boolean disablePit = false; @@ -250,11 +252,7 @@ void doGetInitialProgress(SearchRequest request, ActionListener @Override void doGetFieldMappings(ActionListener> fieldMappingsListener) { - SchemaUtil.getDestinationFieldMappings( - client, - getConfig().getDestination().getIndex(), - fieldMappingsListener - ); + SchemaUtil.getDestinationFieldMappings(client, getConfig().getDestination().getIndex(), fieldMappingsListener); } /** @@ -363,12 +361,20 @@ protected void onStop() { } private void closePointInTime() { + for (String name : namedPits.keySet()) { + closePointInTime(name); + } + } + + private void closePointInTime(String name) { + PointInTimeBuilder pit = namedPits.remove(name); + if (pit == null) { return; } String oldPit = pit.getEncodedId(); - pit = null; + ClosePointInTimeRequest closePitRequest = new ClosePointInTimeRequest(oldPit); ClientHelper.executeWithHeadersAsync( transformConfig.getHeaders(), @@ -383,20 +389,25 @@ private void closePointInTime() { ); } - private void injectPointInTimeIfNeeded(SearchRequest searchRequest, ActionListener listener) { + private void injectPointInTimeIfNeeded( + Tuple namedSearchRequest, + ActionListener> listener + ) { if (disablePit) { - listener.onResponse(searchRequest); + listener.onResponse(namedSearchRequest); return; } + SearchRequest searchRequest = namedSearchRequest.v2(); + PointInTimeBuilder pit = namedPits.get(namedSearchRequest.v1()); if (pit != null) { searchRequest.source().pointInTimeBuilder(pit); - listener.onResponse(searchRequest); + listener.onResponse(namedSearchRequest); return; } // no pit, create a new one - OpenPointInTimeRequest pitRequest = new OpenPointInTimeRequest(transformConfig.getSource().getIndex()).keepAlive(PIT_KEEP_ALIVE); + OpenPointInTimeRequest pitRequest = new OpenPointInTimeRequest(searchRequest.indices()).keepAlive(PIT_KEEP_ALIVE); ClientHelper.executeWithHeadersAsync( transformConfig.getHeaders(), @@ -405,11 +416,17 @@ private void injectPointInTimeIfNeeded(SearchRequest searchRequest, ActionListen OpenPointInTimeAction.INSTANCE, pitRequest, ActionListener.wrap(response -> { - pit = new PointInTimeBuilder(response.getPointInTimeId()).setKeepAlive(PIT_KEEP_ALIVE); - searchRequest.source().pointInTimeBuilder(pit); + PointInTimeBuilder newPit = new PointInTimeBuilder(response.getPointInTimeId()).setKeepAlive(PIT_KEEP_ALIVE); + namedPits.put(namedSearchRequest.v1(), newPit); + searchRequest.source().pointInTimeBuilder(newPit); pitCheckpoint = getNextCheckpoint().getCheckpoint(); - logger.trace("[{}] using pit search context with id [{}]", getJobId(), pit.getEncodedId()); - listener.onResponse(searchRequest); + logger.trace( + "[{}] using pit search context with id [{}]; request [{}]", + getJobId(), + newPit.getEncodedId(), + namedSearchRequest.v1() + ); + listener.onResponse(namedSearchRequest); }, e -> { Throwable unwrappedException = ExceptionsHelper.findSearchExceptionRootCause(e); // if point in time is not supported, disable it but do not remember forever (stopping and starting will give it another @@ -433,25 +450,27 @@ private void injectPointInTimeIfNeeded(SearchRequest searchRequest, ActionListen e ); } - listener.onResponse(searchRequest); + listener.onResponse(namedSearchRequest); }) ); } - private void doSearch(SearchRequest searchRequest, ActionListener listener) { - logger.trace("searchRequest: {}", searchRequest); + private void 
doSearch(Tuple namedSearchRequest, ActionListener listener) { + logger.trace(() -> new ParameterizedMessage("searchRequest: [{}]", namedSearchRequest.v2())); + + PointInTimeBuilder pit = namedSearchRequest.v2().pointInTimeBuilder(); ClientHelper.executeWithHeadersAsync( transformConfig.getHeaders(), ClientHelper.TRANSFORM_ORIGIN, client, SearchAction.INSTANCE, - searchRequest, + namedSearchRequest.v2(), ActionListener.wrap(response -> { // did the pit change? if (response.pointInTimeId() != null && (pit == null || response.pointInTimeId() != pit.getEncodedId())) { - pit = new PointInTimeBuilder(response.pointInTimeId()).setKeepAlive(PIT_KEEP_ALIVE); - logger.trace("point in time handle has changed"); + namedPits.put(namedSearchRequest.v1(), new PointInTimeBuilder(response.pointInTimeId()).setKeepAlive(PIT_KEEP_ALIVE)); + logger.trace("point in time handle has changed; request [{}]", namedSearchRequest.v1()); } listener.onResponse(response); @@ -461,15 +480,22 @@ private void doSearch(SearchRequest searchRequest, ActionListener getIndicesToQuery(TransformCheckpoint lastCheckpoint, TransformCheckpoint nextCheckpoint); /** * Clear the internal state to free up memory. diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformIndexer.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformIndexer.java index 798950f57bada..733a61b4d61f1 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformIndexer.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformIndexer.java @@ -1091,32 +1091,34 @@ protected QueryBuilder buildFilterQuery() { return queryBuilder; } - protected SearchRequest buildSearchRequest() { + protected Tuple buildSearchRequest() { assert nextCheckpoint != null; - SearchSourceBuilder sourceBuilder = new SearchSourceBuilder().runtimeMappings(getConfig().getSource().getRuntimeMappings()); switch (runState) { case APPLY_RESULTS: - buildUpdateQuery(sourceBuilder); - break; + return new Tuple<>("apply_results", buildQueryToUpdateDestinationIndex()); case IDENTIFY_CHANGES: - buildChangedBucketsQuery(sourceBuilder); - break; + return new Tuple<>("identify_changes", buildQueryToFindChanges()); default: // Any other state is a bug, should not happen logger.warn("Encountered unexpected run state [" + runState + "]"); throw new IllegalStateException("Transform indexer job encountered an illegal state [" + runState + "]"); } - - return new SearchRequest(getConfig().getSource().getIndex()).allowPartialSearchResults(false) - .indicesOptions(IndicesOptions.LENIENT_EXPAND_OPEN) - .source(sourceBuilder); } - private SearchSourceBuilder buildChangedBucketsQuery(SearchSourceBuilder sourceBuilder) { + private SearchRequest buildQueryToFindChanges() { assert isContinuous(); TransformIndexerPosition position = getPosition(); + SearchSourceBuilder sourceBuilder = new SearchSourceBuilder().runtimeMappings(getConfig().getSource().getRuntimeMappings()); + + // reduce the indexes to query to the ones that have changes + SearchRequest request = new SearchRequest( + TransformCheckpoint.getChangedIndices(getLastCheckpoint(), getNextCheckpoint()).toArray(new String[0]) + ); + + request.allowPartialSearchResults(false) // shard failures should fail the request + .indicesOptions(IndicesOptions.LENIENT_EXPAND_OPEN); // TODO: make configurable changeCollector.buildChangesQuery(sourceBuilder, position != null ? 
position.getBucketsPosition() : null, pageSize); @@ -1130,16 +1132,18 @@ private SearchSourceBuilder buildChangedBucketsQuery(SearchSourceBuilder sourceB sourceBuilder.query(filteredQuery); logger.debug("[{}] Querying for changes: {}", getJobId(), sourceBuilder); - return sourceBuilder; + return request.source(sourceBuilder); } - private SearchSourceBuilder buildUpdateQuery(SearchSourceBuilder sourceBuilder) { + private SearchRequest buildQueryToUpdateDestinationIndex() { TransformIndexerPosition position = getPosition(); TransformConfig config = getConfig(); + SearchSourceBuilder sourceBuilder = new SearchSourceBuilder().runtimeMappings(getConfig().getSource().getRuntimeMappings()); function.buildSearchQuery(sourceBuilder, position != null ? position.getIndexerPosition() : null, pageSize); + SearchRequest request = new SearchRequest(); QueryBuilder queryBuilder = config.getSource().getQueryConfig().getQuery(); if (isContinuous()) { @@ -1148,22 +1152,27 @@ private SearchSourceBuilder buildUpdateQuery(SearchSourceBuilder sourceBuilder) // Only apply extra filter if it is the subsequent run of the continuous transform if (nextCheckpoint.getCheckpoint() > 1 && changeCollector != null) { - QueryBuilder filter = changeCollector.buildFilterQuery( - lastCheckpoint.getTimeUpperBound(), - nextCheckpoint.getTimeUpperBound() - ); + QueryBuilder filter = changeCollector.buildFilterQuery(lastCheckpoint, nextCheckpoint); if (filter != null) { filteredQuery.filter(filter); } + request.indices(changeCollector.getIndicesToQuery(lastCheckpoint, nextCheckpoint).toArray(new String[0])); + } else { + request.indices(getConfig().getSource().getIndex()); } queryBuilder = filteredQuery; + + } else { + request.indices(getConfig().getSource().getIndex()); } sourceBuilder.query(queryBuilder); logger.debug(() -> new ParameterizedMessage("[{}] Querying for data: {}", getJobId(), sourceBuilder)); - return sourceBuilder; + return request.source(sourceBuilder) + .allowPartialSearchResults(false) // shard failures should fail the request + .indicesOptions(IndicesOptions.LENIENT_EXPAND_OPEN); // TODO: make configurable } /** diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/latest/LatestChangeCollector.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/latest/LatestChangeCollector.java index f4b67b486d1f9..ec3b4a0376f14 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/latest/LatestChangeCollector.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/latest/LatestChangeCollector.java @@ -11,8 +11,10 @@ import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.search.builder.SearchSourceBuilder; +import org.elasticsearch.xpack.core.transform.transforms.TransformCheckpoint; import org.elasticsearch.xpack.transform.transforms.Function; +import java.util.Collection; import java.util.Map; import java.util.Objects; @@ -40,15 +42,21 @@ public Map processSearchResponse(SearchResponse searchResponse) } @Override - public QueryBuilder buildFilterQuery(long lastCheckpointTimestamp, long nextCheckpointTimestamp) { + public QueryBuilder buildFilterQuery(TransformCheckpoint lastCheckpoint, TransformCheckpoint nextCheckpoint) { // We are only interested in documents that were created in the timeline of the current checkpoint. 
// Older documents cannot influence the transform results as we require the sort field values to change monotonically over time. return QueryBuilders.rangeQuery(synchronizationField) - .gte(lastCheckpointTimestamp) - .lt(nextCheckpointTimestamp) + .gte(lastCheckpoint.getTimeUpperBound()) + .lt(nextCheckpoint.getTimeUpperBound()) .format("epoch_millis"); } + @Override + public Collection getIndicesToQuery(TransformCheckpoint lastCheckpoint, TransformCheckpoint nextCheckpoint) { + // we can shortcut here, only the changed indices are of interest + return TransformCheckpoint.getChangedIndices(lastCheckpoint, nextCheckpoint); + } + @Override public void clear() { // This object is stateless so there is no internal state to clear diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/pivot/CompositeBucketsChangeCollector.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/pivot/CompositeBucketsChangeCollector.java index fe3245f410120..6b35c4021beab 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/pivot/CompositeBucketsChangeCollector.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/pivot/CompositeBucketsChangeCollector.java @@ -32,6 +32,7 @@ import org.elasticsearch.search.aggregations.bucket.geogrid.GeoTileUtils; import org.elasticsearch.search.aggregations.metrics.NumericMetricsAggregation.SingleValue; import org.elasticsearch.search.builder.SearchSourceBuilder; +import org.elasticsearch.xpack.core.transform.transforms.TransformCheckpoint; import org.elasticsearch.xpack.core.transform.transforms.pivot.DateHistogramGroupSource; import org.elasticsearch.xpack.core.transform.transforms.pivot.HistogramGroupSource; import org.elasticsearch.xpack.core.transform.transforms.pivot.SingleGroupSource; @@ -705,16 +706,19 @@ public SearchSourceBuilder buildChangesQuery(SearchSourceBuilder sourceBuilder, } @Override - public QueryBuilder buildFilterQuery(long lastCheckpointTimestamp, long nextCheckpointTimestamp) { + public QueryBuilder buildFilterQuery(TransformCheckpoint lastCheckpoint, TransformCheckpoint nextCheckpoint) { // shortcut for only 1 element if (fieldCollectors.size() == 1) { - return fieldCollectors.values().iterator().next().filterByChanges(lastCheckpointTimestamp, nextCheckpointTimestamp); + return fieldCollectors.values() + .iterator() + .next() + .filterByChanges(lastCheckpoint.getTimeUpperBound(), nextCheckpoint.getTimeUpperBound()); } BoolQueryBuilder filteredQuery = new BoolQueryBuilder(); for (FieldCollector fieldCollector : fieldCollectors.values()) { - QueryBuilder filter = fieldCollector.filterByChanges(lastCheckpointTimestamp, nextCheckpointTimestamp); + QueryBuilder filter = fieldCollector.filterByChanges(lastCheckpoint.getTimeUpperBound(), nextCheckpoint.getTimeUpperBound()); if (filter != null) { filteredQuery.filter(filter); } @@ -723,6 +727,12 @@ public QueryBuilder buildFilterQuery(long lastCheckpointTimestamp, long nextChec return filteredQuery; } + @Override + public Collection getIndicesToQuery(TransformCheckpoint lastCheckpoint, TransformCheckpoint nextCheckpoint) { + // for updating the data, all indices have to be queried + return TransformCheckpoint.getChangedIndices(TransformCheckpoint.EMPTY, nextCheckpoint); + } + @Override public Map processSearchResponse(final SearchResponse searchResponse) { final Aggregations aggregations = searchResponse.getAggregations(); diff --git 
a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/ClientTransformIndexerTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/ClientTransformIndexerTests.java index 36a13c2be0f89..f7cc3aa87d059 100644 --- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/ClientTransformIndexerTests.java +++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/ClientTransformIndexerTests.java @@ -21,6 +21,7 @@ import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.ShardSearchFailure; import org.elasticsearch.client.Client; +import org.elasticsearch.core.Tuple; import org.elasticsearch.search.SearchContextMissingException; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHits; @@ -52,7 +53,6 @@ import java.time.Instant; import java.util.Collections; import java.util.List; -import java.util.Map; import java.util.concurrent.CountDownLatch; import java.util.concurrent.ExecutorService; import java.util.concurrent.TimeUnit; @@ -143,7 +143,6 @@ public void testPitInjection() throws InterruptedException { client, mock(TransformIndexerStats.class), config, - Collections.emptyMap(), null, new TransformCheckpoint( "transform", @@ -241,7 +240,6 @@ public void testPitInjectionIfPitNotSupported() throws InterruptedException { client, mock(TransformIndexerStats.class), config, - Collections.emptyMap(), null, new TransformCheckpoint( "transform", @@ -309,7 +307,6 @@ private static class MockClientTransformIndexer extends ClientTransformIndexer { Client client, TransformIndexerStats initialStats, TransformConfig transformConfig, - Map fieldMappings, TransformProgress transformProgress, TransformCheckpoint lastCheckpoint, TransformCheckpoint nextCheckpoint, @@ -336,8 +333,8 @@ private static class MockClientTransformIndexer extends ClientTransformIndexer { } @Override - protected SearchRequest buildSearchRequest() { - return new SearchRequest().source(new SearchSourceBuilder()); + protected Tuple buildSearchRequest() { + return new Tuple<>("mock", new SearchRequest().source(new SearchSourceBuilder())); } } diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformIndexerFailureHandlingTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformIndexerFailureHandlingTests.java index 9c32e6a5a3f07..ad79e2e95dc6f 100644 --- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformIndexerFailureHandlingTests.java +++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformIndexerFailureHandlingTests.java @@ -177,7 +177,7 @@ protected void doNextSearch(long waitTimeInNanos, ActionListener } try { - SearchResponse response = searchFunction.apply(buildSearchRequest()); + SearchResponse response = searchFunction.apply(buildSearchRequest().v2()); nextPhase.onResponse(response); } catch (Exception e) { nextPhase.onFailure(e); diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/latest/LatestChangeCollectorTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/latest/LatestChangeCollectorTests.java index b564cce4b1b00..2fcbaaf6a10ab 100644 --- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/latest/LatestChangeCollectorTests.java +++ 
b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/latest/LatestChangeCollectorTests.java @@ -9,6 +9,11 @@ import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.core.transform.transforms.TransformCheckpoint; + +import java.util.Collections; +import java.util.Map; +import java.util.Set; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; @@ -19,11 +24,223 @@ public void testBuildFilterQuery() { LatestChangeCollector changeCollector = new LatestChangeCollector("timestamp"); assertThat( - changeCollector.buildFilterQuery(0, 123456789), - is(equalTo(QueryBuilders.rangeQuery("timestamp").gte(0L).lt(123456789L).format("epoch_millis")))); + changeCollector.buildFilterQuery( + new TransformCheckpoint("t_id", 42L, 42L, Collections.emptyMap(), 0L), + new TransformCheckpoint("t_id", 42L, 42L, Collections.emptyMap(), 123456789L) + ), + is(equalTo(QueryBuilders.rangeQuery("timestamp").gte(0L).lt(123456789L).format("epoch_millis"))) + ); + + assertThat( + changeCollector.buildFilterQuery( + new TransformCheckpoint("t_id", 42L, 42L, Collections.emptyMap(), 123456789L), + new TransformCheckpoint("t_id", 42L, 42L, Collections.emptyMap(), 234567890L) + ), + is(equalTo(QueryBuilders.rangeQuery("timestamp").gte(123456789L).lt(234567890L).format("epoch_millis"))) + ); + } + + public void testGetIndicesToQuery() { + LatestChangeCollector changeCollector = new LatestChangeCollector("timestamp"); + + long[] indexSequenceIds1 = { 25L, 25L, 25L }; + long[] indexSequenceIds2 = { 324L, 2425L, 2225L }; + long[] indexSequenceIds3 = { 244L, 225L, 2425L }; + long[] indexSequenceIds4 = { 2005L, 2445L, 2425L }; + + long[] indexSequenceIds3_1 = { 246L, 255L, 2485L }; + long[] indexSequenceIds4_1 = { 2105L, 2545L, 2525L }; + + // no changes + assertThat( + changeCollector.getIndicesToQuery( + new TransformCheckpoint( + "t_id", + 123513L, + 42L, + Map.of( + "index-1", + indexSequenceIds1, + "index-2", + indexSequenceIds2, + "index-3", + indexSequenceIds3, + "index-4", + indexSequenceIds4 + ), + 123543L + ), + new TransformCheckpoint( + "t_id", + 123456759L, + 43L, + Map.of( + "index-1", + indexSequenceIds1, + "index-2", + indexSequenceIds2, + "index-3", + indexSequenceIds3, + "index-4", + indexSequenceIds4 + ), + 123456789L + ) + ), + equalTo(Collections.emptySet()) + ); + + // 3 and 4 changed, 1 and 2 not + assertThat( + changeCollector.getIndicesToQuery( + new TransformCheckpoint( + "t_id", + 123513L, + 42L, + Map.of( + "index-1", + indexSequenceIds1, + "index-2", + indexSequenceIds2, + "index-3", + indexSequenceIds3, + "index-4", + indexSequenceIds4 + ), + 123543L + ), + new TransformCheckpoint( + "t_id", + 123456759L, + 43L, + Map.of( + "index-1", + indexSequenceIds1, + "index-2", + indexSequenceIds2, + "index-3", + indexSequenceIds3_1, + "index-4", + indexSequenceIds4_1 + ), + 123456789L + ) + ), + equalTo(Set.of("index-3", "index-4")) + ); + + // only 3 changed (no order) + assertThat( + changeCollector.getIndicesToQuery( + new TransformCheckpoint( + "t_id", + 123513L, + 42L, + Map.of( + "index-1", + indexSequenceIds1, + "index-2", + indexSequenceIds2, + "index-3", + indexSequenceIds3, + "index-4", + indexSequenceIds4 + ), + 123543L + ), + new TransformCheckpoint( + "t_id", + 123456759L, + 43L, + Map.of( + "index-1", + indexSequenceIds1, + "index-2", + indexSequenceIds2, + "index-3", + indexSequenceIds3_1, + "index-4", + indexSequenceIds4 + ), + 123456789L + ) + ), + 
equalTo(Collections.singleton("index-3")) + ); + + // all have changed + assertThat( + changeCollector.getIndicesToQuery( + new TransformCheckpoint("t_id", 123513L, 42L, Map.of("index-3", indexSequenceIds3, "index-4", indexSequenceIds4), 123543L), + new TransformCheckpoint( + "t_id", + 123456759L, + 43L, + Map.of("index-3", indexSequenceIds3_1, "index-4", indexSequenceIds4_1), + 123456789L + ) + ), + equalTo(Set.of("index-3", "index-4")) + ); + + // a new index appeared + assertThat( + changeCollector.getIndicesToQuery( + new TransformCheckpoint( + "t_id", + 123513L, + 42L, + Map.of("index-2", indexSequenceIds2, "index-3", indexSequenceIds3, "index-4", indexSequenceIds4), + 123543L + ), + new TransformCheckpoint( + "t_id", + 123456759L, + 43L, + Map.of( + "index-1", + indexSequenceIds1, + "index-2", + indexSequenceIds2, + "index-3", + indexSequenceIds3_1, + "index-4", + indexSequenceIds4_1 + ), + 123456789L + ) + ), + equalTo(Set.of("index-1", "index-3", "index-4")) + ); + // index disappeared assertThat( - changeCollector.buildFilterQuery(123456789, 234567890), - is(equalTo(QueryBuilders.rangeQuery("timestamp").gte(123456789L).lt(234567890L).format("epoch_millis")))); + changeCollector.getIndicesToQuery( + new TransformCheckpoint( + "t_id", + 123513L, + 42L, + Map.of( + "index-1", + indexSequenceIds1, + "index-2", + indexSequenceIds2, + "index-3", + indexSequenceIds3, + "index-4", + indexSequenceIds4 + ), + 123543L + ), + new TransformCheckpoint( + "t_id", + 123456759L, + 43L, + Map.of("index-2", indexSequenceIds2, "index-3", indexSequenceIds3_1, "index-4", indexSequenceIds4_1), + 123456789L + ) + ), + equalTo(Set.of("index-3", "index-4")) + ); } } diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/pivot/CompositeBucketsChangeCollectorTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/pivot/CompositeBucketsChangeCollectorTests.java index 4254698ee45a0..adeef0af15f29 100644 --- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/pivot/CompositeBucketsChangeCollectorTests.java +++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/pivot/CompositeBucketsChangeCollectorTests.java @@ -20,6 +20,7 @@ import org.elasticsearch.search.aggregations.metrics.NumericMetricsAggregation.SingleValue; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.core.transform.transforms.TransformCheckpoint; import org.elasticsearch.xpack.core.transform.transforms.pivot.DateHistogramGroupSource; import org.elasticsearch.xpack.core.transform.transforms.pivot.GeoTileGroupSourceTests; import org.elasticsearch.xpack.core.transform.transforms.pivot.GroupConfig; @@ -122,7 +123,10 @@ public void testTermsFieldCollector() throws IOException { collector.processSearchResponse(response); - QueryBuilder queryBuilder = collector.buildFilterQuery(0, 0); + QueryBuilder queryBuilder = collector.buildFilterQuery( + new TransformCheckpoint("t_id", 42L, 42L, Collections.emptyMap(), 0L), + new TransformCheckpoint("t_id", 42L, 42L, Collections.emptyMap(), 0L) + ); assertNotNull(queryBuilder); assertThat(queryBuilder, instanceOf(TermsQueryBuilder.class)); assertThat(((TermsQueryBuilder) queryBuilder).values(), containsInAnyOrder("id1", "id2", "id3")); @@ -142,7 +146,11 @@ public void testDateHistogramFieldCollector() throws IOException { ChangeCollector collector = 
CompositeBucketsChangeCollector.buildChangeCollector(groups, "timestamp"); - QueryBuilder queryBuilder = collector.buildFilterQuery(66_666, 200_222); + QueryBuilder queryBuilder = collector.buildFilterQuery( + new TransformCheckpoint("t_id", 42L, 42L, Collections.emptyMap(), 66_666L), + new TransformCheckpoint("t_id", 42L, 42L, Collections.emptyMap(), 200_222L) + ); + assertNotNull(queryBuilder); assertThat(queryBuilder, instanceOf(RangeQueryBuilder.class)); // rounded down @@ -168,7 +176,11 @@ public void testDateHistogramFieldCollector() throws IOException { collector.processSearchResponse(response); // provide checkpoints, although they don't matter in this case - queryBuilder = collector.buildFilterQuery(66_666, 200_222); + queryBuilder = collector.buildFilterQuery( + new TransformCheckpoint("t_id", 42L, 42L, Collections.emptyMap(), 66_666L), + new TransformCheckpoint("t_id", 42L, 42L, Collections.emptyMap(), 200_222L) + ); + assertNotNull(queryBuilder); assertThat(queryBuilder, instanceOf(RangeQueryBuilder.class)); // rounded down @@ -190,7 +202,11 @@ public void testDateHistogramFieldCollector() throws IOException { // simulate the agg response, that should inject collector.processSearchResponse(response); - queryBuilder = collector.buildFilterQuery(66_666, 200_222); + queryBuilder = collector.buildFilterQuery( + new TransformCheckpoint("t_id", 42L, 42L, Collections.emptyMap(), 66_666L), + new TransformCheckpoint("t_id", 42L, 42L, Collections.emptyMap(), 200_222L) + ); + assertNotNull(queryBuilder); assertThat(queryBuilder, instanceOf(RangeQueryBuilder.class)); @@ -214,12 +230,18 @@ public void testDateHistogramFieldCollector() throws IOException { collector = CompositeBucketsChangeCollector.buildChangeCollector(groups, "timestamp"); - queryBuilder = collector.buildFilterQuery(66_666, 200_222); + queryBuilder = collector.buildFilterQuery( + new TransformCheckpoint("t_id", 42L, 42L, Collections.emptyMap(), 66_666L), + new TransformCheckpoint("t_id", 42L, 42L, Collections.emptyMap(), 200_222L) + ); assertNull(queryBuilder); collector = CompositeBucketsChangeCollector.buildChangeCollector(groups, "sync_timestamp"); - queryBuilder = collector.buildFilterQuery(66_666, 200_222); + queryBuilder = collector.buildFilterQuery( + new TransformCheckpoint("t_id", 42L, 42L, Collections.emptyMap(), 66_666L), + new TransformCheckpoint("t_id", 42L, 42L, Collections.emptyMap(), 200_222L) + ); assertNull(queryBuilder); } From d349f252476a0f50dbc3fd23bc796d77ec6c97fa Mon Sep 17 00:00:00 2001 From: Costin Leau Date: Thu, 26 Aug 2021 15:41:57 +0300 Subject: [PATCH 018/128] QL: regenerate parser due to ANTLR upgrade (#76970) * QL: regenerate parser due to ANTLR upgrade To avoid warnings and subtle change in behavior, regenerate the parser classes using the upgraded ANTLR (4.9.2 vs 4.5.3). 
Fix #76969 --- .../xpack/eql/parser/EqlBaseLexer.java | 321 +++++++++--------- .../xpack/eql/parser/EqlBaseParser.java | 172 ++++++---- .../xpack/eql/analysis/VerifierTests.java | 3 +- .../xpack/eql/parser/ExpressionTests.java | 2 +- .../xpack/sql/parser/SqlBaseLexer.java | 169 ++++----- .../xpack/sql/parser/SqlBaseParser.java | 319 ++++++++++++----- 6 files changed, 607 insertions(+), 379 deletions(-) diff --git a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/parser/EqlBaseLexer.java b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/parser/EqlBaseLexer.java index a4ec2265905a6..0a822b9b8f486 100644 --- a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/parser/EqlBaseLexer.java +++ b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/parser/EqlBaseLexer.java @@ -11,7 +11,7 @@ @SuppressWarnings({"all", "warnings", "unchecked", "unused", "cast"}) class EqlBaseLexer extends Lexer { - static { RuntimeMetaData.checkVersion("4.5.3", RuntimeMetaData.VERSION); } + static { RuntimeMetaData.checkVersion("4.9.2", RuntimeMetaData.VERSION); } protected static final DFA[] _decisionToDFA; protected static final PredictionContextCache _sharedContextCache = @@ -24,37 +24,51 @@ class EqlBaseLexer extends Lexer { SLASH=33, PERCENT=34, DOT=35, COMMA=36, LB=37, RB=38, LP=39, RP=40, PIPE=41, STRING=42, INTEGER_VALUE=43, DECIMAL_VALUE=44, IDENTIFIER=45, QUOTED_IDENTIFIER=46, TILDE_IDENTIFIER=47, LINE_COMMENT=48, BRACKETED_COMMENT=49, WS=50; + public static String[] channelNames = { + "DEFAULT_TOKEN_CHANNEL", "HIDDEN" + }; + public static String[] modeNames = { "DEFAULT_MODE" }; - public static final String[] ruleNames = { - "AND", "ANY", "BY", "FALSE", "IN", "IN_INSENSITIVE", "JOIN", "LIKE", "LIKE_INSENSITIVE", - "MAXSPAN", "NOT", "NULL", "OF", "OR", "REGEX", "REGEX_INSENSITIVE", "SEQUENCE", - "TRUE", "UNTIL", "WHERE", "WITH", "SEQ", "ASGN", "EQ", "NEQ", "LT", "LTE", - "GT", "GTE", "PLUS", "MINUS", "ASTERISK", "SLASH", "PERCENT", "DOT", "COMMA", - "LB", "RB", "LP", "RP", "PIPE", "STRING_ESCAPE", "HEX_DIGIT", "UNICODE_ESCAPE", - "UNESCAPED_CHARS", "STRING", "INTEGER_VALUE", "DECIMAL_VALUE", "IDENTIFIER", - "QUOTED_IDENTIFIER", "TILDE_IDENTIFIER", "EXPONENT", "DIGIT", "LETTER", - "LINE_COMMENT", "BRACKETED_COMMENT", "WS" - }; + private static String[] makeRuleNames() { + return new String[] { + "AND", "ANY", "BY", "FALSE", "IN", "IN_INSENSITIVE", "JOIN", "LIKE", + "LIKE_INSENSITIVE", "MAXSPAN", "NOT", "NULL", "OF", "OR", "REGEX", "REGEX_INSENSITIVE", + "SEQUENCE", "TRUE", "UNTIL", "WHERE", "WITH", "SEQ", "ASGN", "EQ", "NEQ", + "LT", "LTE", "GT", "GTE", "PLUS", "MINUS", "ASTERISK", "SLASH", "PERCENT", + "DOT", "COMMA", "LB", "RB", "LP", "RP", "PIPE", "STRING_ESCAPE", "HEX_DIGIT", + "UNICODE_ESCAPE", "UNESCAPED_CHARS", "STRING", "INTEGER_VALUE", "DECIMAL_VALUE", + "IDENTIFIER", "QUOTED_IDENTIFIER", "TILDE_IDENTIFIER", "EXPONENT", "DIGIT", + "LETTER", "LINE_COMMENT", "BRACKETED_COMMENT", "WS" + }; + } + public static final String[] ruleNames = makeRuleNames(); - private static final String[] _LITERAL_NAMES = { - null, "'and'", "'any'", "'by'", "'false'", "'in'", "'in~'", "'join'", - "'like'", "'like~'", "'maxspan'", "'not'", "'null'", "'of'", "'or'", "'regex'", - "'regex~'", "'sequence'", "'true'", "'until'", "'where'", "'with'", "':'", - "'='", "'=='", "'!='", "'<'", "'<='", "'>'", "'>='", "'+'", "'-'", "'*'", - "'/'", "'%'", "'.'", "','", "'['", "']'", "'('", "')'", "'|'" - }; - private static final String[] _SYMBOLIC_NAMES = { - null, "AND", "ANY", "BY", "FALSE", "IN", 
"IN_INSENSITIVE", "JOIN", "LIKE", - "LIKE_INSENSITIVE", "MAXSPAN", "NOT", "NULL", "OF", "OR", "REGEX", "REGEX_INSENSITIVE", - "SEQUENCE", "TRUE", "UNTIL", "WHERE", "WITH", "SEQ", "ASGN", "EQ", "NEQ", - "LT", "LTE", "GT", "GTE", "PLUS", "MINUS", "ASTERISK", "SLASH", "PERCENT", - "DOT", "COMMA", "LB", "RB", "LP", "RP", "PIPE", "STRING", "INTEGER_VALUE", - "DECIMAL_VALUE", "IDENTIFIER", "QUOTED_IDENTIFIER", "TILDE_IDENTIFIER", - "LINE_COMMENT", "BRACKETED_COMMENT", "WS" - }; + private static String[] makeLiteralNames() { + return new String[] { + null, "'and'", "'any'", "'by'", "'false'", "'in'", "'in~'", "'join'", + "'like'", "'like~'", "'maxspan'", "'not'", "'null'", "'of'", "'or'", + "'regex'", "'regex~'", "'sequence'", "'true'", "'until'", "'where'", + "'with'", "':'", "'='", "'=='", "'!='", "'<'", "'<='", "'>'", "'>='", + "'+'", "'-'", "'*'", "'/'", "'%'", "'.'", "','", "'['", "']'", "'('", + "')'", "'|'" + }; + } + private static final String[] _LITERAL_NAMES = makeLiteralNames(); + private static String[] makeSymbolicNames() { + return new String[] { + null, "AND", "ANY", "BY", "FALSE", "IN", "IN_INSENSITIVE", "JOIN", "LIKE", + "LIKE_INSENSITIVE", "MAXSPAN", "NOT", "NULL", "OF", "OR", "REGEX", "REGEX_INSENSITIVE", + "SEQUENCE", "TRUE", "UNTIL", "WHERE", "WITH", "SEQ", "ASGN", "EQ", "NEQ", + "LT", "LTE", "GT", "GTE", "PLUS", "MINUS", "ASTERISK", "SLASH", "PERCENT", + "DOT", "COMMA", "LB", "RB", "LP", "RP", "PIPE", "STRING", "INTEGER_VALUE", + "DECIMAL_VALUE", "IDENTIFIER", "QUOTED_IDENTIFIER", "TILDE_IDENTIFIER", + "LINE_COMMENT", "BRACKETED_COMMENT", "WS" + }; + } + private static final String[] _SYMBOLIC_NAMES = makeSymbolicNames(); public static final Vocabulary VOCABULARY = new VocabularyImpl(_LITERAL_NAMES, _SYMBOLIC_NAMES); /** @@ -103,6 +117,9 @@ public EqlBaseLexer(CharStream input) { @Override public String getSerializedATN() { return _serializedATN; } + @Override + public String[] getChannelNames() { return channelNames; } + @Override public String[] getModeNames() { return modeNames; } @@ -110,7 +127,7 @@ public EqlBaseLexer(CharStream input) { public ATN getATN() { return _ATN; } public static final String _serializedATN = - "\3\u0430\ud6d1\u8206\uad2d\u4417\uaef1\u8d80\uaadd\2\64\u01e7\b\1\4\2"+ + "\3\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964\2\64\u01e7\b\1\4\2"+ "\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7\4\b\t\b\4\t\t\t\4\n\t\n\4"+ "\13\t\13\4\f\t\f\4\r\t\r\4\16\t\16\4\17\t\17\4\20\t\20\4\21\t\21\4\22"+ "\t\22\4\23\t\23\4\24\t\24\4\25\t\25\4\26\t\26\4\27\t\27\4\30\t\30\4\31"+ @@ -150,130 +167,130 @@ public EqlBaseLexer(CharStream input) { "e\60g\61i\2k\2m\2o\62q\63s\64\3\2\20\n\2$$))^^ddhhppttvv\5\2\62;CHch\6"+ "\2\f\f\17\17$$^^\4\2\f\f\17\17\6\2\f\f\17\17))^^\5\2\f\f\17\17$$\5\2\f"+ "\f\17\17))\4\2BBaa\3\2bb\4\2GGgg\4\2--//\3\2\62;\4\2C\\c|\5\2\13\f\17"+ - "\17\"\"\u020c\2\3\3\2\2\2\2\5\3\2\2\2\2\7\3\2\2\2\2\t\3\2\2\2\2\13\3\2"+ - "\2\2\2\r\3\2\2\2\2\17\3\2\2\2\2\21\3\2\2\2\2\23\3\2\2\2\2\25\3\2\2\2\2"+ - "\27\3\2\2\2\2\31\3\2\2\2\2\33\3\2\2\2\2\35\3\2\2\2\2\37\3\2\2\2\2!\3\2"+ - "\2\2\2#\3\2\2\2\2%\3\2\2\2\2\'\3\2\2\2\2)\3\2\2\2\2+\3\2\2\2\2-\3\2\2"+ - "\2\2/\3\2\2\2\2\61\3\2\2\2\2\63\3\2\2\2\2\65\3\2\2\2\2\67\3\2\2\2\29\3"+ - "\2\2\2\2;\3\2\2\2\2=\3\2\2\2\2?\3\2\2\2\2A\3\2\2\2\2C\3\2\2\2\2E\3\2\2"+ - "\2\2G\3\2\2\2\2I\3\2\2\2\2K\3\2\2\2\2M\3\2\2\2\2O\3\2\2\2\2Q\3\2\2\2\2"+ - "S\3\2\2\2\2]\3\2\2\2\2_\3\2\2\2\2a\3\2\2\2\2c\3\2\2\2\2e\3\2\2\2\2g\3"+ - "\2\2\2\2o\3\2\2\2\2q\3\2\2\2\2s\3\2\2\2\3u\3\2\2\2\5y\3\2\2\2\7}\3\2\2"+ - 
"\2\t\u0080\3\2\2\2\13\u0086\3\2\2\2\r\u0089\3\2\2\2\17\u008d\3\2\2\2\21"+ - "\u0092\3\2\2\2\23\u0097\3\2\2\2\25\u009d\3\2\2\2\27\u00a5\3\2\2\2\31\u00a9"+ - "\3\2\2\2\33\u00ae\3\2\2\2\35\u00b1\3\2\2\2\37\u00b4\3\2\2\2!\u00ba\3\2"+ - "\2\2#\u00c1\3\2\2\2%\u00ca\3\2\2\2\'\u00cf\3\2\2\2)\u00d5\3\2\2\2+\u00db"+ - "\3\2\2\2-\u00e0\3\2\2\2/\u00e2\3\2\2\2\61\u00e4\3\2\2\2\63\u00e7\3\2\2"+ - "\2\65\u00ea\3\2\2\2\67\u00ec\3\2\2\29\u00ef\3\2\2\2;\u00f1\3\2\2\2=\u00f4"+ - "\3\2\2\2?\u00f6\3\2\2\2A\u00f8\3\2\2\2C\u00fa\3\2\2\2E\u00fc\3\2\2\2G"+ - "\u00fe\3\2\2\2I\u0100\3\2\2\2K\u0102\3\2\2\2M\u0104\3\2\2\2O\u0106\3\2"+ - "\2\2Q\u0108\3\2\2\2S\u010a\3\2\2\2U\u010c\3\2\2\2W\u010f\3\2\2\2Y\u0111"+ - "\3\2\2\2[\u011c\3\2\2\2]\u015e\3\2\2\2_\u0161\3\2\2\2a\u018f\3\2\2\2c"+ - "\u0193\3\2\2\2e\u019d\3\2\2\2g\u01a8\3\2\2\2i\u01b3\3\2\2\2k\u01bc\3\2"+ - "\2\2m\u01be\3\2\2\2o\u01c0\3\2\2\2q\u01d1\3\2\2\2s\u01e1\3\2\2\2uv\7c"+ - "\2\2vw\7p\2\2wx\7f\2\2x\4\3\2\2\2yz\7c\2\2z{\7p\2\2{|\7{\2\2|\6\3\2\2"+ - "\2}~\7d\2\2~\177\7{\2\2\177\b\3\2\2\2\u0080\u0081\7h\2\2\u0081\u0082\7"+ - "c\2\2\u0082\u0083\7n\2\2\u0083\u0084\7u\2\2\u0084\u0085\7g\2\2\u0085\n"+ - "\3\2\2\2\u0086\u0087\7k\2\2\u0087\u0088\7p\2\2\u0088\f\3\2\2\2\u0089\u008a"+ - "\7k\2\2\u008a\u008b\7p\2\2\u008b\u008c\7\u0080\2\2\u008c\16\3\2\2\2\u008d"+ - "\u008e\7l\2\2\u008e\u008f\7q\2\2\u008f\u0090\7k\2\2\u0090\u0091\7p\2\2"+ - "\u0091\20\3\2\2\2\u0092\u0093\7n\2\2\u0093\u0094\7k\2\2\u0094\u0095\7"+ - "m\2\2\u0095\u0096\7g\2\2\u0096\22\3\2\2\2\u0097\u0098\7n\2\2\u0098\u0099"+ - "\7k\2\2\u0099\u009a\7m\2\2\u009a\u009b\7g\2\2\u009b\u009c\7\u0080\2\2"+ - "\u009c\24\3\2\2\2\u009d\u009e\7o\2\2\u009e\u009f\7c\2\2\u009f\u00a0\7"+ - "z\2\2\u00a0\u00a1\7u\2\2\u00a1\u00a2\7r\2\2\u00a2\u00a3\7c\2\2\u00a3\u00a4"+ - "\7p\2\2\u00a4\26\3\2\2\2\u00a5\u00a6\7p\2\2\u00a6\u00a7\7q\2\2\u00a7\u00a8"+ - "\7v\2\2\u00a8\30\3\2\2\2\u00a9\u00aa\7p\2\2\u00aa\u00ab\7w\2\2\u00ab\u00ac"+ - "\7n\2\2\u00ac\u00ad\7n\2\2\u00ad\32\3\2\2\2\u00ae\u00af\7q\2\2\u00af\u00b0"+ - "\7h\2\2\u00b0\34\3\2\2\2\u00b1\u00b2\7q\2\2\u00b2\u00b3\7t\2\2\u00b3\36"+ - "\3\2\2\2\u00b4\u00b5\7t\2\2\u00b5\u00b6\7g\2\2\u00b6\u00b7\7i\2\2\u00b7"+ - "\u00b8\7g\2\2\u00b8\u00b9\7z\2\2\u00b9 \3\2\2\2\u00ba\u00bb\7t\2\2\u00bb"+ - "\u00bc\7g\2\2\u00bc\u00bd\7i\2\2\u00bd\u00be\7g\2\2\u00be\u00bf\7z\2\2"+ - "\u00bf\u00c0\7\u0080\2\2\u00c0\"\3\2\2\2\u00c1\u00c2\7u\2\2\u00c2\u00c3"+ - "\7g\2\2\u00c3\u00c4\7s\2\2\u00c4\u00c5\7w\2\2\u00c5\u00c6\7g\2\2\u00c6"+ - "\u00c7\7p\2\2\u00c7\u00c8\7e\2\2\u00c8\u00c9\7g\2\2\u00c9$\3\2\2\2\u00ca"+ - "\u00cb\7v\2\2\u00cb\u00cc\7t\2\2\u00cc\u00cd\7w\2\2\u00cd\u00ce\7g\2\2"+ - "\u00ce&\3\2\2\2\u00cf\u00d0\7w\2\2\u00d0\u00d1\7p\2\2\u00d1\u00d2\7v\2"+ - "\2\u00d2\u00d3\7k\2\2\u00d3\u00d4\7n\2\2\u00d4(\3\2\2\2\u00d5\u00d6\7"+ - "y\2\2\u00d6\u00d7\7j\2\2\u00d7\u00d8\7g\2\2\u00d8\u00d9\7t\2\2\u00d9\u00da"+ - "\7g\2\2\u00da*\3\2\2\2\u00db\u00dc\7y\2\2\u00dc\u00dd\7k\2\2\u00dd\u00de"+ - "\7v\2\2\u00de\u00df\7j\2\2\u00df,\3\2\2\2\u00e0\u00e1\7<\2\2\u00e1.\3"+ - "\2\2\2\u00e2\u00e3\7?\2\2\u00e3\60\3\2\2\2\u00e4\u00e5\7?\2\2\u00e5\u00e6"+ - "\7?\2\2\u00e6\62\3\2\2\2\u00e7\u00e8\7#\2\2\u00e8\u00e9\7?\2\2\u00e9\64"+ - "\3\2\2\2\u00ea\u00eb\7>\2\2\u00eb\66\3\2\2\2\u00ec\u00ed\7>\2\2\u00ed"+ - "\u00ee\7?\2\2\u00ee8\3\2\2\2\u00ef\u00f0\7@\2\2\u00f0:\3\2\2\2\u00f1\u00f2"+ - "\7@\2\2\u00f2\u00f3\7?\2\2\u00f3<\3\2\2\2\u00f4\u00f5\7-\2\2\u00f5>\3"+ - "\2\2\2\u00f6\u00f7\7/\2\2\u00f7@\3\2\2\2\u00f8\u00f9\7,\2\2\u00f9B\3\2"+ - "\2\2\u00fa\u00fb\7\61\2\2\u00fbD\3\2\2\2\u00fc\u00fd\7\'\2\2\u00fdF\3"+ - 
"\2\2\2\u00fe\u00ff\7\60\2\2\u00ffH\3\2\2\2\u0100\u0101\7.\2\2\u0101J\3"+ - "\2\2\2\u0102\u0103\7]\2\2\u0103L\3\2\2\2\u0104\u0105\7_\2\2\u0105N\3\2"+ - "\2\2\u0106\u0107\7*\2\2\u0107P\3\2\2\2\u0108\u0109\7+\2\2\u0109R\3\2\2"+ - "\2\u010a\u010b\7~\2\2\u010bT\3\2\2\2\u010c\u010d\7^\2\2\u010d\u010e\t"+ - "\2\2\2\u010eV\3\2\2\2\u010f\u0110\t\3\2\2\u0110X\3\2\2\2\u0111\u0112\7"+ - "^\2\2\u0112\u0113\7w\2\2\u0113\u0114\3\2\2\2\u0114\u0116\7}\2\2\u0115"+ - "\u0117\5W,\2\u0116\u0115\3\2\2\2\u0117\u0118\3\2\2\2\u0118\u0116\3\2\2"+ - "\2\u0118\u0119\3\2\2\2\u0119\u011a\3\2\2\2\u011a\u011b\7\177\2\2\u011b"+ - "Z\3\2\2\2\u011c\u011d\n\4\2\2\u011d\\\3\2\2\2\u011e\u0124\7$\2\2\u011f"+ - "\u0123\5U+\2\u0120\u0123\5Y-\2\u0121\u0123\5[.\2\u0122\u011f\3\2\2\2\u0122"+ - "\u0120\3\2\2\2\u0122\u0121\3\2\2\2\u0123\u0126\3\2\2\2\u0124\u0122\3\2"+ - "\2\2\u0124\u0125\3\2\2\2\u0125\u0127\3\2\2\2\u0126\u0124\3\2\2\2\u0127"+ - "\u015f\7$\2\2\u0128\u0129\7$\2\2\u0129\u012a\7$\2\2\u012a\u012b\7$\2\2"+ - "\u012b\u012f\3\2\2\2\u012c\u012e\n\5\2\2\u012d\u012c\3\2\2\2\u012e\u0131"+ - "\3\2\2\2\u012f\u0130\3\2\2\2\u012f\u012d\3\2\2\2\u0130\u0132\3\2\2\2\u0131"+ - "\u012f\3\2\2\2\u0132\u0133\7$\2\2\u0133\u0134\7$\2\2\u0134\u0135\7$\2"+ - "\2\u0135\u0137\3\2\2\2\u0136\u0138\7$\2\2\u0137\u0136\3\2\2\2\u0137\u0138"+ - "\3\2\2\2\u0138\u013a\3\2\2\2\u0139\u013b\7$\2\2\u013a\u0139\3\2\2\2\u013a"+ - "\u013b\3\2\2\2\u013b\u015f\3\2\2\2\u013c\u0142\7)\2\2\u013d\u013e\7^\2"+ - "\2\u013e\u0141\t\2\2\2\u013f\u0141\n\6\2\2\u0140\u013d\3\2\2\2\u0140\u013f"+ - "\3\2\2\2\u0141\u0144\3\2\2\2\u0142\u0140\3\2\2\2\u0142\u0143\3\2\2\2\u0143"+ - "\u0145\3\2\2\2\u0144\u0142\3\2\2\2\u0145\u015f\7)\2\2\u0146\u0147\7A\2"+ - "\2\u0147\u0148\7$\2\2\u0148\u014e\3\2\2\2\u0149\u014a\7^\2\2\u014a\u014d"+ - "\7$\2\2\u014b\u014d\n\7\2\2\u014c\u0149\3\2\2\2\u014c\u014b\3\2\2\2\u014d"+ - "\u0150\3\2\2\2\u014e\u014c\3\2\2\2\u014e\u014f\3\2\2\2\u014f\u0151\3\2"+ - "\2\2\u0150\u014e\3\2\2\2\u0151\u015f\7$\2\2\u0152\u0153\7A\2\2\u0153\u0154"+ - "\7)\2\2\u0154\u015a\3\2\2\2\u0155\u0156\7^\2\2\u0156\u0159\7)\2\2\u0157"+ - "\u0159\n\b\2\2\u0158\u0155\3\2\2\2\u0158\u0157\3\2\2\2\u0159\u015c\3\2"+ - "\2\2\u015a\u0158\3\2\2\2\u015a\u015b\3\2\2\2\u015b\u015d\3\2\2\2\u015c"+ - "\u015a\3\2\2\2\u015d\u015f\7)\2\2\u015e\u011e\3\2\2\2\u015e\u0128\3\2"+ - "\2\2\u015e\u013c\3\2\2\2\u015e\u0146\3\2\2\2\u015e\u0152\3\2\2\2\u015f"+ - "^\3\2\2\2\u0160\u0162\5k\66\2\u0161\u0160\3\2\2\2\u0162\u0163\3\2\2\2"+ - "\u0163\u0161\3\2\2\2\u0163\u0164\3\2\2\2\u0164`\3\2\2\2\u0165\u0167\5"+ - "k\66\2\u0166\u0165\3\2\2\2\u0167\u0168\3\2\2\2\u0168\u0166\3\2\2\2\u0168"+ - "\u0169\3\2\2\2\u0169\u016a\3\2\2\2\u016a\u016e\5G$\2\u016b\u016d\5k\66"+ - "\2\u016c\u016b\3\2\2\2\u016d\u0170\3\2\2\2\u016e\u016c\3\2\2\2\u016e\u016f"+ - "\3\2\2\2\u016f\u0190\3\2\2\2\u0170\u016e\3\2\2\2\u0171\u0173\5G$\2\u0172"+ - "\u0174\5k\66\2\u0173\u0172\3\2\2\2\u0174\u0175\3\2\2\2\u0175\u0173\3\2"+ - "\2\2\u0175\u0176\3\2\2\2\u0176\u0190\3\2\2\2\u0177\u0179\5k\66\2\u0178"+ - "\u0177\3\2\2\2\u0179\u017a\3\2\2\2\u017a\u0178\3\2\2\2\u017a\u017b\3\2"+ - "\2\2\u017b\u0183\3\2\2\2\u017c\u0180\5G$\2\u017d\u017f\5k\66\2\u017e\u017d"+ - "\3\2\2\2\u017f\u0182\3\2\2\2\u0180\u017e\3\2\2\2\u0180\u0181\3\2\2\2\u0181"+ - "\u0184\3\2\2\2\u0182\u0180\3\2\2\2\u0183\u017c\3\2\2\2\u0183\u0184\3\2"+ - "\2\2\u0184\u0185\3\2\2\2\u0185\u0186\5i\65\2\u0186\u0190\3\2\2\2\u0187"+ - "\u0189\5G$\2\u0188\u018a\5k\66\2\u0189\u0188\3\2\2\2\u018a\u018b\3\2\2"+ - "\2\u018b\u0189\3\2\2\2\u018b\u018c\3\2\2\2\u018c\u018d\3\2\2\2\u018d\u018e"+ - 
"\5i\65\2\u018e\u0190\3\2\2\2\u018f\u0166\3\2\2\2\u018f\u0171\3\2\2\2\u018f"+ - "\u0178\3\2\2\2\u018f\u0187\3\2\2\2\u0190b\3\2\2\2\u0191\u0194\5m\67\2"+ - "\u0192\u0194\t\t\2\2\u0193\u0191\3\2\2\2\u0193\u0192\3\2\2\2\u0194\u019a"+ - "\3\2\2\2\u0195\u0199\5m\67\2\u0196\u0199\5k\66\2\u0197\u0199\7a\2\2\u0198"+ - "\u0195\3\2\2\2\u0198\u0196\3\2\2\2\u0198\u0197\3\2\2\2\u0199\u019c\3\2"+ - "\2\2\u019a\u0198\3\2\2\2\u019a\u019b\3\2\2\2\u019bd\3\2\2\2\u019c\u019a"+ - "\3\2\2\2\u019d\u01a3\7b\2\2\u019e\u01a2\n\n\2\2\u019f\u01a0\7b\2\2\u01a0"+ - "\u01a2\7b\2\2\u01a1\u019e\3\2\2\2\u01a1\u019f\3\2\2\2\u01a2\u01a5\3\2"+ - "\2\2\u01a3\u01a1\3\2\2\2\u01a3\u01a4\3\2\2\2\u01a4\u01a6\3\2\2\2\u01a5"+ - "\u01a3\3\2\2\2\u01a6\u01a7\7b\2\2\u01a7f\3\2\2\2\u01a8\u01ae\5m\67\2\u01a9"+ - "\u01ad\5m\67\2\u01aa\u01ad\5k\66\2\u01ab\u01ad\7a\2\2\u01ac\u01a9\3\2"+ - "\2\2\u01ac\u01aa\3\2\2\2\u01ac\u01ab\3\2\2\2\u01ad\u01b0\3\2\2\2\u01ae"+ - "\u01ac\3\2\2\2\u01ae\u01af\3\2\2\2\u01af\u01b1\3\2\2\2\u01b0\u01ae\3\2"+ - "\2\2\u01b1\u01b2\7\u0080\2\2\u01b2h\3\2\2\2\u01b3\u01b5\t\13\2\2\u01b4"+ - "\u01b6\t\f\2\2\u01b5\u01b4\3\2\2\2\u01b5\u01b6\3\2\2\2\u01b6\u01b8\3\2"+ - "\2\2\u01b7\u01b9\5k\66\2\u01b8\u01b7\3\2\2\2\u01b9\u01ba\3\2\2\2\u01ba"+ - "\u01b8\3\2\2\2\u01ba\u01bb\3\2\2\2\u01bbj\3\2\2\2\u01bc\u01bd\t\r\2\2"+ - "\u01bdl\3\2\2\2\u01be\u01bf\t\16\2\2\u01bfn\3\2\2\2\u01c0\u01c1\7\61\2"+ - "\2\u01c1\u01c2\7\61\2\2\u01c2\u01c6\3\2\2\2\u01c3\u01c5\n\5\2\2\u01c4"+ - "\u01c3\3\2\2\2\u01c5\u01c8\3\2\2\2\u01c6\u01c4\3\2\2\2\u01c6\u01c7\3\2"+ - "\2\2\u01c7\u01ca\3\2\2\2\u01c8\u01c6\3\2\2\2\u01c9\u01cb\7\17\2\2\u01ca"+ - "\u01c9\3\2\2\2\u01ca\u01cb\3\2\2\2\u01cb\u01cd\3\2\2\2\u01cc\u01ce\7\f"+ - "\2\2\u01cd\u01cc\3\2\2\2\u01cd\u01ce\3\2\2\2\u01ce\u01cf\3\2\2\2\u01cf"+ + "\17\"\"\2\u020c\2\3\3\2\2\2\2\5\3\2\2\2\2\7\3\2\2\2\2\t\3\2\2\2\2\13\3"+ + "\2\2\2\2\r\3\2\2\2\2\17\3\2\2\2\2\21\3\2\2\2\2\23\3\2\2\2\2\25\3\2\2\2"+ + "\2\27\3\2\2\2\2\31\3\2\2\2\2\33\3\2\2\2\2\35\3\2\2\2\2\37\3\2\2\2\2!\3"+ + "\2\2\2\2#\3\2\2\2\2%\3\2\2\2\2\'\3\2\2\2\2)\3\2\2\2\2+\3\2\2\2\2-\3\2"+ + "\2\2\2/\3\2\2\2\2\61\3\2\2\2\2\63\3\2\2\2\2\65\3\2\2\2\2\67\3\2\2\2\2"+ + "9\3\2\2\2\2;\3\2\2\2\2=\3\2\2\2\2?\3\2\2\2\2A\3\2\2\2\2C\3\2\2\2\2E\3"+ + "\2\2\2\2G\3\2\2\2\2I\3\2\2\2\2K\3\2\2\2\2M\3\2\2\2\2O\3\2\2\2\2Q\3\2\2"+ + "\2\2S\3\2\2\2\2]\3\2\2\2\2_\3\2\2\2\2a\3\2\2\2\2c\3\2\2\2\2e\3\2\2\2\2"+ + "g\3\2\2\2\2o\3\2\2\2\2q\3\2\2\2\2s\3\2\2\2\3u\3\2\2\2\5y\3\2\2\2\7}\3"+ + "\2\2\2\t\u0080\3\2\2\2\13\u0086\3\2\2\2\r\u0089\3\2\2\2\17\u008d\3\2\2"+ + "\2\21\u0092\3\2\2\2\23\u0097\3\2\2\2\25\u009d\3\2\2\2\27\u00a5\3\2\2\2"+ + "\31\u00a9\3\2\2\2\33\u00ae\3\2\2\2\35\u00b1\3\2\2\2\37\u00b4\3\2\2\2!"+ + "\u00ba\3\2\2\2#\u00c1\3\2\2\2%\u00ca\3\2\2\2\'\u00cf\3\2\2\2)\u00d5\3"+ + "\2\2\2+\u00db\3\2\2\2-\u00e0\3\2\2\2/\u00e2\3\2\2\2\61\u00e4\3\2\2\2\63"+ + "\u00e7\3\2\2\2\65\u00ea\3\2\2\2\67\u00ec\3\2\2\29\u00ef\3\2\2\2;\u00f1"+ + "\3\2\2\2=\u00f4\3\2\2\2?\u00f6\3\2\2\2A\u00f8\3\2\2\2C\u00fa\3\2\2\2E"+ + "\u00fc\3\2\2\2G\u00fe\3\2\2\2I\u0100\3\2\2\2K\u0102\3\2\2\2M\u0104\3\2"+ + "\2\2O\u0106\3\2\2\2Q\u0108\3\2\2\2S\u010a\3\2\2\2U\u010c\3\2\2\2W\u010f"+ + "\3\2\2\2Y\u0111\3\2\2\2[\u011c\3\2\2\2]\u015e\3\2\2\2_\u0161\3\2\2\2a"+ + "\u018f\3\2\2\2c\u0193\3\2\2\2e\u019d\3\2\2\2g\u01a8\3\2\2\2i\u01b3\3\2"+ + "\2\2k\u01bc\3\2\2\2m\u01be\3\2\2\2o\u01c0\3\2\2\2q\u01d1\3\2\2\2s\u01e1"+ + "\3\2\2\2uv\7c\2\2vw\7p\2\2wx\7f\2\2x\4\3\2\2\2yz\7c\2\2z{\7p\2\2{|\7{"+ + "\2\2|\6\3\2\2\2}~\7d\2\2~\177\7{\2\2\177\b\3\2\2\2\u0080\u0081\7h\2\2"+ + 
"\u0081\u0082\7c\2\2\u0082\u0083\7n\2\2\u0083\u0084\7u\2\2\u0084\u0085"+ + "\7g\2\2\u0085\n\3\2\2\2\u0086\u0087\7k\2\2\u0087\u0088\7p\2\2\u0088\f"+ + "\3\2\2\2\u0089\u008a\7k\2\2\u008a\u008b\7p\2\2\u008b\u008c\7\u0080\2\2"+ + "\u008c\16\3\2\2\2\u008d\u008e\7l\2\2\u008e\u008f\7q\2\2\u008f\u0090\7"+ + "k\2\2\u0090\u0091\7p\2\2\u0091\20\3\2\2\2\u0092\u0093\7n\2\2\u0093\u0094"+ + "\7k\2\2\u0094\u0095\7m\2\2\u0095\u0096\7g\2\2\u0096\22\3\2\2\2\u0097\u0098"+ + "\7n\2\2\u0098\u0099\7k\2\2\u0099\u009a\7m\2\2\u009a\u009b\7g\2\2\u009b"+ + "\u009c\7\u0080\2\2\u009c\24\3\2\2\2\u009d\u009e\7o\2\2\u009e\u009f\7c"+ + "\2\2\u009f\u00a0\7z\2\2\u00a0\u00a1\7u\2\2\u00a1\u00a2\7r\2\2\u00a2\u00a3"+ + "\7c\2\2\u00a3\u00a4\7p\2\2\u00a4\26\3\2\2\2\u00a5\u00a6\7p\2\2\u00a6\u00a7"+ + "\7q\2\2\u00a7\u00a8\7v\2\2\u00a8\30\3\2\2\2\u00a9\u00aa\7p\2\2\u00aa\u00ab"+ + "\7w\2\2\u00ab\u00ac\7n\2\2\u00ac\u00ad\7n\2\2\u00ad\32\3\2\2\2\u00ae\u00af"+ + "\7q\2\2\u00af\u00b0\7h\2\2\u00b0\34\3\2\2\2\u00b1\u00b2\7q\2\2\u00b2\u00b3"+ + "\7t\2\2\u00b3\36\3\2\2\2\u00b4\u00b5\7t\2\2\u00b5\u00b6\7g\2\2\u00b6\u00b7"+ + "\7i\2\2\u00b7\u00b8\7g\2\2\u00b8\u00b9\7z\2\2\u00b9 \3\2\2\2\u00ba\u00bb"+ + "\7t\2\2\u00bb\u00bc\7g\2\2\u00bc\u00bd\7i\2\2\u00bd\u00be\7g\2\2\u00be"+ + "\u00bf\7z\2\2\u00bf\u00c0\7\u0080\2\2\u00c0\"\3\2\2\2\u00c1\u00c2\7u\2"+ + "\2\u00c2\u00c3\7g\2\2\u00c3\u00c4\7s\2\2\u00c4\u00c5\7w\2\2\u00c5\u00c6"+ + "\7g\2\2\u00c6\u00c7\7p\2\2\u00c7\u00c8\7e\2\2\u00c8\u00c9\7g\2\2\u00c9"+ + "$\3\2\2\2\u00ca\u00cb\7v\2\2\u00cb\u00cc\7t\2\2\u00cc\u00cd\7w\2\2\u00cd"+ + "\u00ce\7g\2\2\u00ce&\3\2\2\2\u00cf\u00d0\7w\2\2\u00d0\u00d1\7p\2\2\u00d1"+ + "\u00d2\7v\2\2\u00d2\u00d3\7k\2\2\u00d3\u00d4\7n\2\2\u00d4(\3\2\2\2\u00d5"+ + "\u00d6\7y\2\2\u00d6\u00d7\7j\2\2\u00d7\u00d8\7g\2\2\u00d8\u00d9\7t\2\2"+ + "\u00d9\u00da\7g\2\2\u00da*\3\2\2\2\u00db\u00dc\7y\2\2\u00dc\u00dd\7k\2"+ + "\2\u00dd\u00de\7v\2\2\u00de\u00df\7j\2\2\u00df,\3\2\2\2\u00e0\u00e1\7"+ + "<\2\2\u00e1.\3\2\2\2\u00e2\u00e3\7?\2\2\u00e3\60\3\2\2\2\u00e4\u00e5\7"+ + "?\2\2\u00e5\u00e6\7?\2\2\u00e6\62\3\2\2\2\u00e7\u00e8\7#\2\2\u00e8\u00e9"+ + "\7?\2\2\u00e9\64\3\2\2\2\u00ea\u00eb\7>\2\2\u00eb\66\3\2\2\2\u00ec\u00ed"+ + "\7>\2\2\u00ed\u00ee\7?\2\2\u00ee8\3\2\2\2\u00ef\u00f0\7@\2\2\u00f0:\3"+ + "\2\2\2\u00f1\u00f2\7@\2\2\u00f2\u00f3\7?\2\2\u00f3<\3\2\2\2\u00f4\u00f5"+ + "\7-\2\2\u00f5>\3\2\2\2\u00f6\u00f7\7/\2\2\u00f7@\3\2\2\2\u00f8\u00f9\7"+ + ",\2\2\u00f9B\3\2\2\2\u00fa\u00fb\7\61\2\2\u00fbD\3\2\2\2\u00fc\u00fd\7"+ + "\'\2\2\u00fdF\3\2\2\2\u00fe\u00ff\7\60\2\2\u00ffH\3\2\2\2\u0100\u0101"+ + "\7.\2\2\u0101J\3\2\2\2\u0102\u0103\7]\2\2\u0103L\3\2\2\2\u0104\u0105\7"+ + "_\2\2\u0105N\3\2\2\2\u0106\u0107\7*\2\2\u0107P\3\2\2\2\u0108\u0109\7+"+ + "\2\2\u0109R\3\2\2\2\u010a\u010b\7~\2\2\u010bT\3\2\2\2\u010c\u010d\7^\2"+ + "\2\u010d\u010e\t\2\2\2\u010eV\3\2\2\2\u010f\u0110\t\3\2\2\u0110X\3\2\2"+ + "\2\u0111\u0112\7^\2\2\u0112\u0113\7w\2\2\u0113\u0114\3\2\2\2\u0114\u0116"+ + "\7}\2\2\u0115\u0117\5W,\2\u0116\u0115\3\2\2\2\u0117\u0118\3\2\2\2\u0118"+ + "\u0116\3\2\2\2\u0118\u0119\3\2\2\2\u0119\u011a\3\2\2\2\u011a\u011b\7\177"+ + "\2\2\u011bZ\3\2\2\2\u011c\u011d\n\4\2\2\u011d\\\3\2\2\2\u011e\u0124\7"+ + "$\2\2\u011f\u0123\5U+\2\u0120\u0123\5Y-\2\u0121\u0123\5[.\2\u0122\u011f"+ + "\3\2\2\2\u0122\u0120\3\2\2\2\u0122\u0121\3\2\2\2\u0123\u0126\3\2\2\2\u0124"+ + "\u0122\3\2\2\2\u0124\u0125\3\2\2\2\u0125\u0127\3\2\2\2\u0126\u0124\3\2"+ + "\2\2\u0127\u015f\7$\2\2\u0128\u0129\7$\2\2\u0129\u012a\7$\2\2\u012a\u012b"+ + "\7$\2\2\u012b\u012f\3\2\2\2\u012c\u012e\n\5\2\2\u012d\u012c\3\2\2\2\u012e"+ + 
"\u0131\3\2\2\2\u012f\u0130\3\2\2\2\u012f\u012d\3\2\2\2\u0130\u0132\3\2"+ + "\2\2\u0131\u012f\3\2\2\2\u0132\u0133\7$\2\2\u0133\u0134\7$\2\2\u0134\u0135"+ + "\7$\2\2\u0135\u0137\3\2\2\2\u0136\u0138\7$\2\2\u0137\u0136\3\2\2\2\u0137"+ + "\u0138\3\2\2\2\u0138\u013a\3\2\2\2\u0139\u013b\7$\2\2\u013a\u0139\3\2"+ + "\2\2\u013a\u013b\3\2\2\2\u013b\u015f\3\2\2\2\u013c\u0142\7)\2\2\u013d"+ + "\u013e\7^\2\2\u013e\u0141\t\2\2\2\u013f\u0141\n\6\2\2\u0140\u013d\3\2"+ + "\2\2\u0140\u013f\3\2\2\2\u0141\u0144\3\2\2\2\u0142\u0140\3\2\2\2\u0142"+ + "\u0143\3\2\2\2\u0143\u0145\3\2\2\2\u0144\u0142\3\2\2\2\u0145\u015f\7)"+ + "\2\2\u0146\u0147\7A\2\2\u0147\u0148\7$\2\2\u0148\u014e\3\2\2\2\u0149\u014a"+ + "\7^\2\2\u014a\u014d\7$\2\2\u014b\u014d\n\7\2\2\u014c\u0149\3\2\2\2\u014c"+ + "\u014b\3\2\2\2\u014d\u0150\3\2\2\2\u014e\u014c\3\2\2\2\u014e\u014f\3\2"+ + "\2\2\u014f\u0151\3\2\2\2\u0150\u014e\3\2\2\2\u0151\u015f\7$\2\2\u0152"+ + "\u0153\7A\2\2\u0153\u0154\7)\2\2\u0154\u015a\3\2\2\2\u0155\u0156\7^\2"+ + "\2\u0156\u0159\7)\2\2\u0157\u0159\n\b\2\2\u0158\u0155\3\2\2\2\u0158\u0157"+ + "\3\2\2\2\u0159\u015c\3\2\2\2\u015a\u0158\3\2\2\2\u015a\u015b\3\2\2\2\u015b"+ + "\u015d\3\2\2\2\u015c\u015a\3\2\2\2\u015d\u015f\7)\2\2\u015e\u011e\3\2"+ + "\2\2\u015e\u0128\3\2\2\2\u015e\u013c\3\2\2\2\u015e\u0146\3\2\2\2\u015e"+ + "\u0152\3\2\2\2\u015f^\3\2\2\2\u0160\u0162\5k\66\2\u0161\u0160\3\2\2\2"+ + "\u0162\u0163\3\2\2\2\u0163\u0161\3\2\2\2\u0163\u0164\3\2\2\2\u0164`\3"+ + "\2\2\2\u0165\u0167\5k\66\2\u0166\u0165\3\2\2\2\u0167\u0168\3\2\2\2\u0168"+ + "\u0166\3\2\2\2\u0168\u0169\3\2\2\2\u0169\u016a\3\2\2\2\u016a\u016e\5G"+ + "$\2\u016b\u016d\5k\66\2\u016c\u016b\3\2\2\2\u016d\u0170\3\2\2\2\u016e"+ + "\u016c\3\2\2\2\u016e\u016f\3\2\2\2\u016f\u0190\3\2\2\2\u0170\u016e\3\2"+ + "\2\2\u0171\u0173\5G$\2\u0172\u0174\5k\66\2\u0173\u0172\3\2\2\2\u0174\u0175"+ + "\3\2\2\2\u0175\u0173\3\2\2\2\u0175\u0176\3\2\2\2\u0176\u0190\3\2\2\2\u0177"+ + "\u0179\5k\66\2\u0178\u0177\3\2\2\2\u0179\u017a\3\2\2\2\u017a\u0178\3\2"+ + "\2\2\u017a\u017b\3\2\2\2\u017b\u0183\3\2\2\2\u017c\u0180\5G$\2\u017d\u017f"+ + "\5k\66\2\u017e\u017d\3\2\2\2\u017f\u0182\3\2\2\2\u0180\u017e\3\2\2\2\u0180"+ + "\u0181\3\2\2\2\u0181\u0184\3\2\2\2\u0182\u0180\3\2\2\2\u0183\u017c\3\2"+ + "\2\2\u0183\u0184\3\2\2\2\u0184\u0185\3\2\2\2\u0185\u0186\5i\65\2\u0186"+ + "\u0190\3\2\2\2\u0187\u0189\5G$\2\u0188\u018a\5k\66\2\u0189\u0188\3\2\2"+ + "\2\u018a\u018b\3\2\2\2\u018b\u0189\3\2\2\2\u018b\u018c\3\2\2\2\u018c\u018d"+ + "\3\2\2\2\u018d\u018e\5i\65\2\u018e\u0190\3\2\2\2\u018f\u0166\3\2\2\2\u018f"+ + "\u0171\3\2\2\2\u018f\u0178\3\2\2\2\u018f\u0187\3\2\2\2\u0190b\3\2\2\2"+ + "\u0191\u0194\5m\67\2\u0192\u0194\t\t\2\2\u0193\u0191\3\2\2\2\u0193\u0192"+ + "\3\2\2\2\u0194\u019a\3\2\2\2\u0195\u0199\5m\67\2\u0196\u0199\5k\66\2\u0197"+ + "\u0199\7a\2\2\u0198\u0195\3\2\2\2\u0198\u0196\3\2\2\2\u0198\u0197\3\2"+ + "\2\2\u0199\u019c\3\2\2\2\u019a\u0198\3\2\2\2\u019a\u019b\3\2\2\2\u019b"+ + "d\3\2\2\2\u019c\u019a\3\2\2\2\u019d\u01a3\7b\2\2\u019e\u01a2\n\n\2\2\u019f"+ + "\u01a0\7b\2\2\u01a0\u01a2\7b\2\2\u01a1\u019e\3\2\2\2\u01a1\u019f\3\2\2"+ + "\2\u01a2\u01a5\3\2\2\2\u01a3\u01a1\3\2\2\2\u01a3\u01a4\3\2\2\2\u01a4\u01a6"+ + "\3\2\2\2\u01a5\u01a3\3\2\2\2\u01a6\u01a7\7b\2\2\u01a7f\3\2\2\2\u01a8\u01ae"+ + "\5m\67\2\u01a9\u01ad\5m\67\2\u01aa\u01ad\5k\66\2\u01ab\u01ad\7a\2\2\u01ac"+ + "\u01a9\3\2\2\2\u01ac\u01aa\3\2\2\2\u01ac\u01ab\3\2\2\2\u01ad\u01b0\3\2"+ + "\2\2\u01ae\u01ac\3\2\2\2\u01ae\u01af\3\2\2\2\u01af\u01b1\3\2\2\2\u01b0"+ + "\u01ae\3\2\2\2\u01b1\u01b2\7\u0080\2\2\u01b2h\3\2\2\2\u01b3\u01b5\t\13"+ + 
"\2\2\u01b4\u01b6\t\f\2\2\u01b5\u01b4\3\2\2\2\u01b5\u01b6\3\2\2\2\u01b6"+ + "\u01b8\3\2\2\2\u01b7\u01b9\5k\66\2\u01b8\u01b7\3\2\2\2\u01b9\u01ba\3\2"+ + "\2\2\u01ba\u01b8\3\2\2\2\u01ba\u01bb\3\2\2\2\u01bbj\3\2\2\2\u01bc\u01bd"+ + "\t\r\2\2\u01bdl\3\2\2\2\u01be\u01bf\t\16\2\2\u01bfn\3\2\2\2\u01c0\u01c1"+ + "\7\61\2\2\u01c1\u01c2\7\61\2\2\u01c2\u01c6\3\2\2\2\u01c3\u01c5\n\5\2\2"+ + "\u01c4\u01c3\3\2\2\2\u01c5\u01c8\3\2\2\2\u01c6\u01c4\3\2\2\2\u01c6\u01c7"+ + "\3\2\2\2\u01c7\u01ca\3\2\2\2\u01c8\u01c6\3\2\2\2\u01c9\u01cb\7\17\2\2"+ + "\u01ca\u01c9\3\2\2\2\u01ca\u01cb\3\2\2\2\u01cb\u01cd\3\2\2\2\u01cc\u01ce"+ + "\7\f\2\2\u01cd\u01cc\3\2\2\2\u01cd\u01ce\3\2\2\2\u01ce\u01cf\3\2\2\2\u01cf"+ "\u01d0\b8\2\2\u01d0p\3\2\2\2\u01d1\u01d2\7\61\2\2\u01d2\u01d3\7,\2\2\u01d3"+ "\u01d8\3\2\2\2\u01d4\u01d7\5q9\2\u01d5\u01d7\13\2\2\2\u01d6\u01d4\3\2"+ "\2\2\u01d6\u01d5\3\2\2\2\u01d7\u01da\3\2\2\2\u01d8\u01d9\3\2\2\2\u01d8"+ diff --git a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/parser/EqlBaseParser.java b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/parser/EqlBaseParser.java index a7f98ae9524d8..67f2652b5496a 100644 --- a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/parser/EqlBaseParser.java +++ b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/parser/EqlBaseParser.java @@ -1,31 +1,17 @@ // ANTLR GENERATED CODE: DO NOT EDIT package org.elasticsearch.xpack.eql.parser; - -import org.antlr.v4.runtime.FailedPredicateException; -import org.antlr.v4.runtime.NoViableAltException; -import org.antlr.v4.runtime.Parser; -import org.antlr.v4.runtime.ParserRuleContext; -import org.antlr.v4.runtime.RecognitionException; -import org.antlr.v4.runtime.RuleContext; -import org.antlr.v4.runtime.RuntimeMetaData; -import org.antlr.v4.runtime.Token; -import org.antlr.v4.runtime.TokenStream; -import org.antlr.v4.runtime.Vocabulary; -import org.antlr.v4.runtime.VocabularyImpl; -import org.antlr.v4.runtime.atn.ATN; -import org.antlr.v4.runtime.atn.ATNDeserializer; -import org.antlr.v4.runtime.atn.ParserATNSimulator; -import org.antlr.v4.runtime.atn.PredictionContextCache; +import org.antlr.v4.runtime.atn.*; import org.antlr.v4.runtime.dfa.DFA; -import org.antlr.v4.runtime.tree.ParseTreeListener; -import org.antlr.v4.runtime.tree.ParseTreeVisitor; -import org.antlr.v4.runtime.tree.TerminalNode; - +import org.antlr.v4.runtime.*; +import org.antlr.v4.runtime.misc.*; +import org.antlr.v4.runtime.tree.*; import java.util.List; +import java.util.Iterator; +import java.util.ArrayList; @SuppressWarnings({"all", "warnings", "unchecked", "unused", "cast"}) class EqlBaseParser extends Parser { - static { RuntimeMetaData.checkVersion("4.5.3", RuntimeMetaData.VERSION); } + static { RuntimeMetaData.checkVersion("4.9.2", RuntimeMetaData.VERSION); } protected static final DFA[] _decisionToDFA; protected static final PredictionContextCache _sharedContextCache = @@ -48,31 +34,41 @@ class EqlBaseParser extends Parser { RULE_functionName = 21, RULE_constant = 22, RULE_comparisonOperator = 23, RULE_booleanValue = 24, RULE_qualifiedName = 25, RULE_identifier = 26, RULE_timeUnit = 27, RULE_number = 28, RULE_string = 29, RULE_eventValue = 30; - public static final String[] ruleNames = { - "singleStatement", "singleExpression", "statement", "query", "sequenceParams", - "sequence", "join", "pipe", "joinKeys", "joinTerm", "sequenceTerm", "subquery", - "eventQuery", "eventFilter", "expression", "booleanExpression", "valueExpression", - "operatorExpression", "predicate", "primaryExpression", 
"functionExpression", - "functionName", "constant", "comparisonOperator", "booleanValue", "qualifiedName", - "identifier", "timeUnit", "number", "string", "eventValue" - }; - - private static final String[] _LITERAL_NAMES = { - null, "'and'", "'any'", "'by'", "'false'", "'in'", "'in~'", "'join'", - "'like'", "'like~'", "'maxspan'", "'not'", "'null'", "'of'", "'or'", "'regex'", - "'regex~'", "'sequence'", "'true'", "'until'", "'where'", "'with'", "':'", - "'='", "'=='", "'!='", "'<'", "'<='", "'>'", "'>='", "'+'", "'-'", "'*'", - "'/'", "'%'", "'.'", "','", "'['", "']'", "'('", "')'", "'|'" - }; - private static final String[] _SYMBOLIC_NAMES = { - null, "AND", "ANY", "BY", "FALSE", "IN", "IN_INSENSITIVE", "JOIN", "LIKE", - "LIKE_INSENSITIVE", "MAXSPAN", "NOT", "NULL", "OF", "OR", "REGEX", "REGEX_INSENSITIVE", - "SEQUENCE", "TRUE", "UNTIL", "WHERE", "WITH", "SEQ", "ASGN", "EQ", "NEQ", - "LT", "LTE", "GT", "GTE", "PLUS", "MINUS", "ASTERISK", "SLASH", "PERCENT", - "DOT", "COMMA", "LB", "RB", "LP", "RP", "PIPE", "STRING", "INTEGER_VALUE", - "DECIMAL_VALUE", "IDENTIFIER", "QUOTED_IDENTIFIER", "TILDE_IDENTIFIER", - "LINE_COMMENT", "BRACKETED_COMMENT", "WS" - }; + private static String[] makeRuleNames() { + return new String[] { + "singleStatement", "singleExpression", "statement", "query", "sequenceParams", + "sequence", "join", "pipe", "joinKeys", "joinTerm", "sequenceTerm", "subquery", + "eventQuery", "eventFilter", "expression", "booleanExpression", "valueExpression", + "operatorExpression", "predicate", "primaryExpression", "functionExpression", + "functionName", "constant", "comparisonOperator", "booleanValue", "qualifiedName", + "identifier", "timeUnit", "number", "string", "eventValue" + }; + } + public static final String[] ruleNames = makeRuleNames(); + + private static String[] makeLiteralNames() { + return new String[] { + null, "'and'", "'any'", "'by'", "'false'", "'in'", "'in~'", "'join'", + "'like'", "'like~'", "'maxspan'", "'not'", "'null'", "'of'", "'or'", + "'regex'", "'regex~'", "'sequence'", "'true'", "'until'", "'where'", + "'with'", "':'", "'='", "'=='", "'!='", "'<'", "'<='", "'>'", "'>='", + "'+'", "'-'", "'*'", "'/'", "'%'", "'.'", "','", "'['", "']'", "'('", + "')'", "'|'" + }; + } + private static final String[] _LITERAL_NAMES = makeLiteralNames(); + private static String[] makeSymbolicNames() { + return new String[] { + null, "AND", "ANY", "BY", "FALSE", "IN", "IN_INSENSITIVE", "JOIN", "LIKE", + "LIKE_INSENSITIVE", "MAXSPAN", "NOT", "NULL", "OF", "OR", "REGEX", "REGEX_INSENSITIVE", + "SEQUENCE", "TRUE", "UNTIL", "WHERE", "WITH", "SEQ", "ASGN", "EQ", "NEQ", + "LT", "LTE", "GT", "GTE", "PLUS", "MINUS", "ASTERISK", "SLASH", "PERCENT", + "DOT", "COMMA", "LB", "RB", "LP", "RP", "PIPE", "STRING", "INTEGER_VALUE", + "DECIMAL_VALUE", "IDENTIFIER", "QUOTED_IDENTIFIER", "TILDE_IDENTIFIER", + "LINE_COMMENT", "BRACKETED_COMMENT", "WS" + }; + } + private static final String[] _SYMBOLIC_NAMES = makeSymbolicNames(); public static final Vocabulary VOCABULARY = new VocabularyImpl(_LITERAL_NAMES, _SYMBOLIC_NAMES); /** @@ -122,6 +118,7 @@ public EqlBaseParser(TokenStream input) { super(input); _interp = new ParserATNSimulator(this,_ATN,_decisionToDFA,_sharedContextCache); } + public static class SingleStatementContext extends ParserRuleContext { public StatementContext statement() { return getRuleContext(StatementContext.class,0); @@ -315,6 +312,7 @@ public final QueryContext query() throws RecognitionException { enterRule(_localctx, 6, RULE_query); try { setState(78); + 
_errHandler.sync(this); switch (_input.LA(1)) { case SEQUENCE: enterOuterAlt(_localctx, 1); @@ -456,12 +454,14 @@ public final SequenceContext sequence() throws RecognitionException { setState(85); match(SEQUENCE); setState(94); + _errHandler.sync(this); switch (_input.LA(1)) { case BY: { setState(86); ((SequenceContext)_localctx).by = joinKeys(); setState(88); + _errHandler.sync(this); _la = _input.LA(1); if (_la==WITH) { { @@ -477,6 +477,7 @@ public final SequenceContext sequence() throws RecognitionException { setState(90); sequenceParams(); setState(92); + _errHandler.sync(this); _la = _input.LA(1); if (_la==BY) { { @@ -490,7 +491,7 @@ public final SequenceContext sequence() throws RecognitionException { case LB: break; default: - throw new NoViableAltException(this); + break; } setState(96); sequenceTerm(); @@ -509,6 +510,7 @@ public final SequenceContext sequence() throws RecognitionException { _la = _input.LA(1); } while ( _la==LB ); setState(104); + _errHandler.sync(this); _la = _input.LA(1); if (_la==UNTIL) { { @@ -575,6 +577,7 @@ public final JoinContext join() throws RecognitionException { setState(106); match(JOIN); setState(108); + _errHandler.sync(this); _la = _input.LA(1); if (_la==BY) { { @@ -600,6 +603,7 @@ public final JoinContext join() throws RecognitionException { _la = _input.LA(1); } while ( _la==LB ); setState(118); + _errHandler.sync(this); _la = _input.LA(1); if (_la==UNTIL) { { @@ -668,6 +672,7 @@ public final PipeContext pipe() throws RecognitionException { setState(121); ((PipeContext)_localctx).kind = match(IDENTIFIER); setState(130); + _errHandler.sync(this); _la = _input.LA(1); if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << FALSE) | (1L << NOT) | (1L << NULL) | (1L << TRUE) | (1L << PLUS) | (1L << MINUS) | (1L << LP) | (1L << STRING) | (1L << INTEGER_VALUE) | (1L << DECIMAL_VALUE) | (1L << IDENTIFIER) | (1L << QUOTED_IDENTIFIER) | (1L << TILDE_IDENTIFIER))) != 0)) { { @@ -813,6 +818,7 @@ public final JoinTermContext joinTerm() throws RecognitionException { setState(141); subquery(); setState(143); + _errHandler.sync(this); _la = _input.LA(1); if (_la==BY) { { @@ -871,6 +877,7 @@ public final SequenceTermContext sequenceTerm() throws RecognitionException { setState(145); subquery(); setState(147); + _errHandler.sync(this); _la = _input.LA(1); if (_la==BY) { { @@ -1022,6 +1029,7 @@ public final EventFilterContext eventFilter() throws RecognitionException { enterOuterAlt(_localctx, 1); { setState(157); + _errHandler.sync(this); switch (_input.LA(1)) { case ANY: { @@ -1512,6 +1520,7 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE enterOuterAlt(_localctx, 1); { setState(198); + _errHandler.sync(this); switch (_input.LA(1)) { case FALSE: case NULL: @@ -1553,7 +1562,10 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { ((ArithmeticUnaryContext)_localctx).operator = (Token)_errHandler.recoverInline(this); - } else { + } + else { + if ( _input.LA(1)==Token.EOF ) matchedEOF = true; + _errHandler.reportMatch(this); consume(); } setState(197); @@ -1587,7 +1599,10 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _la = _input.LA(1); if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << ASTERISK) | (1L << SLASH) | (1L << PERCENT))) != 0)) ) { ((ArithmeticBinaryContext)_localctx).operator = (Token)_errHandler.recoverInline(this); - } else { + } + else { + if ( _input.LA(1)==Token.EOF ) matchedEOF = true; + 
_errHandler.reportMatch(this); consume(); } setState(202); @@ -1606,7 +1621,10 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { ((ArithmeticBinaryContext)_localctx).operator = (Token)_errHandler.recoverInline(this); - } else { + } + else { + if ( _input.LA(1)==Token.EOF ) matchedEOF = true; + _errHandler.reportMatch(this); consume(); } setState(205); @@ -1692,6 +1710,7 @@ public final PredicateContext predicate() throws RecognitionException { enterOuterAlt(_localctx, 1); { setState(212); + _errHandler.sync(this); _la = _input.LA(1); if (_la==NOT) { { @@ -1705,7 +1724,10 @@ public final PredicateContext predicate() throws RecognitionException { _la = _input.LA(1); if ( !(_la==IN || _la==IN_INSENSITIVE) ) { ((PredicateContext)_localctx).kind = (Token)_errHandler.recoverInline(this); - } else { + } + else { + if ( _input.LA(1)==Token.EOF ) matchedEOF = true; + _errHandler.reportMatch(this); consume(); } setState(215); @@ -1740,7 +1762,10 @@ public final PredicateContext predicate() throws RecognitionException { _la = _input.LA(1); if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << LIKE) | (1L << LIKE_INSENSITIVE) | (1L << REGEX) | (1L << REGEX_INSENSITIVE) | (1L << SEQ))) != 0)) ) { ((PredicateContext)_localctx).kind = (Token)_errHandler.recoverInline(this); - } else { + } + else { + if ( _input.LA(1)==Token.EOF ) matchedEOF = true; + _errHandler.reportMatch(this); consume(); } setState(227); @@ -1755,7 +1780,10 @@ public final PredicateContext predicate() throws RecognitionException { _la = _input.LA(1); if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << LIKE) | (1L << LIKE_INSENSITIVE) | (1L << REGEX) | (1L << REGEX_INSENSITIVE) | (1L << SEQ))) != 0)) ) { ((PredicateContext)_localctx).kind = (Token)_errHandler.recoverInline(this); - } else { + } + else { + if ( _input.LA(1)==Token.EOF ) matchedEOF = true; + _errHandler.reportMatch(this); consume(); } setState(229); @@ -1989,6 +2017,7 @@ public final FunctionExpressionContext functionExpression() throws RecognitionEx setState(252); match(LP); setState(261); + _errHandler.sync(this); _la = _input.LA(1); if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << FALSE) | (1L << NOT) | (1L << NULL) | (1L << TRUE) | (1L << PLUS) | (1L << MINUS) | (1L << LP) | (1L << STRING) | (1L << INTEGER_VALUE) | (1L << DECIMAL_VALUE) | (1L << IDENTIFIER) | (1L << QUOTED_IDENTIFIER) | (1L << TILDE_IDENTIFIER))) != 0)) { { @@ -2061,7 +2090,10 @@ public final FunctionNameContext functionName() throws RecognitionException { _la = _input.LA(1); if ( !(_la==IDENTIFIER || _la==TILDE_IDENTIFIER) ) { _errHandler.recoverInline(this); - } else { + } + else { + if ( _input.LA(1)==Token.EOF ) matchedEOF = true; + _errHandler.reportMatch(this); consume(); } } @@ -2168,6 +2200,7 @@ public final ConstantContext constant() throws RecognitionException { enterRule(_localctx, 44, RULE_constant); try { setState(271); + _errHandler.sync(this); switch (_input.LA(1)) { case NULL: _localctx = new NullLiteralContext(_localctx); @@ -2255,7 +2288,10 @@ public final ComparisonOperatorContext comparisonOperator() throws RecognitionEx _la = _input.LA(1); if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << EQ) | (1L << NEQ) | (1L << LT) | (1L << LTE) | (1L << GT) | (1L << GTE))) != 0)) ) { _errHandler.recoverInline(this); - } else { + } + else { + if ( _input.LA(1)==Token.EOF ) matchedEOF = true; + _errHandler.reportMatch(this); consume(); } } @@ -2304,7 +2340,10 @@ public final 
BooleanValueContext booleanValue() throws RecognitionException { _la = _input.LA(1); if ( !(_la==FALSE || _la==TRUE) ) { _errHandler.recoverInline(this); - } else { + } + else { + if ( _input.LA(1)==Token.EOF ) matchedEOF = true; + _errHandler.reportMatch(this); consume(); } } @@ -2379,6 +2418,7 @@ public final QualifiedNameContext qualifiedName() throws RecognitionException { if ( _alt==1 ) { { setState(287); + _errHandler.sync(this); switch (_input.LA(1)) { case DOT: { @@ -2465,7 +2505,10 @@ public final IdentifierContext identifier() throws RecognitionException { _la = _input.LA(1); if ( !(_la==IDENTIFIER || _la==QUOTED_IDENTIFIER) ) { _errHandler.recoverInline(this); - } else { + } + else { + if ( _input.LA(1)==Token.EOF ) matchedEOF = true; + _errHandler.reportMatch(this); consume(); } } @@ -2516,6 +2559,7 @@ public final TimeUnitContext timeUnit() throws RecognitionException { setState(294); number(); setState(296); + _errHandler.sync(this); _la = _input.LA(1); if (_la==IDENTIFIER) { { @@ -2588,6 +2632,7 @@ public final NumberContext number() throws RecognitionException { enterRule(_localctx, 56, RULE_number); try { setState(300); + _errHandler.sync(this); switch (_input.LA(1)) { case DECIMAL_VALUE: _localctx = new DecimalLiteralContext(_localctx); @@ -2695,7 +2740,10 @@ public final EventValueContext eventValue() throws RecognitionException { _la = _input.LA(1); if ( !(_la==STRING || _la==IDENTIFIER) ) { _errHandler.recoverInline(this); - } else { + } + else { + if ( _input.LA(1)==Token.EOF ) matchedEOF = true; + _errHandler.reportMatch(this); consume(); } } @@ -2740,7 +2788,7 @@ private boolean operatorExpression_sempred(OperatorExpressionContext _localctx, } public static final String _serializedATN = - "\3\u0430\ud6d1\u8206\uad2d\u4417\uaef1\u8d80\uaadd\3\64\u0135\4\2\t\2"+ + "\3\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964\3\64\u0135\4\2\t\2"+ "\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7\4\b\t\b\4\t\t\t\4\n\t\n\4\13"+ "\t\13\4\f\t\f\4\r\t\r\4\16\t\16\4\17\t\17\4\20\t\20\4\21\t\21\4\22\t\22"+ "\4\23\t\23\4\24\t\24\4\25\t\25\4\26\t\26\4\27\t\27\4\30\t\30\4\31\t\31"+ @@ -2766,9 +2814,9 @@ private boolean operatorExpression_sempred(OperatorExpressionContext _localctx, "\u0125\13\33\3\34\3\34\3\35\3\35\5\35\u012b\n\35\3\36\3\36\5\36\u012f"+ "\n\36\3\37\3\37\3 \3 \3 \2\4 $!\2\4\6\b\n\f\16\20\22\24\26\30\32\34\36"+ " \"$&(*,.\60\62\64\668:<>\2\13\3\2 !\3\2\"$\3\2\7\b\5\2\n\13\21\22\30"+ - "\30\4\2//\61\61\3\2\32\37\4\2\6\6\24\24\3\2/\60\4\2,,//\u0142\2@\3\2\2"+ - "\2\4C\3\2\2\2\6F\3\2\2\2\bP\3\2\2\2\nR\3\2\2\2\fW\3\2\2\2\16l\3\2\2\2"+ - "\20z\3\2\2\2\22\u0086\3\2\2\2\24\u008f\3\2\2\2\26\u0093\3\2\2\2\30\u0097"+ + "\30\4\2//\61\61\3\2\32\37\4\2\6\6\24\24\3\2/\60\4\2,,//\2\u0142\2@\3\2"+ + "\2\2\4C\3\2\2\2\6F\3\2\2\2\bP\3\2\2\2\nR\3\2\2\2\fW\3\2\2\2\16l\3\2\2"+ + "\2\20z\3\2\2\2\22\u0086\3\2\2\2\24\u008f\3\2\2\2\26\u0093\3\2\2\2\30\u0097"+ "\3\2\2\2\32\u009b\3\2\2\2\34\u009f\3\2\2\2\36\u00a4\3\2\2\2 \u00ad\3\2"+ "\2\2\"\u00bf\3\2\2\2$\u00c8\3\2\2\2&\u00f2\3\2\2\2(\u00fb\3\2\2\2*\u00fd"+ "\3\2\2\2,\u010b\3\2\2\2.\u0111\3\2\2\2\60\u0113\3\2\2\2\62\u0115\3\2\2"+ diff --git a/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/analysis/VerifierTests.java b/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/analysis/VerifierTests.java index 0e40a727a9983..7534b05401657 100644 --- a/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/analysis/VerifierTests.java +++ b/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/analysis/VerifierTests.java @@ -30,6 +30,7 
@@ import java.util.function.Function; import static java.util.Collections.emptyMap; +import static org.hamcrest.Matchers.startsWith; public class VerifierTests extends ESTestCase { @@ -94,7 +95,7 @@ public void testQueryCondition() { } public void testQueryStartsWithNumber() { - assertEquals("1:1: no viable alternative at input '42'", errorParsing("42 where true")); + assertThat(errorParsing("42 where true"), startsWith("1:1: mismatched input '42' expecting")); } public void testMissingColumn() { diff --git a/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/parser/ExpressionTests.java b/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/parser/ExpressionTests.java index ab9526f8e206d..d9243f8bf1af1 100644 --- a/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/parser/ExpressionTests.java +++ b/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/parser/ExpressionTests.java @@ -380,7 +380,7 @@ public void testBackQuotedIdentifierWithUnescapedBackQuotes() { public void testIdentifierForEventTypeDisallowed() { ParsingException e = expectThrows(ParsingException.class, "Expected syntax error", () -> parser.createStatement("`identifier` where foo == true")); - assertEquals("line 1:1: no viable alternative at input '`identifier`'", e.getMessage()); + assertThat(e.getMessage(), startsWith("line 1:1: mismatched input '`identifier`' expecting")); } public void testFunctions() { diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseLexer.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseLexer.java index 03cadeaaa2426..21e57bf267115 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseLexer.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseLexer.java @@ -11,7 +11,7 @@ @SuppressWarnings({"all", "warnings", "unchecked", "unused", "cast"}) class SqlBaseLexer extends Lexer { - static { RuntimeMetaData.checkVersion("4.5.3", RuntimeMetaData.VERSION); } + static { RuntimeMetaData.checkVersion("4.9.2", RuntimeMetaData.VERSION); } protected static final DFA[] _decisionToDFA; protected static final PredictionContextCache _sharedContextCache = @@ -38,74 +38,88 @@ class SqlBaseLexer extends Lexer { DECIMAL_VALUE=129, IDENTIFIER=130, DIGIT_IDENTIFIER=131, TABLE_IDENTIFIER=132, QUOTED_IDENTIFIER=133, BACKQUOTED_IDENTIFIER=134, SIMPLE_COMMENT=135, BRACKETED_COMMENT=136, WS=137, UNRECOGNIZED=138; + public static String[] channelNames = { + "DEFAULT_TOKEN_CHANNEL", "HIDDEN" + }; + public static String[] modeNames = { "DEFAULT_MODE" }; - public static final String[] ruleNames = { - "T__0", "T__1", "T__2", "T__3", "ALL", "ANALYZE", "ANALYZED", "AND", "ANY", - "AS", "ASC", "BETWEEN", "BY", "CASE", "CAST", "CATALOG", "CATALOGS", "COLUMNS", - "CONVERT", "CURRENT_DATE", "CURRENT_TIME", "CURRENT_TIMESTAMP", "DAY", - "DAYS", "DEBUG", "DESC", "DESCRIBE", "DISTINCT", "ELSE", "END", "ESCAPE", - "EXECUTABLE", "EXISTS", "EXPLAIN", "EXTRACT", "FALSE", "FIRST", "FOR", - "FORMAT", "FROM", "FROZEN", "FULL", "FUNCTIONS", "GRAPHVIZ", "GROUP", - "HAVING", "HOUR", "HOURS", "IN", "INCLUDE", "INNER", "INTERVAL", "IS", - "JOIN", "LAST", "LEFT", "LIKE", "LIMIT", "MAPPED", "MATCH", "MINUTE", - "MINUTES", "MONTH", "MONTHS", "NATURAL", "NOT", "NULL", "NULLS", "ON", - "OPTIMIZED", "OR", "ORDER", "OUTER", "PARSED", "PHYSICAL", "PIVOT", "PLAN", - "RIGHT", "RLIKE", "QUERY", "SCHEMAS", "SECOND", "SECONDS", "SELECT", "SHOW", - "SYS", "TABLE", "TABLES", "TEXT", "THEN", "TRUE", "TO", 
"TOP", "TYPE", - "TYPES", "USING", "VERIFY", "WHEN", "WHERE", "WITH", "YEAR", "YEARS", - "ESCAPE_ESC", "FUNCTION_ESC", "LIMIT_ESC", "DATE_ESC", "TIME_ESC", "TIMESTAMP_ESC", - "GUID_ESC", "ESC_START", "ESC_END", "EQ", "NULLEQ", "NEQ", "LT", "LTE", - "GT", "GTE", "PLUS", "MINUS", "ASTERISK", "SLASH", "PERCENT", "CAST_OP", - "DOT", "PARAM", "STRING", "INTEGER_VALUE", "DECIMAL_VALUE", "IDENTIFIER", - "DIGIT_IDENTIFIER", "TABLE_IDENTIFIER", "QUOTED_IDENTIFIER", "BACKQUOTED_IDENTIFIER", - "EXPONENT", "DIGIT", "LETTER", "SIMPLE_COMMENT", "BRACKETED_COMMENT", - "WS", "UNRECOGNIZED" - }; + private static String[] makeRuleNames() { + return new String[] { + "T__0", "T__1", "T__2", "T__3", "ALL", "ANALYZE", "ANALYZED", "AND", + "ANY", "AS", "ASC", "BETWEEN", "BY", "CASE", "CAST", "CATALOG", "CATALOGS", + "COLUMNS", "CONVERT", "CURRENT_DATE", "CURRENT_TIME", "CURRENT_TIMESTAMP", + "DAY", "DAYS", "DEBUG", "DESC", "DESCRIBE", "DISTINCT", "ELSE", "END", + "ESCAPE", "EXECUTABLE", "EXISTS", "EXPLAIN", "EXTRACT", "FALSE", "FIRST", + "FOR", "FORMAT", "FROM", "FROZEN", "FULL", "FUNCTIONS", "GRAPHVIZ", "GROUP", + "HAVING", "HOUR", "HOURS", "IN", "INCLUDE", "INNER", "INTERVAL", "IS", + "JOIN", "LAST", "LEFT", "LIKE", "LIMIT", "MAPPED", "MATCH", "MINUTE", + "MINUTES", "MONTH", "MONTHS", "NATURAL", "NOT", "NULL", "NULLS", "ON", + "OPTIMIZED", "OR", "ORDER", "OUTER", "PARSED", "PHYSICAL", "PIVOT", "PLAN", + "RIGHT", "RLIKE", "QUERY", "SCHEMAS", "SECOND", "SECONDS", "SELECT", + "SHOW", "SYS", "TABLE", "TABLES", "TEXT", "THEN", "TRUE", "TO", "TOP", + "TYPE", "TYPES", "USING", "VERIFY", "WHEN", "WHERE", "WITH", "YEAR", + "YEARS", "ESCAPE_ESC", "FUNCTION_ESC", "LIMIT_ESC", "DATE_ESC", "TIME_ESC", + "TIMESTAMP_ESC", "GUID_ESC", "ESC_START", "ESC_END", "EQ", "NULLEQ", + "NEQ", "LT", "LTE", "GT", "GTE", "PLUS", "MINUS", "ASTERISK", "SLASH", + "PERCENT", "CAST_OP", "DOT", "PARAM", "STRING", "INTEGER_VALUE", "DECIMAL_VALUE", + "IDENTIFIER", "DIGIT_IDENTIFIER", "TABLE_IDENTIFIER", "QUOTED_IDENTIFIER", + "BACKQUOTED_IDENTIFIER", "EXPONENT", "DIGIT", "LETTER", "SIMPLE_COMMENT", + "BRACKETED_COMMENT", "WS", "UNRECOGNIZED" + }; + } + public static final String[] ruleNames = makeRuleNames(); - private static final String[] _LITERAL_NAMES = { - null, "'('", "')'", "','", "':'", "'ALL'", "'ANALYZE'", "'ANALYZED'", - "'AND'", "'ANY'", "'AS'", "'ASC'", "'BETWEEN'", "'BY'", "'CASE'", "'CAST'", - "'CATALOG'", "'CATALOGS'", "'COLUMNS'", "'CONVERT'", "'CURRENT_DATE'", - "'CURRENT_TIME'", "'CURRENT_TIMESTAMP'", "'DAY'", "'DAYS'", "'DEBUG'", - "'DESC'", "'DESCRIBE'", "'DISTINCT'", "'ELSE'", "'END'", "'ESCAPE'", "'EXECUTABLE'", - "'EXISTS'", "'EXPLAIN'", "'EXTRACT'", "'FALSE'", "'FIRST'", "'FOR'", "'FORMAT'", - "'FROM'", "'FROZEN'", "'FULL'", "'FUNCTIONS'", "'GRAPHVIZ'", "'GROUP'", - "'HAVING'", "'HOUR'", "'HOURS'", "'IN'", "'INCLUDE'", "'INNER'", "'INTERVAL'", - "'IS'", "'JOIN'", "'LAST'", "'LEFT'", "'LIKE'", "'LIMIT'", "'MAPPED'", - "'MATCH'", "'MINUTE'", "'MINUTES'", "'MONTH'", "'MONTHS'", "'NATURAL'", - "'NOT'", "'NULL'", "'NULLS'", "'ON'", "'OPTIMIZED'", "'OR'", "'ORDER'", - "'OUTER'", "'PARSED'", "'PHYSICAL'", "'PIVOT'", "'PLAN'", "'RIGHT'", "'RLIKE'", - "'QUERY'", "'SCHEMAS'", "'SECOND'", "'SECONDS'", "'SELECT'", "'SHOW'", - "'SYS'", "'TABLE'", "'TABLES'", "'TEXT'", "'THEN'", "'TRUE'", "'TO'", - "'TOP'", "'TYPE'", "'TYPES'", "'USING'", "'VERIFY'", "'WHEN'", "'WHERE'", - "'WITH'", "'YEAR'", "'YEARS'", null, null, null, null, null, null, null, - null, "'}'", "'='", "'<=>'", null, "'<'", "'<='", "'>'", "'>='", "'+'", - "'-'", "'*'", 
"'/'", "'%'", "'::'", "'.'", "'?'" - }; - private static final String[] _SYMBOLIC_NAMES = { - null, null, null, null, null, "ALL", "ANALYZE", "ANALYZED", "AND", "ANY", - "AS", "ASC", "BETWEEN", "BY", "CASE", "CAST", "CATALOG", "CATALOGS", "COLUMNS", - "CONVERT", "CURRENT_DATE", "CURRENT_TIME", "CURRENT_TIMESTAMP", "DAY", - "DAYS", "DEBUG", "DESC", "DESCRIBE", "DISTINCT", "ELSE", "END", "ESCAPE", - "EXECUTABLE", "EXISTS", "EXPLAIN", "EXTRACT", "FALSE", "FIRST", "FOR", - "FORMAT", "FROM", "FROZEN", "FULL", "FUNCTIONS", "GRAPHVIZ", "GROUP", - "HAVING", "HOUR", "HOURS", "IN", "INCLUDE", "INNER", "INTERVAL", "IS", - "JOIN", "LAST", "LEFT", "LIKE", "LIMIT", "MAPPED", "MATCH", "MINUTE", - "MINUTES", "MONTH", "MONTHS", "NATURAL", "NOT", "NULL", "NULLS", "ON", - "OPTIMIZED", "OR", "ORDER", "OUTER", "PARSED", "PHYSICAL", "PIVOT", "PLAN", - "RIGHT", "RLIKE", "QUERY", "SCHEMAS", "SECOND", "SECONDS", "SELECT", "SHOW", - "SYS", "TABLE", "TABLES", "TEXT", "THEN", "TRUE", "TO", "TOP", "TYPE", - "TYPES", "USING", "VERIFY", "WHEN", "WHERE", "WITH", "YEAR", "YEARS", - "ESCAPE_ESC", "FUNCTION_ESC", "LIMIT_ESC", "DATE_ESC", "TIME_ESC", "TIMESTAMP_ESC", - "GUID_ESC", "ESC_START", "ESC_END", "EQ", "NULLEQ", "NEQ", "LT", "LTE", - "GT", "GTE", "PLUS", "MINUS", "ASTERISK", "SLASH", "PERCENT", "CAST_OP", - "DOT", "PARAM", "STRING", "INTEGER_VALUE", "DECIMAL_VALUE", "IDENTIFIER", - "DIGIT_IDENTIFIER", "TABLE_IDENTIFIER", "QUOTED_IDENTIFIER", "BACKQUOTED_IDENTIFIER", - "SIMPLE_COMMENT", "BRACKETED_COMMENT", "WS", "UNRECOGNIZED" - }; + private static String[] makeLiteralNames() { + return new String[] { + null, "'('", "')'", "','", "':'", "'ALL'", "'ANALYZE'", "'ANALYZED'", + "'AND'", "'ANY'", "'AS'", "'ASC'", "'BETWEEN'", "'BY'", "'CASE'", "'CAST'", + "'CATALOG'", "'CATALOGS'", "'COLUMNS'", "'CONVERT'", "'CURRENT_DATE'", + "'CURRENT_TIME'", "'CURRENT_TIMESTAMP'", "'DAY'", "'DAYS'", "'DEBUG'", + "'DESC'", "'DESCRIBE'", "'DISTINCT'", "'ELSE'", "'END'", "'ESCAPE'", + "'EXECUTABLE'", "'EXISTS'", "'EXPLAIN'", "'EXTRACT'", "'FALSE'", "'FIRST'", + "'FOR'", "'FORMAT'", "'FROM'", "'FROZEN'", "'FULL'", "'FUNCTIONS'", "'GRAPHVIZ'", + "'GROUP'", "'HAVING'", "'HOUR'", "'HOURS'", "'IN'", "'INCLUDE'", "'INNER'", + "'INTERVAL'", "'IS'", "'JOIN'", "'LAST'", "'LEFT'", "'LIKE'", "'LIMIT'", + "'MAPPED'", "'MATCH'", "'MINUTE'", "'MINUTES'", "'MONTH'", "'MONTHS'", + "'NATURAL'", "'NOT'", "'NULL'", "'NULLS'", "'ON'", "'OPTIMIZED'", "'OR'", + "'ORDER'", "'OUTER'", "'PARSED'", "'PHYSICAL'", "'PIVOT'", "'PLAN'", + "'RIGHT'", "'RLIKE'", "'QUERY'", "'SCHEMAS'", "'SECOND'", "'SECONDS'", + "'SELECT'", "'SHOW'", "'SYS'", "'TABLE'", "'TABLES'", "'TEXT'", "'THEN'", + "'TRUE'", "'TO'", "'TOP'", "'TYPE'", "'TYPES'", "'USING'", "'VERIFY'", + "'WHEN'", "'WHERE'", "'WITH'", "'YEAR'", "'YEARS'", null, null, null, + null, null, null, null, null, "'}'", "'='", "'<=>'", null, "'<'", "'<='", + "'>'", "'>='", "'+'", "'-'", "'*'", "'/'", "'%'", "'::'", "'.'", "'?'" + }; + } + private static final String[] _LITERAL_NAMES = makeLiteralNames(); + private static String[] makeSymbolicNames() { + return new String[] { + null, null, null, null, null, "ALL", "ANALYZE", "ANALYZED", "AND", "ANY", + "AS", "ASC", "BETWEEN", "BY", "CASE", "CAST", "CATALOG", "CATALOGS", + "COLUMNS", "CONVERT", "CURRENT_DATE", "CURRENT_TIME", "CURRENT_TIMESTAMP", + "DAY", "DAYS", "DEBUG", "DESC", "DESCRIBE", "DISTINCT", "ELSE", "END", + "ESCAPE", "EXECUTABLE", "EXISTS", "EXPLAIN", "EXTRACT", "FALSE", "FIRST", + "FOR", "FORMAT", "FROM", "FROZEN", "FULL", "FUNCTIONS", "GRAPHVIZ", "GROUP", + 
"HAVING", "HOUR", "HOURS", "IN", "INCLUDE", "INNER", "INTERVAL", "IS", + "JOIN", "LAST", "LEFT", "LIKE", "LIMIT", "MAPPED", "MATCH", "MINUTE", + "MINUTES", "MONTH", "MONTHS", "NATURAL", "NOT", "NULL", "NULLS", "ON", + "OPTIMIZED", "OR", "ORDER", "OUTER", "PARSED", "PHYSICAL", "PIVOT", "PLAN", + "RIGHT", "RLIKE", "QUERY", "SCHEMAS", "SECOND", "SECONDS", "SELECT", + "SHOW", "SYS", "TABLE", "TABLES", "TEXT", "THEN", "TRUE", "TO", "TOP", + "TYPE", "TYPES", "USING", "VERIFY", "WHEN", "WHERE", "WITH", "YEAR", + "YEARS", "ESCAPE_ESC", "FUNCTION_ESC", "LIMIT_ESC", "DATE_ESC", "TIME_ESC", + "TIMESTAMP_ESC", "GUID_ESC", "ESC_START", "ESC_END", "EQ", "NULLEQ", + "NEQ", "LT", "LTE", "GT", "GTE", "PLUS", "MINUS", "ASTERISK", "SLASH", + "PERCENT", "CAST_OP", "DOT", "PARAM", "STRING", "INTEGER_VALUE", "DECIMAL_VALUE", + "IDENTIFIER", "DIGIT_IDENTIFIER", "TABLE_IDENTIFIER", "QUOTED_IDENTIFIER", + "BACKQUOTED_IDENTIFIER", "SIMPLE_COMMENT", "BRACKETED_COMMENT", "WS", + "UNRECOGNIZED" + }; + } + private static final String[] _SYMBOLIC_NAMES = makeSymbolicNames(); public static final Vocabulary VOCABULARY = new VocabularyImpl(_LITERAL_NAMES, _SYMBOLIC_NAMES); /** @@ -154,6 +168,9 @@ public SqlBaseLexer(CharStream input) { @Override public String getSerializedATN() { return _serializedATN; } + @Override + public String[] getChannelNames() { return channelNames; } + @Override public String[] getModeNames() { return modeNames; } @@ -161,7 +178,7 @@ public SqlBaseLexer(CharStream input) { public ATN getATN() { return _ATN; } public static final String _serializedATN = - "\3\u0430\ud6d1\u8206\uad2d\u4417\uaef1\u8d80\uaadd\2\u008c\u0489\b\1\4"+ + "\3\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964\2\u008c\u0489\b\1\4"+ "\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7\4\b\t\b\4\t\t\t\4\n\t\n"+ "\4\13\t\13\4\f\t\f\4\r\t\r\4\16\t\16\4\17\t\17\4\20\t\20\4\21\t\21\4\22"+ "\t\22\4\23\t\23\4\24\t\24\4\25\t\25\4\26\t\26\4\27\t\27\4\30\t\30\4\31"+ @@ -251,17 +268,17 @@ public SqlBaseLexer(CharStream input) { "\u0081\u0101\u0082\u0103\u0083\u0105\u0084\u0107\u0085\u0109\u0086\u010b"+ "\u0087\u010d\u0088\u010f\2\u0111\2\u0113\2\u0115\u0089\u0117\u008a\u0119"+ "\u008b\u011b\u008c\3\2\13\3\2))\4\2BBaa\3\2$$\3\2bb\4\2--//\3\2\62;\3"+ - "\2C\\\4\2\f\f\17\17\5\2\13\f\17\17\"\"\u04aa\2\3\3\2\2\2\2\5\3\2\2\2\2"+ - "\7\3\2\2\2\2\t\3\2\2\2\2\13\3\2\2\2\2\r\3\2\2\2\2\17\3\2\2\2\2\21\3\2"+ - "\2\2\2\23\3\2\2\2\2\25\3\2\2\2\2\27\3\2\2\2\2\31\3\2\2\2\2\33\3\2\2\2"+ - "\2\35\3\2\2\2\2\37\3\2\2\2\2!\3\2\2\2\2#\3\2\2\2\2%\3\2\2\2\2\'\3\2\2"+ - "\2\2)\3\2\2\2\2+\3\2\2\2\2-\3\2\2\2\2/\3\2\2\2\2\61\3\2\2\2\2\63\3\2\2"+ - "\2\2\65\3\2\2\2\2\67\3\2\2\2\29\3\2\2\2\2;\3\2\2\2\2=\3\2\2\2\2?\3\2\2"+ - "\2\2A\3\2\2\2\2C\3\2\2\2\2E\3\2\2\2\2G\3\2\2\2\2I\3\2\2\2\2K\3\2\2\2\2"+ - "M\3\2\2\2\2O\3\2\2\2\2Q\3\2\2\2\2S\3\2\2\2\2U\3\2\2\2\2W\3\2\2\2\2Y\3"+ - "\2\2\2\2[\3\2\2\2\2]\3\2\2\2\2_\3\2\2\2\2a\3\2\2\2\2c\3\2\2\2\2e\3\2\2"+ - "\2\2g\3\2\2\2\2i\3\2\2\2\2k\3\2\2\2\2m\3\2\2\2\2o\3\2\2\2\2q\3\2\2\2\2"+ - "s\3\2\2\2\2u\3\2\2\2\2w\3\2\2\2\2y\3\2\2\2\2{\3\2\2\2\2}\3\2\2\2\2\177"+ + "\2C\\\4\2\f\f\17\17\5\2\13\f\17\17\"\"\2\u04aa\2\3\3\2\2\2\2\5\3\2\2\2"+ + "\2\7\3\2\2\2\2\t\3\2\2\2\2\13\3\2\2\2\2\r\3\2\2\2\2\17\3\2\2\2\2\21\3"+ + "\2\2\2\2\23\3\2\2\2\2\25\3\2\2\2\2\27\3\2\2\2\2\31\3\2\2\2\2\33\3\2\2"+ + "\2\2\35\3\2\2\2\2\37\3\2\2\2\2!\3\2\2\2\2#\3\2\2\2\2%\3\2\2\2\2\'\3\2"+ + "\2\2\2)\3\2\2\2\2+\3\2\2\2\2-\3\2\2\2\2/\3\2\2\2\2\61\3\2\2\2\2\63\3\2"+ + "\2\2\2\65\3\2\2\2\2\67\3\2\2\2\29\3\2\2\2\2;\3\2\2\2\2=\3\2\2\2\2?\3\2"+ + 
"\2\2\2A\3\2\2\2\2C\3\2\2\2\2E\3\2\2\2\2G\3\2\2\2\2I\3\2\2\2\2K\3\2\2\2"+ + "\2M\3\2\2\2\2O\3\2\2\2\2Q\3\2\2\2\2S\3\2\2\2\2U\3\2\2\2\2W\3\2\2\2\2Y"+ + "\3\2\2\2\2[\3\2\2\2\2]\3\2\2\2\2_\3\2\2\2\2a\3\2\2\2\2c\3\2\2\2\2e\3\2"+ + "\2\2\2g\3\2\2\2\2i\3\2\2\2\2k\3\2\2\2\2m\3\2\2\2\2o\3\2\2\2\2q\3\2\2\2"+ + "\2s\3\2\2\2\2u\3\2\2\2\2w\3\2\2\2\2y\3\2\2\2\2{\3\2\2\2\2}\3\2\2\2\2\177"+ "\3\2\2\2\2\u0081\3\2\2\2\2\u0083\3\2\2\2\2\u0085\3\2\2\2\2\u0087\3\2\2"+ "\2\2\u0089\3\2\2\2\2\u008b\3\2\2\2\2\u008d\3\2\2\2\2\u008f\3\2\2\2\2\u0091"+ "\3\2\2\2\2\u0093\3\2\2\2\2\u0095\3\2\2\2\2\u0097\3\2\2\2\2\u0099\3\2\2"+ diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseParser.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseParser.java index 18e3528b71f5a..b0d494e539066 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseParser.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseParser.java @@ -11,7 +11,7 @@ @SuppressWarnings({"all", "warnings", "unchecked", "unused", "cast"}) class SqlBaseParser extends Parser { - static { RuntimeMetaData.checkVersion("4.5.3", RuntimeMetaData.VERSION); } + static { RuntimeMetaData.checkVersion("4.9.2", RuntimeMetaData.VERSION); } protected static final DFA[] _decisionToDFA; protected static final PredictionContextCache _sharedContextCache = @@ -57,63 +57,73 @@ class SqlBaseParser extends Parser { RULE_qualifiedName = 51, RULE_identifier = 52, RULE_tableIdentifier = 53, RULE_quoteIdentifier = 54, RULE_unquoteIdentifier = 55, RULE_number = 56, RULE_string = 57, RULE_whenClause = 58, RULE_nonReserved = 59; - public static final String[] ruleNames = { - "singleStatement", "singleExpression", "statement", "query", "queryNoWith", - "limitClause", "queryTerm", "orderBy", "querySpecification", "fromClause", - "groupBy", "groupingElement", "groupingExpressions", "namedQuery", "topClause", - "setQuantifier", "selectItems", "selectItem", "relation", "joinRelation", - "joinType", "joinCriteria", "relationPrimary", "pivotClause", "pivotArgs", - "namedValueExpression", "expression", "booleanExpression", "matchQueryOptions", - "predicated", "predicate", "likePattern", "pattern", "patternEscape", - "valueExpression", "primaryExpression", "builtinDateTimeFunction", "castExpression", - "castTemplate", "convertTemplate", "extractExpression", "extractTemplate", - "functionExpression", "functionTemplate", "functionName", "constant", - "comparisonOperator", "booleanValue", "interval", "intervalField", "dataType", - "qualifiedName", "identifier", "tableIdentifier", "quoteIdentifier", "unquoteIdentifier", - "number", "string", "whenClause", "nonReserved" - }; - - private static final String[] _LITERAL_NAMES = { - null, "'('", "')'", "','", "':'", "'ALL'", "'ANALYZE'", "'ANALYZED'", - "'AND'", "'ANY'", "'AS'", "'ASC'", "'BETWEEN'", "'BY'", "'CASE'", "'CAST'", - "'CATALOG'", "'CATALOGS'", "'COLUMNS'", "'CONVERT'", "'CURRENT_DATE'", - "'CURRENT_TIME'", "'CURRENT_TIMESTAMP'", "'DAY'", "'DAYS'", "'DEBUG'", - "'DESC'", "'DESCRIBE'", "'DISTINCT'", "'ELSE'", "'END'", "'ESCAPE'", "'EXECUTABLE'", - "'EXISTS'", "'EXPLAIN'", "'EXTRACT'", "'FALSE'", "'FIRST'", "'FOR'", "'FORMAT'", - "'FROM'", "'FROZEN'", "'FULL'", "'FUNCTIONS'", "'GRAPHVIZ'", "'GROUP'", - "'HAVING'", "'HOUR'", "'HOURS'", "'IN'", "'INCLUDE'", "'INNER'", "'INTERVAL'", - "'IS'", "'JOIN'", "'LAST'", "'LEFT'", "'LIKE'", "'LIMIT'", "'MAPPED'", - "'MATCH'", "'MINUTE'", "'MINUTES'", "'MONTH'", "'MONTHS'", "'NATURAL'", - 
"'NOT'", "'NULL'", "'NULLS'", "'ON'", "'OPTIMIZED'", "'OR'", "'ORDER'", - "'OUTER'", "'PARSED'", "'PHYSICAL'", "'PIVOT'", "'PLAN'", "'RIGHT'", "'RLIKE'", - "'QUERY'", "'SCHEMAS'", "'SECOND'", "'SECONDS'", "'SELECT'", "'SHOW'", - "'SYS'", "'TABLE'", "'TABLES'", "'TEXT'", "'THEN'", "'TRUE'", "'TO'", - "'TOP'", "'TYPE'", "'TYPES'", "'USING'", "'VERIFY'", "'WHEN'", "'WHERE'", - "'WITH'", "'YEAR'", "'YEARS'", null, null, null, null, null, null, null, - null, "'}'", "'='", "'<=>'", null, "'<'", "'<='", "'>'", "'>='", "'+'", - "'-'", "'*'", "'/'", "'%'", "'::'", "'.'", "'?'" - }; - private static final String[] _SYMBOLIC_NAMES = { - null, null, null, null, null, "ALL", "ANALYZE", "ANALYZED", "AND", "ANY", - "AS", "ASC", "BETWEEN", "BY", "CASE", "CAST", "CATALOG", "CATALOGS", "COLUMNS", - "CONVERT", "CURRENT_DATE", "CURRENT_TIME", "CURRENT_TIMESTAMP", "DAY", - "DAYS", "DEBUG", "DESC", "DESCRIBE", "DISTINCT", "ELSE", "END", "ESCAPE", - "EXECUTABLE", "EXISTS", "EXPLAIN", "EXTRACT", "FALSE", "FIRST", "FOR", - "FORMAT", "FROM", "FROZEN", "FULL", "FUNCTIONS", "GRAPHVIZ", "GROUP", - "HAVING", "HOUR", "HOURS", "IN", "INCLUDE", "INNER", "INTERVAL", "IS", - "JOIN", "LAST", "LEFT", "LIKE", "LIMIT", "MAPPED", "MATCH", "MINUTE", - "MINUTES", "MONTH", "MONTHS", "NATURAL", "NOT", "NULL", "NULLS", "ON", - "OPTIMIZED", "OR", "ORDER", "OUTER", "PARSED", "PHYSICAL", "PIVOT", "PLAN", - "RIGHT", "RLIKE", "QUERY", "SCHEMAS", "SECOND", "SECONDS", "SELECT", "SHOW", - "SYS", "TABLE", "TABLES", "TEXT", "THEN", "TRUE", "TO", "TOP", "TYPE", - "TYPES", "USING", "VERIFY", "WHEN", "WHERE", "WITH", "YEAR", "YEARS", - "ESCAPE_ESC", "FUNCTION_ESC", "LIMIT_ESC", "DATE_ESC", "TIME_ESC", "TIMESTAMP_ESC", - "GUID_ESC", "ESC_START", "ESC_END", "EQ", "NULLEQ", "NEQ", "LT", "LTE", - "GT", "GTE", "PLUS", "MINUS", "ASTERISK", "SLASH", "PERCENT", "CAST_OP", - "DOT", "PARAM", "STRING", "INTEGER_VALUE", "DECIMAL_VALUE", "IDENTIFIER", - "DIGIT_IDENTIFIER", "TABLE_IDENTIFIER", "QUOTED_IDENTIFIER", "BACKQUOTED_IDENTIFIER", - "SIMPLE_COMMENT", "BRACKETED_COMMENT", "WS", "UNRECOGNIZED", "DELIMITER" - }; + private static String[] makeRuleNames() { + return new String[] { + "singleStatement", "singleExpression", "statement", "query", "queryNoWith", + "limitClause", "queryTerm", "orderBy", "querySpecification", "fromClause", + "groupBy", "groupingElement", "groupingExpressions", "namedQuery", "topClause", + "setQuantifier", "selectItems", "selectItem", "relation", "joinRelation", + "joinType", "joinCriteria", "relationPrimary", "pivotClause", "pivotArgs", + "namedValueExpression", "expression", "booleanExpression", "matchQueryOptions", + "predicated", "predicate", "likePattern", "pattern", "patternEscape", + "valueExpression", "primaryExpression", "builtinDateTimeFunction", "castExpression", + "castTemplate", "convertTemplate", "extractExpression", "extractTemplate", + "functionExpression", "functionTemplate", "functionName", "constant", + "comparisonOperator", "booleanValue", "interval", "intervalField", "dataType", + "qualifiedName", "identifier", "tableIdentifier", "quoteIdentifier", + "unquoteIdentifier", "number", "string", "whenClause", "nonReserved" + }; + } + public static final String[] ruleNames = makeRuleNames(); + + private static String[] makeLiteralNames() { + return new String[] { + null, "'('", "')'", "','", "':'", "'ALL'", "'ANALYZE'", "'ANALYZED'", + "'AND'", "'ANY'", "'AS'", "'ASC'", "'BETWEEN'", "'BY'", "'CASE'", "'CAST'", + "'CATALOG'", "'CATALOGS'", "'COLUMNS'", "'CONVERT'", "'CURRENT_DATE'", + "'CURRENT_TIME'", 
"'CURRENT_TIMESTAMP'", "'DAY'", "'DAYS'", "'DEBUG'", + "'DESC'", "'DESCRIBE'", "'DISTINCT'", "'ELSE'", "'END'", "'ESCAPE'", + "'EXECUTABLE'", "'EXISTS'", "'EXPLAIN'", "'EXTRACT'", "'FALSE'", "'FIRST'", + "'FOR'", "'FORMAT'", "'FROM'", "'FROZEN'", "'FULL'", "'FUNCTIONS'", "'GRAPHVIZ'", + "'GROUP'", "'HAVING'", "'HOUR'", "'HOURS'", "'IN'", "'INCLUDE'", "'INNER'", + "'INTERVAL'", "'IS'", "'JOIN'", "'LAST'", "'LEFT'", "'LIKE'", "'LIMIT'", + "'MAPPED'", "'MATCH'", "'MINUTE'", "'MINUTES'", "'MONTH'", "'MONTHS'", + "'NATURAL'", "'NOT'", "'NULL'", "'NULLS'", "'ON'", "'OPTIMIZED'", "'OR'", + "'ORDER'", "'OUTER'", "'PARSED'", "'PHYSICAL'", "'PIVOT'", "'PLAN'", + "'RIGHT'", "'RLIKE'", "'QUERY'", "'SCHEMAS'", "'SECOND'", "'SECONDS'", + "'SELECT'", "'SHOW'", "'SYS'", "'TABLE'", "'TABLES'", "'TEXT'", "'THEN'", + "'TRUE'", "'TO'", "'TOP'", "'TYPE'", "'TYPES'", "'USING'", "'VERIFY'", + "'WHEN'", "'WHERE'", "'WITH'", "'YEAR'", "'YEARS'", null, null, null, + null, null, null, null, null, "'}'", "'='", "'<=>'", null, "'<'", "'<='", + "'>'", "'>='", "'+'", "'-'", "'*'", "'/'", "'%'", "'::'", "'.'", "'?'" + }; + } + private static final String[] _LITERAL_NAMES = makeLiteralNames(); + private static String[] makeSymbolicNames() { + return new String[] { + null, null, null, null, null, "ALL", "ANALYZE", "ANALYZED", "AND", "ANY", + "AS", "ASC", "BETWEEN", "BY", "CASE", "CAST", "CATALOG", "CATALOGS", + "COLUMNS", "CONVERT", "CURRENT_DATE", "CURRENT_TIME", "CURRENT_TIMESTAMP", + "DAY", "DAYS", "DEBUG", "DESC", "DESCRIBE", "DISTINCT", "ELSE", "END", + "ESCAPE", "EXECUTABLE", "EXISTS", "EXPLAIN", "EXTRACT", "FALSE", "FIRST", + "FOR", "FORMAT", "FROM", "FROZEN", "FULL", "FUNCTIONS", "GRAPHVIZ", "GROUP", + "HAVING", "HOUR", "HOURS", "IN", "INCLUDE", "INNER", "INTERVAL", "IS", + "JOIN", "LAST", "LEFT", "LIKE", "LIMIT", "MAPPED", "MATCH", "MINUTE", + "MINUTES", "MONTH", "MONTHS", "NATURAL", "NOT", "NULL", "NULLS", "ON", + "OPTIMIZED", "OR", "ORDER", "OUTER", "PARSED", "PHYSICAL", "PIVOT", "PLAN", + "RIGHT", "RLIKE", "QUERY", "SCHEMAS", "SECOND", "SECONDS", "SELECT", + "SHOW", "SYS", "TABLE", "TABLES", "TEXT", "THEN", "TRUE", "TO", "TOP", + "TYPE", "TYPES", "USING", "VERIFY", "WHEN", "WHERE", "WITH", "YEAR", + "YEARS", "ESCAPE_ESC", "FUNCTION_ESC", "LIMIT_ESC", "DATE_ESC", "TIME_ESC", + "TIMESTAMP_ESC", "GUID_ESC", "ESC_START", "ESC_END", "EQ", "NULLEQ", + "NEQ", "LT", "LTE", "GT", "GTE", "PLUS", "MINUS", "ASTERISK", "SLASH", + "PERCENT", "CAST_OP", "DOT", "PARAM", "STRING", "INTEGER_VALUE", "DECIMAL_VALUE", + "IDENTIFIER", "DIGIT_IDENTIFIER", "TABLE_IDENTIFIER", "QUOTED_IDENTIFIER", + "BACKQUOTED_IDENTIFIER", "SIMPLE_COMMENT", "BRACKETED_COMMENT", "WS", + "UNRECOGNIZED", "DELIMITER" + }; + } + private static final String[] _SYMBOLIC_NAMES = makeSymbolicNames(); public static final Vocabulary VOCABULARY = new VocabularyImpl(_LITERAL_NAMES, _SYMBOLIC_NAMES); /** @@ -163,6 +173,7 @@ public SqlBaseParser(TokenStream input) { super(input); _interp = new ParserATNSimulator(this,_ATN,_decisionToDFA,_sharedContextCache); } + public static class SingleStatementContext extends ParserRuleContext { public StatementContext statement() { return getRuleContext(StatementContext.class,0); @@ -639,6 +650,7 @@ public final StatementContext statement() throws RecognitionException { while (((((_la - 39)) & ~0x3f) == 0 && ((1L << (_la - 39)) & ((1L << (FORMAT - 39)) | (1L << (PLAN - 39)) | (1L << (VERIFY - 39)))) != 0)) { { setState(135); + _errHandler.sync(this); switch (_input.LA(1)) { case PLAN: { @@ -649,7 +661,10 @@ public final 
StatementContext statement() throws RecognitionException { _la = _input.LA(1); if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << ALL) | (1L << ANALYZED) | (1L << EXECUTABLE) | (1L << MAPPED))) != 0) || _la==OPTIMIZED || _la==PARSED) ) { ((ExplainContext)_localctx).type = (Token)_errHandler.recoverInline(this); - } else { + } + else { + if ( _input.LA(1)==Token.EOF ) matchedEOF = true; + _errHandler.reportMatch(this); consume(); } } @@ -663,7 +678,10 @@ public final StatementContext statement() throws RecognitionException { _la = _input.LA(1); if ( !(_la==GRAPHVIZ || _la==TEXT) ) { ((ExplainContext)_localctx).format = (Token)_errHandler.recoverInline(this); - } else { + } + else { + if ( _input.LA(1)==Token.EOF ) matchedEOF = true; + _errHandler.reportMatch(this); consume(); } } @@ -712,6 +730,7 @@ public final StatementContext statement() throws RecognitionException { while (_la==FORMAT || _la==PLAN) { { setState(150); + _errHandler.sync(this); switch (_input.LA(1)) { case PLAN: { @@ -722,7 +741,10 @@ public final StatementContext statement() throws RecognitionException { _la = _input.LA(1); if ( !(_la==ANALYZED || _la==OPTIMIZED) ) { ((DebugContext)_localctx).type = (Token)_errHandler.recoverInline(this); - } else { + } + else { + if ( _input.LA(1)==Token.EOF ) matchedEOF = true; + _errHandler.reportMatch(this); consume(); } } @@ -736,7 +758,10 @@ public final StatementContext statement() throws RecognitionException { _la = _input.LA(1); if ( !(_la==GRAPHVIZ || _la==TEXT) ) { ((DebugContext)_localctx).format = (Token)_errHandler.recoverInline(this); - } else { + } + else { + if ( _input.LA(1)==Token.EOF ) matchedEOF = true; + _errHandler.reportMatch(this); consume(); } } @@ -767,6 +792,7 @@ public final StatementContext statement() throws RecognitionException { setState(160); match(TABLES); setState(163); + _errHandler.sync(this); _la = _input.LA(1); if (_la==INCLUDE) { { @@ -778,6 +804,7 @@ public final StatementContext statement() throws RecognitionException { } setState(167); + _errHandler.sync(this); switch (_input.LA(1)) { case LIKE: { @@ -839,7 +866,7 @@ public final StatementContext statement() throws RecognitionException { case EOF: break; default: - throw new NoViableAltException(this); + break; } } break; @@ -852,6 +879,7 @@ public final StatementContext statement() throws RecognitionException { setState(170); match(COLUMNS); setState(173); + _errHandler.sync(this); _la = _input.LA(1); if (_la==INCLUDE) { { @@ -866,10 +894,14 @@ public final StatementContext statement() throws RecognitionException { _la = _input.LA(1); if ( !(_la==FROM || _la==IN) ) { _errHandler.recoverInline(this); - } else { + } + else { + if ( _input.LA(1)==Token.EOF ) matchedEOF = true; + _errHandler.reportMatch(this); consume(); } setState(178); + _errHandler.sync(this); switch (_input.LA(1)) { case LIKE: { @@ -941,10 +973,14 @@ public final StatementContext statement() throws RecognitionException { _la = _input.LA(1); if ( !(_la==DESC || _la==DESCRIBE) ) { _errHandler.recoverInline(this); - } else { + } + else { + if ( _input.LA(1)==Token.EOF ) matchedEOF = true; + _errHandler.reportMatch(this); consume(); } setState(183); + _errHandler.sync(this); _la = _input.LA(1); if (_la==INCLUDE) { { @@ -956,6 +992,7 @@ public final StatementContext statement() throws RecognitionException { } setState(187); + _errHandler.sync(this); switch (_input.LA(1)) { case LIKE: { @@ -1028,6 +1065,7 @@ public final StatementContext statement() throws RecognitionException { setState(190); match(FUNCTIONS); setState(192); + 
_errHandler.sync(this); _la = _input.LA(1); if (_la==LIKE) { { @@ -1057,6 +1095,7 @@ public final StatementContext statement() throws RecognitionException { setState(197); match(TABLES); setState(200); + _errHandler.sync(this); _la = _input.LA(1); if (_la==CATALOG) { { @@ -1084,6 +1123,7 @@ public final StatementContext statement() throws RecognitionException { break; } setState(215); + _errHandler.sync(this); _la = _input.LA(1); if (_la==TYPE) { { @@ -1121,6 +1161,7 @@ public final StatementContext statement() throws RecognitionException { setState(218); match(COLUMNS); setState(221); + _errHandler.sync(this); _la = _input.LA(1); if (_la==CATALOG) { { @@ -1132,6 +1173,7 @@ public final StatementContext statement() throws RecognitionException { } setState(226); + _errHandler.sync(this); switch (_input.LA(1)) { case TABLE: { @@ -1196,9 +1238,10 @@ public final StatementContext statement() throws RecognitionException { case LIKE: break; default: - throw new NoViableAltException(this); + break; } setState(229); + _errHandler.sync(this); _la = _input.LA(1); if (_la==LIKE) { { @@ -1218,10 +1261,12 @@ public final StatementContext statement() throws RecognitionException { setState(232); match(TYPES); setState(237); + _errHandler.sync(this); _la = _input.LA(1); if (((((_la - 119)) & ~0x3f) == 0 && ((1L << (_la - 119)) & ((1L << (PLUS - 119)) | (1L << (MINUS - 119)) | (1L << (INTEGER_VALUE - 119)) | (1L << (DECIMAL_VALUE - 119)))) != 0)) { { setState(234); + _errHandler.sync(this); _la = _input.LA(1); if (_la==PLUS || _la==MINUS) { { @@ -1229,7 +1274,10 @@ public final StatementContext statement() throws RecognitionException { _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { _errHandler.recoverInline(this); - } else { + } + else { + if ( _input.LA(1)==Token.EOF ) matchedEOF = true; + _errHandler.reportMatch(this); consume(); } } @@ -1293,6 +1341,7 @@ public final QueryContext query() throws RecognitionException { enterOuterAlt(_localctx, 1); { setState(250); + _errHandler.sync(this); _la = _input.LA(1); if (_la==WITH) { { @@ -1378,6 +1427,7 @@ public final QueryNoWithContext queryNoWith() throws RecognitionException { setState(254); queryTerm(); setState(265); + _errHandler.sync(this); _la = _input.LA(1); if (_la==ORDER) { { @@ -1407,6 +1457,7 @@ public final QueryNoWithContext queryNoWith() throws RecognitionException { } setState(268); + _errHandler.sync(this); _la = _input.LA(1); if (_la==LIMIT || _la==LIMIT_ESC) { { @@ -1460,6 +1511,7 @@ public final LimitClauseContext limitClause() throws RecognitionException { int _la; try { setState(275); + _errHandler.sync(this); switch (_input.LA(1)) { case LIMIT: enterOuterAlt(_localctx, 1); @@ -1471,7 +1523,10 @@ public final LimitClauseContext limitClause() throws RecognitionException { _la = _input.LA(1); if ( !(_la==ALL || _la==INTEGER_VALUE) ) { ((LimitClauseContext)_localctx).limit = (Token)_errHandler.recoverInline(this); - } else { + } + else { + if ( _input.LA(1)==Token.EOF ) matchedEOF = true; + _errHandler.reportMatch(this); consume(); } } @@ -1486,7 +1541,10 @@ public final LimitClauseContext limitClause() throws RecognitionException { _la = _input.LA(1); if ( !(_la==ALL || _la==INTEGER_VALUE) ) { ((LimitClauseContext)_localctx).limit = (Token)_errHandler.recoverInline(this); - } else { + } + else { + if ( _input.LA(1)==Token.EOF ) matchedEOF = true; + _errHandler.reportMatch(this); consume(); } setState(274); @@ -1563,6 +1621,7 @@ public final QueryTermContext queryTerm() throws RecognitionException { enterRule(_localctx, 12, 
RULE_queryTerm); try { setState(282); + _errHandler.sync(this); switch (_input.LA(1)) { case SELECT: _localctx = new QueryPrimaryDefaultContext(_localctx); @@ -1639,6 +1698,7 @@ public final OrderByContext orderBy() throws RecognitionException { setState(284); expression(); setState(286); + _errHandler.sync(this); _la = _input.LA(1); if (_la==ASC || _la==DESC) { { @@ -1647,13 +1707,17 @@ public final OrderByContext orderBy() throws RecognitionException { _la = _input.LA(1); if ( !(_la==ASC || _la==DESC) ) { ((OrderByContext)_localctx).ordering = (Token)_errHandler.recoverInline(this); - } else { + } + else { + if ( _input.LA(1)==Token.EOF ) matchedEOF = true; + _errHandler.reportMatch(this); consume(); } } } setState(290); + _errHandler.sync(this); _la = _input.LA(1); if (_la==NULLS) { { @@ -1664,7 +1728,10 @@ public final OrderByContext orderBy() throws RecognitionException { _la = _input.LA(1); if ( !(_la==FIRST || _la==LAST) ) { ((OrderByContext)_localctx).nullOrdering = (Token)_errHandler.recoverInline(this); - } else { + } + else { + if ( _input.LA(1)==Token.EOF ) matchedEOF = true; + _errHandler.reportMatch(this); consume(); } } @@ -1751,6 +1818,7 @@ public final QuerySpecificationContext querySpecification() throws RecognitionEx break; } setState(297); + _errHandler.sync(this); _la = _input.LA(1); if (_la==ALL || _la==DISTINCT) { { @@ -1762,6 +1830,7 @@ public final QuerySpecificationContext querySpecification() throws RecognitionEx setState(299); selectItems(); setState(301); + _errHandler.sync(this); _la = _input.LA(1); if (_la==FROM) { { @@ -1771,6 +1840,7 @@ public final QuerySpecificationContext querySpecification() throws RecognitionEx } setState(305); + _errHandler.sync(this); _la = _input.LA(1); if (_la==WHERE) { { @@ -1782,6 +1852,7 @@ public final QuerySpecificationContext querySpecification() throws RecognitionEx } setState(310); + _errHandler.sync(this); _la = _input.LA(1); if (_la==GROUP) { { @@ -1795,6 +1866,7 @@ public final QuerySpecificationContext querySpecification() throws RecognitionEx } setState(314); + _errHandler.sync(this); _la = _input.LA(1); if (_la==HAVING) { { @@ -1876,6 +1948,7 @@ public final FromClauseContext fromClause() throws RecognitionException { _la = _input.LA(1); } setState(326); + _errHandler.sync(this); _la = _input.LA(1); if (_la==PIVOT) { { @@ -1934,6 +2007,7 @@ public final GroupByContext groupBy() throws RecognitionException { enterOuterAlt(_localctx, 1); { setState(329); + _errHandler.sync(this); _la = _input.LA(1); if (_la==ALL || _la==DISTINCT) { { @@ -2066,6 +2140,7 @@ public final GroupingExpressionsContext groupingExpressions() throws Recognition setState(341); match(T__0); setState(350); + _errHandler.sync(this); _la = _input.LA(1); if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << T__0) | (1L << ANALYZE) | (1L << ANALYZED) | (1L << CASE) | (1L << CAST) | (1L << CATALOGS) | (1L << COLUMNS) | (1L << CONVERT) | (1L << CURRENT_DATE) | (1L << CURRENT_TIME) | (1L << CURRENT_TIMESTAMP) | (1L << DAY) | (1L << DEBUG) | (1L << EXECUTABLE) | (1L << EXISTS) | (1L << EXPLAIN) | (1L << EXTRACT) | (1L << FALSE) | (1L << FIRST) | (1L << FORMAT) | (1L << FULL) | (1L << FUNCTIONS) | (1L << GRAPHVIZ) | (1L << HOUR) | (1L << INTERVAL) | (1L << LAST) | (1L << LEFT) | (1L << LIMIT) | (1L << MAPPED) | (1L << MATCH) | (1L << MINUTE) | (1L << MONTH))) != 0) || ((((_la - 66)) & ~0x3f) == 0 && ((1L << (_la - 66)) & ((1L << (NOT - 66)) | (1L << (NULL - 66)) | (1L << (OPTIMIZED - 66)) | (1L << (PARSED - 66)) | (1L << (PHYSICAL - 66)) | (1L << (PIVOT - 66)) 
| (1L << (PLAN - 66)) | (1L << (RIGHT - 66)) | (1L << (RLIKE - 66)) | (1L << (QUERY - 66)) | (1L << (SCHEMAS - 66)) | (1L << (SECOND - 66)) | (1L << (SHOW - 66)) | (1L << (SYS - 66)) | (1L << (TABLES - 66)) | (1L << (TEXT - 66)) | (1L << (TRUE - 66)) | (1L << (TOP - 66)) | (1L << (TYPE - 66)) | (1L << (TYPES - 66)) | (1L << (VERIFY - 66)) | (1L << (YEAR - 66)) | (1L << (FUNCTION_ESC - 66)) | (1L << (DATE_ESC - 66)) | (1L << (TIME_ESC - 66)) | (1L << (TIMESTAMP_ESC - 66)) | (1L << (GUID_ESC - 66)) | (1L << (PLUS - 66)) | (1L << (MINUS - 66)) | (1L << (ASTERISK - 66)) | (1L << (PARAM - 66)) | (1L << (STRING - 66)) | (1L << (INTEGER_VALUE - 66)) | (1L << (DECIMAL_VALUE - 66)))) != 0) || ((((_la - 130)) & ~0x3f) == 0 && ((1L << (_la - 130)) & ((1L << (IDENTIFIER - 130)) | (1L << (DIGIT_IDENTIFIER - 130)) | (1L << (QUOTED_IDENTIFIER - 130)) | (1L << (BACKQUOTED_IDENTIFIER - 130)))) != 0)) { { @@ -2250,7 +2325,10 @@ public final SetQuantifierContext setQuantifier() throws RecognitionException { _la = _input.LA(1); if ( !(_la==ALL || _la==DISTINCT) ) { _errHandler.recoverInline(this); - } else { + } + else { + if ( _input.LA(1)==Token.EOF ) matchedEOF = true; + _errHandler.reportMatch(this); consume(); } } @@ -2381,6 +2459,7 @@ public final SelectItemContext selectItem() throws RecognitionException { case 1: { setState(377); + _errHandler.sync(this); _la = _input.LA(1); if (_la==AS) { { @@ -2510,6 +2589,7 @@ public final JoinRelationContext joinRelation() throws RecognitionException { int _la; try { setState(400); + _errHandler.sync(this); switch (_input.LA(1)) { case FULL: case INNER: @@ -2527,6 +2607,7 @@ public final JoinRelationContext joinRelation() throws RecognitionException { setState(391); ((JoinRelationContext)_localctx).right = relationPrimary(); setState(393); + _errHandler.sync(this); _la = _input.LA(1); if (_la==ON || _la==USING) { { @@ -2596,12 +2677,14 @@ public final JoinTypeContext joinType() throws RecognitionException { int _la; try { setState(417); + _errHandler.sync(this); switch (_input.LA(1)) { case INNER: case JOIN: enterOuterAlt(_localctx, 1); { setState(403); + _errHandler.sync(this); _la = _input.LA(1); if (_la==INNER) { { @@ -2618,6 +2701,7 @@ public final JoinTypeContext joinType() throws RecognitionException { setState(405); match(LEFT); setState(407); + _errHandler.sync(this); _la = _input.LA(1); if (_la==OUTER) { { @@ -2634,6 +2718,7 @@ public final JoinTypeContext joinType() throws RecognitionException { setState(409); match(RIGHT); setState(411); + _errHandler.sync(this); _la = _input.LA(1); if (_la==OUTER) { { @@ -2650,6 +2735,7 @@ public final JoinTypeContext joinType() throws RecognitionException { setState(413); match(FULL); setState(415); + _errHandler.sync(this); _la = _input.LA(1); if (_la==OUTER) { { @@ -2712,6 +2798,7 @@ public final JoinCriteriaContext joinCriteria() throws RecognitionException { int _la; try { setState(433); + _errHandler.sync(this); switch (_input.LA(1)) { case ON: enterOuterAlt(_localctx, 1); @@ -2861,6 +2948,7 @@ public final RelationPrimaryContext relationPrimary() throws RecognitionExceptio enterOuterAlt(_localctx, 1); { setState(436); + _errHandler.sync(this); _la = _input.LA(1); if (_la==FROZEN) { { @@ -2877,6 +2965,7 @@ public final RelationPrimaryContext relationPrimary() throws RecognitionExceptio case 1: { setState(440); + _errHandler.sync(this); _la = _input.LA(1); if (_la==AS) { { @@ -2908,6 +2997,7 @@ public final RelationPrimaryContext relationPrimary() throws RecognitionExceptio case 1: { setState(449); + 
_errHandler.sync(this); _la = _input.LA(1); if (_la==AS) { { @@ -2939,6 +3029,7 @@ public final RelationPrimaryContext relationPrimary() throws RecognitionExceptio case 1: { setState(458); + _errHandler.sync(this); _la = _input.LA(1); if (_la==AS) { { @@ -3142,10 +3233,12 @@ public final NamedValueExpressionContext namedValueExpression() throws Recogniti setState(484); valueExpression(0); setState(489); + _errHandler.sync(this); _la = _input.LA(1); if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << ANALYZE) | (1L << ANALYZED) | (1L << AS) | (1L << CATALOGS) | (1L << COLUMNS) | (1L << CURRENT_DATE) | (1L << CURRENT_TIME) | (1L << CURRENT_TIMESTAMP) | (1L << DAY) | (1L << DEBUG) | (1L << EXECUTABLE) | (1L << EXPLAIN) | (1L << FIRST) | (1L << FORMAT) | (1L << FULL) | (1L << FUNCTIONS) | (1L << GRAPHVIZ) | (1L << HOUR) | (1L << INTERVAL) | (1L << LAST) | (1L << LIMIT) | (1L << MAPPED) | (1L << MINUTE) | (1L << MONTH))) != 0) || ((((_la - 70)) & ~0x3f) == 0 && ((1L << (_la - 70)) & ((1L << (OPTIMIZED - 70)) | (1L << (PARSED - 70)) | (1L << (PHYSICAL - 70)) | (1L << (PIVOT - 70)) | (1L << (PLAN - 70)) | (1L << (RLIKE - 70)) | (1L << (QUERY - 70)) | (1L << (SCHEMAS - 70)) | (1L << (SECOND - 70)) | (1L << (SHOW - 70)) | (1L << (SYS - 70)) | (1L << (TABLES - 70)) | (1L << (TEXT - 70)) | (1L << (TOP - 70)) | (1L << (TYPE - 70)) | (1L << (TYPES - 70)) | (1L << (VERIFY - 70)) | (1L << (YEAR - 70)) | (1L << (IDENTIFIER - 70)) | (1L << (DIGIT_IDENTIFIER - 70)) | (1L << (QUOTED_IDENTIFIER - 70)))) != 0) || _la==BACKQUOTED_IDENTIFIER) { { setState(486); + _errHandler.sync(this); _la = _input.LA(1); if (_la==AS) { { @@ -3744,6 +3837,7 @@ public final PredicateContext predicate() throws RecognitionException { enterOuterAlt(_localctx, 1); { setState(549); + _errHandler.sync(this); _la = _input.LA(1); if (_la==NOT) { { @@ -3766,6 +3860,7 @@ public final PredicateContext predicate() throws RecognitionException { enterOuterAlt(_localctx, 2); { setState(557); + _errHandler.sync(this); _la = _input.LA(1); if (_la==NOT) { { @@ -3804,6 +3899,7 @@ public final PredicateContext predicate() throws RecognitionException { enterOuterAlt(_localctx, 3); { setState(572); + _errHandler.sync(this); _la = _input.LA(1); if (_la==NOT) { { @@ -3826,6 +3922,7 @@ public final PredicateContext predicate() throws RecognitionException { enterOuterAlt(_localctx, 4); { setState(580); + _errHandler.sync(this); _la = _input.LA(1); if (_la==NOT) { { @@ -3844,6 +3941,7 @@ public final PredicateContext predicate() throws RecognitionException { enterOuterAlt(_localctx, 5); { setState(585); + _errHandler.sync(this); _la = _input.LA(1); if (_la==NOT) { { @@ -3864,6 +3962,7 @@ public final PredicateContext predicate() throws RecognitionException { setState(589); match(IS); setState(591); + _errHandler.sync(this); _la = _input.LA(1); if (_la==NOT) { { @@ -4026,6 +4125,7 @@ public final PatternEscapeContext patternEscape() throws RecognitionException { enterRule(_localctx, 66, RULE_patternEscape); try { setState(609); + _errHandler.sync(this); switch (_input.LA(1)) { case ESCAPE: enterOuterAlt(_localctx, 1); @@ -4189,6 +4289,7 @@ private ValueExpressionContext valueExpression(int _p) throws RecognitionExcepti enterOuterAlt(_localctx, 1); { setState(615); + _errHandler.sync(this); switch (_input.LA(1)) { case T__0: case ANALYZE: @@ -4275,7 +4376,10 @@ private ValueExpressionContext valueExpression(int _p) throws RecognitionExcepti _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { ((ArithmeticUnaryContext)_localctx).operator = 
(Token)_errHandler.recoverInline(this); - } else { + } + else { + if ( _input.LA(1)==Token.EOF ) matchedEOF = true; + _errHandler.reportMatch(this); consume(); } setState(614); @@ -4309,7 +4413,10 @@ private ValueExpressionContext valueExpression(int _p) throws RecognitionExcepti _la = _input.LA(1); if ( !(((((_la - 121)) & ~0x3f) == 0 && ((1L << (_la - 121)) & ((1L << (ASTERISK - 121)) | (1L << (SLASH - 121)) | (1L << (PERCENT - 121)))) != 0)) ) { ((ArithmeticBinaryContext)_localctx).operator = (Token)_errHandler.recoverInline(this); - } else { + } + else { + if ( _input.LA(1)==Token.EOF ) matchedEOF = true; + _errHandler.reportMatch(this); consume(); } setState(619); @@ -4328,7 +4435,10 @@ private ValueExpressionContext valueExpression(int _p) throws RecognitionExcepti _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { ((ArithmeticBinaryContext)_localctx).operator = (Token)_errHandler.recoverInline(this); - } else { + } + else { + if ( _input.LA(1)==Token.EOF ) matchedEOF = true; + _errHandler.reportMatch(this); consume(); } setState(622); @@ -4671,6 +4781,7 @@ private PrimaryExpressionContext primaryExpression(int _p) throws RecognitionExc _ctx = _localctx; _prevctx = _localctx; setState(640); + _errHandler.sync(this); _la = _input.LA(1); if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << ANALYZE) | (1L << ANALYZED) | (1L << CATALOGS) | (1L << COLUMNS) | (1L << CURRENT_DATE) | (1L << CURRENT_TIME) | (1L << CURRENT_TIMESTAMP) | (1L << DAY) | (1L << DEBUG) | (1L << EXECUTABLE) | (1L << EXPLAIN) | (1L << FIRST) | (1L << FORMAT) | (1L << FULL) | (1L << FUNCTIONS) | (1L << GRAPHVIZ) | (1L << HOUR) | (1L << INTERVAL) | (1L << LAST) | (1L << LIMIT) | (1L << MAPPED) | (1L << MINUTE) | (1L << MONTH))) != 0) || ((((_la - 70)) & ~0x3f) == 0 && ((1L << (_la - 70)) & ((1L << (OPTIMIZED - 70)) | (1L << (PARSED - 70)) | (1L << (PHYSICAL - 70)) | (1L << (PIVOT - 70)) | (1L << (PLAN - 70)) | (1L << (RLIKE - 70)) | (1L << (QUERY - 70)) | (1L << (SCHEMAS - 70)) | (1L << (SECOND - 70)) | (1L << (SHOW - 70)) | (1L << (SYS - 70)) | (1L << (TABLES - 70)) | (1L << (TEXT - 70)) | (1L << (TOP - 70)) | (1L << (TYPE - 70)) | (1L << (TYPES - 70)) | (1L << (VERIFY - 70)) | (1L << (YEAR - 70)) | (1L << (IDENTIFIER - 70)) | (1L << (DIGIT_IDENTIFIER - 70)) | (1L << (QUOTED_IDENTIFIER - 70)))) != 0) || _la==BACKQUOTED_IDENTIFIER) { { @@ -4737,6 +4848,7 @@ private PrimaryExpressionContext primaryExpression(int _p) throws RecognitionExc setState(653); match(CASE); setState(655); + _errHandler.sync(this); _la = _input.LA(1); if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << T__0) | (1L << ANALYZE) | (1L << ANALYZED) | (1L << CASE) | (1L << CAST) | (1L << CATALOGS) | (1L << COLUMNS) | (1L << CONVERT) | (1L << CURRENT_DATE) | (1L << CURRENT_TIME) | (1L << CURRENT_TIMESTAMP) | (1L << DAY) | (1L << DEBUG) | (1L << EXECUTABLE) | (1L << EXISTS) | (1L << EXPLAIN) | (1L << EXTRACT) | (1L << FALSE) | (1L << FIRST) | (1L << FORMAT) | (1L << FULL) | (1L << FUNCTIONS) | (1L << GRAPHVIZ) | (1L << HOUR) | (1L << INTERVAL) | (1L << LAST) | (1L << LEFT) | (1L << LIMIT) | (1L << MAPPED) | (1L << MATCH) | (1L << MINUTE) | (1L << MONTH))) != 0) || ((((_la - 66)) & ~0x3f) == 0 && ((1L << (_la - 66)) & ((1L << (NOT - 66)) | (1L << (NULL - 66)) | (1L << (OPTIMIZED - 66)) | (1L << (PARSED - 66)) | (1L << (PHYSICAL - 66)) | (1L << (PIVOT - 66)) | (1L << (PLAN - 66)) | (1L << (RIGHT - 66)) | (1L << (RLIKE - 66)) | (1L << (QUERY - 66)) | (1L << (SCHEMAS - 66)) | (1L << (SECOND - 66)) | (1L << (SHOW - 66)) | (1L << (SYS - 66)) | (1L << 
(TABLES - 66)) | (1L << (TEXT - 66)) | (1L << (TRUE - 66)) | (1L << (TOP - 66)) | (1L << (TYPE - 66)) | (1L << (TYPES - 66)) | (1L << (VERIFY - 66)) | (1L << (YEAR - 66)) | (1L << (FUNCTION_ESC - 66)) | (1L << (DATE_ESC - 66)) | (1L << (TIME_ESC - 66)) | (1L << (TIMESTAMP_ESC - 66)) | (1L << (GUID_ESC - 66)) | (1L << (PLUS - 66)) | (1L << (MINUS - 66)) | (1L << (ASTERISK - 66)) | (1L << (PARAM - 66)) | (1L << (STRING - 66)) | (1L << (INTEGER_VALUE - 66)) | (1L << (DECIMAL_VALUE - 66)))) != 0) || ((((_la - 130)) & ~0x3f) == 0 && ((1L << (_la - 130)) & ((1L << (IDENTIFIER - 130)) | (1L << (DIGIT_IDENTIFIER - 130)) | (1L << (QUOTED_IDENTIFIER - 130)) | (1L << (BACKQUOTED_IDENTIFIER - 130)))) != 0)) { { @@ -4760,6 +4872,7 @@ private PrimaryExpressionContext primaryExpression(int _p) throws RecognitionExc _la = _input.LA(1); } while ( _la==WHEN ); setState(664); + _errHandler.sync(this); _la = _input.LA(1); if (_la==ELSE) { { @@ -4842,6 +4955,7 @@ public final BuiltinDateTimeFunctionContext builtinDateTimeFunction() throws Rec enterRule(_localctx, 72, RULE_builtinDateTimeFunction); try { setState(681); + _errHandler.sync(this); switch (_input.LA(1)) { case CURRENT_TIMESTAMP: enterOuterAlt(_localctx, 1); @@ -5110,6 +5224,7 @@ public final ExtractExpressionContext extractExpression() throws RecognitionExce enterRule(_localctx, 80, RULE_extractExpression); try { setState(714); + _errHandler.sync(this); switch (_input.LA(1)) { case EXTRACT: enterOuterAlt(_localctx, 1); @@ -5234,6 +5349,7 @@ public final FunctionExpressionContext functionExpression() throws RecognitionEx enterRule(_localctx, 84, RULE_functionExpression); try { setState(728); + _errHandler.sync(this); switch (_input.LA(1)) { case ANALYZE: case ANALYZED: @@ -5358,10 +5474,12 @@ public final FunctionTemplateContext functionTemplate() throws RecognitionExcept setState(731); match(T__0); setState(743); + _errHandler.sync(this); _la = _input.LA(1); if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << T__0) | (1L << ALL) | (1L << ANALYZE) | (1L << ANALYZED) | (1L << CASE) | (1L << CAST) | (1L << CATALOGS) | (1L << COLUMNS) | (1L << CONVERT) | (1L << CURRENT_DATE) | (1L << CURRENT_TIME) | (1L << CURRENT_TIMESTAMP) | (1L << DAY) | (1L << DEBUG) | (1L << DISTINCT) | (1L << EXECUTABLE) | (1L << EXISTS) | (1L << EXPLAIN) | (1L << EXTRACT) | (1L << FALSE) | (1L << FIRST) | (1L << FORMAT) | (1L << FULL) | (1L << FUNCTIONS) | (1L << GRAPHVIZ) | (1L << HOUR) | (1L << INTERVAL) | (1L << LAST) | (1L << LEFT) | (1L << LIMIT) | (1L << MAPPED) | (1L << MATCH) | (1L << MINUTE) | (1L << MONTH))) != 0) || ((((_la - 66)) & ~0x3f) == 0 && ((1L << (_la - 66)) & ((1L << (NOT - 66)) | (1L << (NULL - 66)) | (1L << (OPTIMIZED - 66)) | (1L << (PARSED - 66)) | (1L << (PHYSICAL - 66)) | (1L << (PIVOT - 66)) | (1L << (PLAN - 66)) | (1L << (RIGHT - 66)) | (1L << (RLIKE - 66)) | (1L << (QUERY - 66)) | (1L << (SCHEMAS - 66)) | (1L << (SECOND - 66)) | (1L << (SHOW - 66)) | (1L << (SYS - 66)) | (1L << (TABLES - 66)) | (1L << (TEXT - 66)) | (1L << (TRUE - 66)) | (1L << (TOP - 66)) | (1L << (TYPE - 66)) | (1L << (TYPES - 66)) | (1L << (VERIFY - 66)) | (1L << (YEAR - 66)) | (1L << (FUNCTION_ESC - 66)) | (1L << (DATE_ESC - 66)) | (1L << (TIME_ESC - 66)) | (1L << (TIMESTAMP_ESC - 66)) | (1L << (GUID_ESC - 66)) | (1L << (PLUS - 66)) | (1L << (MINUS - 66)) | (1L << (ASTERISK - 66)) | (1L << (PARAM - 66)) | (1L << (STRING - 66)) | (1L << (INTEGER_VALUE - 66)) | (1L << (DECIMAL_VALUE - 66)))) != 0) || ((((_la - 130)) & ~0x3f) == 0 && ((1L << (_la - 130)) & ((1L << (IDENTIFIER - 
130)) | (1L << (DIGIT_IDENTIFIER - 130)) | (1L << (QUOTED_IDENTIFIER - 130)) | (1L << (BACKQUOTED_IDENTIFIER - 130)))) != 0)) { { setState(733); + _errHandler.sync(this); _la = _input.LA(1); if (_la==ALL || _la==DISTINCT) { { @@ -5436,6 +5554,7 @@ public final FunctionNameContext functionName() throws RecognitionException { enterRule(_localctx, 88, RULE_functionName); try { setState(750); + _errHandler.sync(this); switch (_input.LA(1)) { case LEFT: enterOuterAlt(_localctx, 1); @@ -5730,6 +5849,7 @@ public final ConstantContext constant() throws RecognitionException { try { int _alt; setState(778); + _errHandler.sync(this); switch (_input.LA(1)) { case NULL: _localctx = new NullLiteralContext(_localctx); @@ -5900,7 +6020,10 @@ public final ComparisonOperatorContext comparisonOperator() throws RecognitionEx _la = _input.LA(1); if ( !(((((_la - 112)) & ~0x3f) == 0 && ((1L << (_la - 112)) & ((1L << (EQ - 112)) | (1L << (NULLEQ - 112)) | (1L << (NEQ - 112)) | (1L << (LT - 112)) | (1L << (LTE - 112)) | (1L << (GT - 112)) | (1L << (GTE - 112)))) != 0)) ) { _errHandler.recoverInline(this); - } else { + } + else { + if ( _input.LA(1)==Token.EOF ) matchedEOF = true; + _errHandler.reportMatch(this); consume(); } } @@ -5949,7 +6072,10 @@ public final BooleanValueContext booleanValue() throws RecognitionException { _la = _input.LA(1); if ( !(_la==FALSE || _la==TRUE) ) { _errHandler.recoverInline(this); - } else { + } + else { + if ( _input.LA(1)==Token.EOF ) matchedEOF = true; + _errHandler.reportMatch(this); consume(); } } @@ -6016,6 +6142,7 @@ public final IntervalContext interval() throws RecognitionException { setState(784); match(INTERVAL); setState(786); + _errHandler.sync(this); _la = _input.LA(1); if (_la==PLUS || _la==MINUS) { { @@ -6024,13 +6151,17 @@ public final IntervalContext interval() throws RecognitionException { _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { ((IntervalContext)_localctx).sign = (Token)_errHandler.recoverInline(this); - } else { + } + else { + if ( _input.LA(1)==Token.EOF ) matchedEOF = true; + _errHandler.reportMatch(this); consume(); } } } setState(790); + _errHandler.sync(this); switch (_input.LA(1)) { case INTEGER_VALUE: case DECIMAL_VALUE: @@ -6119,7 +6250,10 @@ public final IntervalFieldContext intervalField() throws RecognitionException { _la = _input.LA(1); if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << DAY) | (1L << DAYS) | (1L << HOUR) | (1L << HOURS) | (1L << MINUTE) | (1L << MINUTES) | (1L << MONTH))) != 0) || ((((_la - 64)) & ~0x3f) == 0 && ((1L << (_la - 64)) & ((1L << (MONTHS - 64)) | (1L << (SECOND - 64)) | (1L << (SECONDS - 64)) | (1L << (YEAR - 64)) | (1L << (YEARS - 64)))) != 0)) ) { _errHandler.recoverInline(this); - } else { + } + else { + if ( _input.LA(1)==Token.EOF ) matchedEOF = true; + _errHandler.reportMatch(this); consume(); } } @@ -6289,6 +6423,7 @@ public final IdentifierContext identifier() throws RecognitionException { enterRule(_localctx, 104, RULE_identifier); try { setState(813); + _errHandler.sync(this); switch (_input.LA(1)) { case QUOTED_IDENTIFIER: case BACKQUOTED_IDENTIFIER: @@ -6403,6 +6538,7 @@ public final TableIdentifierContext tableIdentifier() throws RecognitionExceptio enterOuterAlt(_localctx, 1); { setState(818); + _errHandler.sync(this); _la = _input.LA(1); if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << ANALYZE) | (1L << ANALYZED) | (1L << CATALOGS) | (1L << COLUMNS) | (1L << CURRENT_DATE) | (1L << CURRENT_TIME) | (1L << CURRENT_TIMESTAMP) | (1L << DAY) | (1L << DEBUG) | (1L << EXECUTABLE) | (1L << 
EXPLAIN) | (1L << FIRST) | (1L << FORMAT) | (1L << FULL) | (1L << FUNCTIONS) | (1L << GRAPHVIZ) | (1L << HOUR) | (1L << INTERVAL) | (1L << LAST) | (1L << LIMIT) | (1L << MAPPED) | (1L << MINUTE) | (1L << MONTH))) != 0) || ((((_la - 70)) & ~0x3f) == 0 && ((1L << (_la - 70)) & ((1L << (OPTIMIZED - 70)) | (1L << (PARSED - 70)) | (1L << (PHYSICAL - 70)) | (1L << (PIVOT - 70)) | (1L << (PLAN - 70)) | (1L << (RLIKE - 70)) | (1L << (QUERY - 70)) | (1L << (SCHEMAS - 70)) | (1L << (SECOND - 70)) | (1L << (SHOW - 70)) | (1L << (SYS - 70)) | (1L << (TABLES - 70)) | (1L << (TEXT - 70)) | (1L << (TOP - 70)) | (1L << (TYPE - 70)) | (1L << (TYPES - 70)) | (1L << (VERIFY - 70)) | (1L << (YEAR - 70)) | (1L << (IDENTIFIER - 70)) | (1L << (DIGIT_IDENTIFIER - 70)) | (1L << (QUOTED_IDENTIFIER - 70)))) != 0) || _la==BACKQUOTED_IDENTIFIER) { { @@ -6500,6 +6636,7 @@ public final QuoteIdentifierContext quoteIdentifier() throws RecognitionExceptio enterRule(_localctx, 108, RULE_quoteIdentifier); try { setState(831); + _errHandler.sync(this); switch (_input.LA(1)) { case QUOTED_IDENTIFIER: _localctx = new QuotedIdentifierContext(_localctx); @@ -6586,6 +6723,7 @@ public final UnquoteIdentifierContext unquoteIdentifier() throws RecognitionExce enterRule(_localctx, 110, RULE_unquoteIdentifier); try { setState(836); + _errHandler.sync(this); switch (_input.LA(1)) { case IDENTIFIER: _localctx = new UnquotedIdentifierContext(_localctx); @@ -6717,6 +6855,7 @@ public final NumberContext number() throws RecognitionException { enterRule(_localctx, 112, RULE_number); try { setState(840); + _errHandler.sync(this); switch (_input.LA(1)) { case DECIMAL_VALUE: _localctx = new DecimalLiteralContext(_localctx); @@ -6782,7 +6921,10 @@ public final StringContext string() throws RecognitionException { _la = _input.LA(1); if ( !(_la==PARAM || _la==STRING) ) { _errHandler.recoverInline(this); - } else { + } + else { + if ( _input.LA(1)==Token.EOF ) matchedEOF = true; + _errHandler.reportMatch(this); consume(); } } @@ -6927,7 +7069,10 @@ public final NonReservedContext nonReserved() throws RecognitionException { _la = _input.LA(1); if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << ANALYZE) | (1L << ANALYZED) | (1L << CATALOGS) | (1L << COLUMNS) | (1L << CURRENT_DATE) | (1L << CURRENT_TIME) | (1L << CURRENT_TIMESTAMP) | (1L << DAY) | (1L << DEBUG) | (1L << EXECUTABLE) | (1L << EXPLAIN) | (1L << FIRST) | (1L << FORMAT) | (1L << FULL) | (1L << FUNCTIONS) | (1L << GRAPHVIZ) | (1L << HOUR) | (1L << INTERVAL) | (1L << LAST) | (1L << LIMIT) | (1L << MAPPED) | (1L << MINUTE) | (1L << MONTH))) != 0) || ((((_la - 70)) & ~0x3f) == 0 && ((1L << (_la - 70)) & ((1L << (OPTIMIZED - 70)) | (1L << (PARSED - 70)) | (1L << (PHYSICAL - 70)) | (1L << (PIVOT - 70)) | (1L << (PLAN - 70)) | (1L << (RLIKE - 70)) | (1L << (QUERY - 70)) | (1L << (SCHEMAS - 70)) | (1L << (SECOND - 70)) | (1L << (SHOW - 70)) | (1L << (SYS - 70)) | (1L << (TABLES - 70)) | (1L << (TEXT - 70)) | (1L << (TOP - 70)) | (1L << (TYPE - 70)) | (1L << (TYPES - 70)) | (1L << (VERIFY - 70)) | (1L << (YEAR - 70)))) != 0)) ) { _errHandler.recoverInline(this); - } else { + } + else { + if ( _input.LA(1)==Token.EOF ) matchedEOF = true; + _errHandler.reportMatch(this); consume(); } } @@ -6983,7 +7128,7 @@ private boolean primaryExpression_sempred(PrimaryExpressionContext _localctx, in } public static final String _serializedATN = - "\3\u0430\ud6d1\u8206\uad2d\u4417\uaef1\u8d80\uaadd\3\u008d\u0356\4\2\t"+ + "\3\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964\3\u008d\u0356\4\2\t"+ 
"\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7\4\b\t\b\4\t\t\t\4\n\t\n\4\13"+ "\t\13\4\f\t\f\4\r\t\r\4\16\t\16\4\17\t\17\4\20\t\20\4\21\t\21\4\22\t\22"+ "\4\23\t\23\4\24\t\24\4\25\t\25\4\26\t\26\4\27\t\27\4\30\t\30\4\31\t\31"+ @@ -7050,11 +7195,11 @@ private boolean primaryExpression_sempred(PrimaryExpressionContext _localctx, in "\2**\63\63\3\2\34\35\3\2yz\4\2\7\7\u0082\u0082\4\2\r\r\34\34\4\2\'\'9"+ "9\4\2\7\7\36\36\3\2{}\3\2rx\4\2&&]]\7\2\31\32\61\62?BTUgh\3\2\u0080\u0081"+ "\31\2\b\t\23\24\26\31\33\33\"\"$$\'\')),.\61\61\66\6699<=??AAHHLOQTWX"+ - "Z[_accgg\u03b9\2z\3\2\2\2\4}\3\2\2\2\6\u00f1\3\2\2\2\b\u00fc\3\2\2\2\n"+ - "\u0100\3\2\2\2\f\u0115\3\2\2\2\16\u011c\3\2\2\2\20\u011e\3\2\2\2\22\u0126"+ - "\3\2\2\2\24\u013e\3\2\2\2\26\u014b\3\2\2\2\30\u0155\3\2\2\2\32\u0164\3"+ - "\2\2\2\34\u0166\3\2\2\2\36\u016c\3\2\2\2 \u016f\3\2\2\2\"\u0171\3\2\2"+ - "\2$\u0179\3\2\2\2&\u0180\3\2\2\2(\u0192\3\2\2\2*\u01a3\3\2\2\2,\u01b3"+ + "Z[_accgg\2\u03b9\2z\3\2\2\2\4}\3\2\2\2\6\u00f1\3\2\2\2\b\u00fc\3\2\2\2"+ + "\n\u0100\3\2\2\2\f\u0115\3\2\2\2\16\u011c\3\2\2\2\20\u011e\3\2\2\2\22"+ + "\u0126\3\2\2\2\24\u013e\3\2\2\2\26\u014b\3\2\2\2\30\u0155\3\2\2\2\32\u0164"+ + "\3\2\2\2\34\u0166\3\2\2\2\36\u016c\3\2\2\2 \u016f\3\2\2\2\"\u0171\3\2"+ + "\2\2$\u0179\3\2\2\2&\u0180\3\2\2\2(\u0192\3\2\2\2*\u01a3\3\2\2\2,\u01b3"+ "\3\2\2\2.\u01d1\3\2\2\2\60\u01d3\3\2\2\2\62\u01de\3\2\2\2\64\u01e6\3\2"+ "\2\2\66\u01ed\3\2\2\28\u020e\3\2\2\2:\u021f\3\2\2\2<\u0222\3\2\2\2>\u0254"+ "\3\2\2\2@\u0256\3\2\2\2B\u0259\3\2\2\2D\u0263\3\2\2\2F\u0269\3\2\2\2H"+ From 8759c85a68dae2cd895771f2a0335cea16c6fbae Mon Sep 17 00:00:00 2001 From: William Brafford Date: Thu, 26 Aug 2021 08:56:13 -0400 Subject: [PATCH 019/128] Allow access to restricted system indices for reserved system roles (#76845) * Add system index patterns to TestRestrictedIndices A missing piece in #74212 was system index patterns in the tests for the ReservedRolesStore. Without these patterns, the tests did not accurately check whether a role was incorrectly accessing a system index that was not previously a restricted index. This commit adds all of the current system index patterns to the test class and adds restricted index access to the system roles that need it for tests to pass. 
* Preserve existing Kibana data telemetry privileges * Test that data telemetry can't access security and async indices --- .../authz/store/ReservedRolesStore.java | 24 ++- .../authz/store/ReservedRolesStoreTests.java | 16 +- .../security/test/TestRestrictedIndices.java | 168 ++++++++++++++---- 3 files changed, 159 insertions(+), 49 deletions(-) diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStore.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStore.java index 11ecb17b12f93..bd2de247fc245 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStore.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStore.java @@ -94,7 +94,7 @@ private static Map initializeReservedRoles() { RoleDescriptor.IndicesPrivileges.builder() .indices("*").privileges("monitor").allowRestrictedIndices(true).build(), RoleDescriptor.IndicesPrivileges.builder() - .indices(".kibana*").privileges("read").build() + .indices(".kibana*").privileges("read").allowRestrictedIndices(true).build() }, null, null, @@ -199,6 +199,7 @@ private static Map initializeReservedRoles() { new RoleDescriptor.IndicesPrivileges[] { RoleDescriptor.IndicesPrivileges.builder() .indices(".ml-anomalies*", ".ml-notifications*", ".ml-state*", ".ml-meta*", ".ml-stats-*") + .allowRestrictedIndices(true) // .ml-meta is a restricted index .privileges("view_index_metadata", "read").build(), RoleDescriptor.IndicesPrivileges.builder().indices(".ml-annotations*") .privileges("view_index_metadata", "read", "write").build() @@ -258,12 +259,13 @@ private static Map initializeReservedRoles() { .put("watcher_admin", new RoleDescriptor("watcher_admin", new String[] { "manage_watcher" }, new RoleDescriptor.IndicesPrivileges[] { RoleDescriptor.IndicesPrivileges.builder().indices(Watch.INDEX, TriggeredWatchStoreField.INDEX_NAME, - HistoryStoreField.INDEX_PREFIX + "*").privileges("read").build() }, + HistoryStoreField.INDEX_PREFIX + "*").privileges("read").allowRestrictedIndices(true).build() }, null, MetadataUtils.DEFAULT_RESERVED_METADATA)) .put("watcher_user", new RoleDescriptor("watcher_user", new String[] { "monitor_watcher" }, new RoleDescriptor.IndicesPrivileges[] { RoleDescriptor.IndicesPrivileges.builder().indices(Watch.INDEX) .privileges("read") + .allowRestrictedIndices(true) .build(), RoleDescriptor.IndicesPrivileges.builder().indices(HistoryStoreField.INDEX_PREFIX + "*") .privileges("read") @@ -272,6 +274,7 @@ private static Map initializeReservedRoles() { new RoleDescriptor.IndicesPrivileges[] { RoleDescriptor.IndicesPrivileges.builder().indices(".logstash*") .privileges("create", "delete", "index", "manage", "read") + .allowRestrictedIndices(true) .build() }, null, MetadataUtils.DEFAULT_RESERVED_METADATA)) .put("rollup_user", new RoleDescriptor("rollup_user", new String[] { "monitor_rollup" }, @@ -369,8 +372,9 @@ public static RoleDescriptor kibanaSystemRoleDescriptor(String name) { "cancel_task" }, new RoleDescriptor.IndicesPrivileges[] { + // System indices defined in KibanaPlugin RoleDescriptor.IndicesPrivileges.builder() - .indices(".kibana*", ".reporting-*").privileges("all").build(), + .indices(".kibana*", ".reporting-*").privileges("all").allowRestrictedIndices(true).build(), RoleDescriptor.IndicesPrivileges.builder() .indices(".monitoring-*").privileges("read", "read_cross_cluster").build(), 
RoleDescriptor.IndicesPrivileges.builder() @@ -381,19 +385,20 @@ public static RoleDescriptor kibanaSystemRoleDescriptor(String name) { .privileges("read").build(), RoleDescriptor.IndicesPrivileges.builder().indices(".ml-annotations*", ".ml-notifications*") .privileges("read", "write").build(), - // APM agent configuration + // APM agent configuration - system index defined in KibanaPlugin RoleDescriptor.IndicesPrivileges.builder() - .indices(".apm-agent-configuration").privileges("all").build(), - // APM custom link index creation + .indices(".apm-agent-configuration").privileges("all").allowRestrictedIndices(true).build(), + // APM custom link index creation - system index defined in KibanaPlugin RoleDescriptor.IndicesPrivileges.builder() - .indices(".apm-custom-link").privileges("all").build(), + .indices(".apm-custom-link").privileges("all").allowRestrictedIndices(true).build(), // APM telemetry queries APM indices in kibana task runner RoleDescriptor.IndicesPrivileges.builder() .indices("apm-*") .privileges("read", "read_cross_cluster").build(), - // Data telemetry reads mappings, metadata and stats of indices + // Data telemetry reads mappings, metadata and stats of indices (excluding security and async search indices) RoleDescriptor.IndicesPrivileges.builder() - .indices("*") + .indices("/@&~(\\.security.*)&~(\\.async-search.*)/") + .allowRestrictedIndices(true) .privileges("view_index_metadata", "monitor").build(), // Endpoint diagnostic information. Kibana reads from these indices to send telemetry RoleDescriptor.IndicesPrivileges.builder() @@ -403,6 +408,7 @@ public static RoleDescriptor kibanaSystemRoleDescriptor(String name) { // Fleet Server indices. Kibana read and write to this indice to manage Elastic Agents RoleDescriptor.IndicesPrivileges.builder() .indices(".fleet*") + .allowRestrictedIndices(true) .privileges("all").build(), // Legacy "Alerts as data" used in Security Solution. // Kibana user creates these indices; reads / writes to them. 
diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java index f6feaa04c50de..5651b254ead84 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java @@ -532,7 +532,10 @@ public void testKibanaSystemRole() { // Data telemetry reads mappings, metadata and stats of indices - Arrays.asList(randomAlphaOfLengthBetween(8, 24), "packetbeat-*", "logs-*").forEach((index) -> { + Arrays.asList(randomAlphaOfLengthBetween(8, 24), "packetbeat-*", "logs-*", + // check system indices other than .security* and .async-search* + ".watches", ".triggered-watches", ".tasks", ".enrich" + ).forEach((index) -> { logger.info("index name [{}]", index); assertThat(kibanaRole.indices().allowedIndicesMatcher(GetIndexAction.NAME).test(mockIndexAbstraction(index)), is(true)); assertThat(kibanaRole.indices().allowedIndicesMatcher(GetMappingsAction.NAME).test(mockIndexAbstraction(index)), is(true)); @@ -550,6 +553,17 @@ public void testKibanaSystemRole() { assertThat(kibanaRole.indices().allowedIndicesMatcher(READ_CROSS_CLUSTER_NAME).test(mockIndexAbstraction(index)), is(false)); }); + // Data telemetry does not have access to security and async search + RestrictedIndicesNames.RESTRICTED_NAMES.forEach((index) -> { + logger.info("index name [{}]", index); + assertThat(kibanaRole.indices().allowedIndicesMatcher(GetIndexAction.NAME).test(mockIndexAbstraction(index)), is(false)); + assertThat(kibanaRole.indices().allowedIndicesMatcher(GetMappingsAction.NAME).test(mockIndexAbstraction(index)), is(false)); + assertThat(kibanaRole.indices().allowedIndicesMatcher(IndicesStatsAction.NAME).test(mockIndexAbstraction(index)), is(false)); + assertThat(kibanaRole.indices().allowedIndicesMatcher("indices:foo").test(mockIndexAbstraction(index)), is(false)); + assertThat(kibanaRole.indices().allowedIndicesMatcher("indices:bar").test(mockIndexAbstraction(index)), is(false)); + assertThat(kibanaRole.indices().allowedIndicesMatcher(READ_CROSS_CLUSTER_NAME).test(mockIndexAbstraction(index)), is(false)); + }); + // read-only datastream for Endpoint policy responses Arrays.asList("metrics-endpoint.policy-" + randomAlphaOfLength(randomIntBetween(0, 13))).forEach((index) -> { assertThat(kibanaRole.indices().allowedIndicesMatcher("indices:foo").test(mockIndexAbstraction(index)), is(false)); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/test/TestRestrictedIndices.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/test/TestRestrictedIndices.java index 98a8991bb7228..bc81538a9fdbb 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/test/TestRestrictedIndices.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/test/TestRestrictedIndices.java @@ -23,6 +23,7 @@ import java.io.IOException; import java.io.UncheckedIOException; +import java.util.HashMap; import java.util.List; import java.util.Map; @@ -39,51 +40,140 @@ public class TestRestrictedIndices { public static final IndexNameExpressionResolver RESOLVER; static { - SystemIndices systemIndices = new SystemIndices(Map.of( - "security-mock", + Map featureMap = new HashMap<>(); + featureMap.put("security-mock", new 
Feature("security-mock", "fake security for test restricted indices", List.of( - SystemIndexDescriptor.builder() - // This can't just be `.security-*` because that would overlap with the tokens index pattern - .setIndexPattern(".security-[0-9]+") - .setPrimaryIndex(RestrictedIndicesNames.INTERNAL_SECURITY_MAIN_INDEX_7) - .setDescription("Contains Security configuration") - .setMappings(mockMappings()) - .setSettings(Settings.EMPTY) - .setAliasName(SECURITY_MAIN_ALIAS) - .setIndexFormat(7) - .setVersionMetaKey("version") - .setOrigin(SECURITY_ORIGIN) - .setThreadPools(ExecutorNames.CRITICAL_SYSTEM_INDEX_THREAD_POOLS) - .build(), - SystemIndexDescriptor.builder() - .setIndexPattern(".security-tokens-[0-9]+") - .setPrimaryIndex(RestrictedIndicesNames.INTERNAL_SECURITY_TOKENS_INDEX_7) - .setDescription("Contains auth token data") - .setMappings(mockMappings()) - .setSettings(Settings.EMPTY) - .setAliasName(SECURITY_TOKENS_ALIAS) - .setIndexFormat(7) - .setVersionMetaKey("version") - .setOrigin(SECURITY_ORIGIN) - .setThreadPools(ExecutorNames.CRITICAL_SYSTEM_INDEX_THREAD_POOLS) - .build() - )), - "async-search-mock", + getMainSecurityDescriptor(), + getSecurityTokensDescriptor()))); + featureMap.put("async-search-mock", new Feature("async search mock", "fake async search for restricted indices", List.of( - SystemIndexDescriptor.builder() - .setIndexPattern(XPackPlugin.ASYNC_RESULTS_INDEX + "*") - .setDescription("Async search results") - .setPrimaryIndex(XPackPlugin.ASYNC_RESULTS_INDEX) - .setMappings(mockMappings()) - .setSettings(Settings.EMPTY) - .setVersionMetaKey("version") - .setOrigin(ASYNC_SEARCH_ORIGIN) - .build() - )))); + getAsyncSearchDescriptor()))); + featureMap.put("kibana-mock", + new Feature("kibana-mock", "fake kibana for testing restricted indices", List.of( + getKibanaSavedObjectsDescriptor(), + getReportingIndexDescriptor(), + getApmAgentConfigDescriptor(), + getApmCustomLinkDescriptor()))); + + // From here, we have very minimal mock features that only supply system index patterns, + // not settings or mock mappings. 
+ featureMap.put("enrich-mock", + new Feature("enrich-mock", "fake enrich for restricted indices tests", List.of( + new SystemIndexDescriptor(".enrich-*", "enrich pattern")))); + featureMap.put("fleet-mock", + new Feature("fleet-mock", "fake fleet for restricted indices tests", List.of( + new SystemIndexDescriptor(".fleet-actions~(-results*)", "fleet actions"), + new SystemIndexDescriptor(".fleet-agents*", "fleet agents"), + new SystemIndexDescriptor(".fleet-enrollment-api-keys*", "fleet enrollment"), + new SystemIndexDescriptor(".fleet-policies-[0-9]+", "fleet policies"), + new SystemIndexDescriptor(".fleet-policies-leader*", "fleet policies leader"), + new SystemIndexDescriptor(".fleet-servers*", "fleet servers"), + new SystemIndexDescriptor(".fleet-artifacts*", "fleet artifacts")))); + featureMap.put("ingest-geoip-mock", + new Feature("ingest-geoip-mock", "fake geoip for restricted indices tests", List.of( + new SystemIndexDescriptor(".geoip_databases", "geoip databases")))); + featureMap.put("logstash-mock", + new Feature("logstash-mock", "fake logstash for restricted indices tests", List.of( + new SystemIndexDescriptor(".logstash", "logstash")))); + featureMap.put("machine-learning-mock", + new Feature("machine-learning-mock", "fake machine learning for restricted indices tests", List.of( + new SystemIndexDescriptor(".ml-meta*", "machine learning meta"), + new SystemIndexDescriptor(".ml-config*", "machine learning config"), + new SystemIndexDescriptor(".ml-inference*", "machine learning inference")))); + featureMap.put("searchable-snapshots-mock", + new Feature("searchable-snapshots-mock", "fake searchable snapshots for restricted indices tests", List.of( + new SystemIndexDescriptor(".snapshot-blob-cache", "snapshot blob cache")))); + featureMap.put("transform-mock", + new Feature("transform-mock", "fake transform for restricted indices tests", List.of( + new SystemIndexDescriptor(".transform-internal-*", "transform internal")))); + featureMap.put("watcher-mock", + new Feature("watcher-mock", "fake watcher for restricted indices tests", List.of( + new SystemIndexDescriptor(".watches*", "watches"), + new SystemIndexDescriptor(".triggered-watches*", "triggered watches")))); + + SystemIndices systemIndices = new SystemIndices(featureMap); RESTRICTED_INDICES_AUTOMATON = systemIndices.getSystemNameAutomaton(); RESOLVER = TestIndexNameExpressionResolver.newInstance(systemIndices); } + private static SystemIndexDescriptor.Builder getInitializedDescriptorBuilder() { + return SystemIndexDescriptor.builder() + .setMappings(mockMappings()) + .setSettings(Settings.EMPTY) + .setVersionMetaKey("version"); + } + + private static SystemIndexDescriptor getMainSecurityDescriptor() { + return getInitializedDescriptorBuilder() + // This can't just be `.security-*` because that would overlap with the tokens index pattern + .setIndexPattern(".security-[0-9]+") + .setPrimaryIndex(RestrictedIndicesNames.INTERNAL_SECURITY_MAIN_INDEX_7) + .setDescription("Contains Security configuration") + .setAliasName(SECURITY_MAIN_ALIAS) + .setIndexFormat(7) + .setOrigin(SECURITY_ORIGIN) + .setThreadPools(ExecutorNames.CRITICAL_SYSTEM_INDEX_THREAD_POOLS) + .build(); + } + + private static SystemIndexDescriptor getSecurityTokensDescriptor() { + return getInitializedDescriptorBuilder() + .setIndexPattern(".security-tokens-[0-9]+") + .setPrimaryIndex(RestrictedIndicesNames.INTERNAL_SECURITY_TOKENS_INDEX_7) + .setDescription("Contains auth token data") + .setAliasName(SECURITY_TOKENS_ALIAS) + .setIndexFormat(7) + 
.setOrigin(SECURITY_ORIGIN) + .setThreadPools(ExecutorNames.CRITICAL_SYSTEM_INDEX_THREAD_POOLS) + .build(); + } + + private static SystemIndexDescriptor getAsyncSearchDescriptor() { + return getInitializedDescriptorBuilder() + .setIndexPattern(XPackPlugin.ASYNC_RESULTS_INDEX + "*") + .setDescription("Async search results") + .setPrimaryIndex(XPackPlugin.ASYNC_RESULTS_INDEX) + .setOrigin(ASYNC_SEARCH_ORIGIN) + .build(); + } + + private static SystemIndexDescriptor getKibanaSavedObjectsDescriptor() { + return SystemIndexDescriptor.builder() + .setIndexPattern(".kibana_*") + .setDescription("Kibana saved objects system index") + .setAliasName(".kibana") + .setType(SystemIndexDescriptor.Type.EXTERNAL_UNMANAGED) + .setAllowedElasticProductOrigins( List.of("kibana")) + .build(); + } + + private static SystemIndexDescriptor getReportingIndexDescriptor() { + return SystemIndexDescriptor.builder() + .setIndexPattern(".reporting-*") + .setDescription("system index for reporting") + .setType(SystemIndexDescriptor.Type.EXTERNAL_UNMANAGED) + .setAllowedElasticProductOrigins(List.of("kibana")) + .build(); + } + + private static SystemIndexDescriptor getApmAgentConfigDescriptor() { + return SystemIndexDescriptor.builder() + .setIndexPattern(".apm-agent-configuration") + .setDescription("system index for APM agent configuration") + .setType(SystemIndexDescriptor.Type.EXTERNAL_UNMANAGED) + .setAllowedElasticProductOrigins(List.of("kibana")) + .build(); + } + + private static SystemIndexDescriptor getApmCustomLinkDescriptor() { + return SystemIndexDescriptor.builder() + .setIndexPattern(".apm-custom-link") + .setDescription("system index for APM custom links") + .setType(SystemIndexDescriptor.Type.EXTERNAL_UNMANAGED) + .setAllowedElasticProductOrigins(List.of("kibana")) + .build(); + } + private TestRestrictedIndices() {} private static XContentBuilder mockMappings() { From 3c737341fe1e5d57bba618f9e01c01e0d5b8958c Mon Sep 17 00:00:00 2001 From: Stuart Tettemer Date: Thu, 26 Aug 2021 09:18:42 -0500 Subject: [PATCH 020/128] Script: Fields API converter tests (#76900) --- .../test/painless/40_fields_api.yml | 34 +++ .../index/fielddata/ScriptDocValues.java | 15 +- .../org/elasticsearch/script/Converters.java | 181 ++++++++--- .../elasticsearch/script/ConvertersTests.java | 283 ++++++++++++++++++ 4 files changed, 458 insertions(+), 55 deletions(-) create mode 100644 server/src/test/java/org/elasticsearch/script/ConvertersTests.java diff --git a/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/40_fields_api.yml b/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/40_fields_api.yml index df571349ecb97..eb02667536104 100644 --- a/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/40_fields_api.yml +++ b/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/40_fields_api.yml @@ -63,6 +63,40 @@ setup: - match: { hits.hits.1._id: d1 } - match: { hits.hits.2._id: d2 } +--- +"script fields api for dates": + - do: + indices.create: + index: test + body: + settings: + number_of_shards: 2 + mappings: + properties: + dt: + type: date_nanos + - do: + index: + index: test + id: d1 + body: {"dt": "2021-08-24T18:45:52.123456789Z" } + - do: + indices.refresh: {} + - do: + search: + rest_total_hits_as_int: true + index: test + body: + query: { "match_all": {} } + sort: [ { dt: asc } ] + script_fields: + date_field: + script: + source: "field('dt').getLong(100L)" + - match: { hits.total: 1 } + - match: { hits.hits.0._id: d1 } + 
- match: { hits.hits.0.fields.date_field.0: 1629830752123456789 } + --- "script score fields api": - do: diff --git a/server/src/main/java/org/elasticsearch/index/fielddata/ScriptDocValues.java b/server/src/main/java/org/elasticsearch/index/fielddata/ScriptDocValues.java index dff93fa777402..37b1dad95ffaa 100644 --- a/server/src/main/java/org/elasticsearch/index/fielddata/ScriptDocValues.java +++ b/server/src/main/java/org/elasticsearch/index/fielddata/ScriptDocValues.java @@ -17,6 +17,7 @@ import org.elasticsearch.common.geo.GeoUtils; import org.elasticsearch.common.time.DateUtils; import org.elasticsearch.geometry.utils.Geohash; +import org.elasticsearch.script.Converters; import org.elasticsearch.script.Field; import org.elasticsearch.script.FieldValues; import org.elasticsearch.script.InvalidConversion; @@ -25,7 +26,6 @@ import java.io.IOException; import java.time.Instant; import java.time.ZoneOffset; -import java.time.temporal.ChronoUnit; import java.util.AbstractList; import java.util.Arrays; import java.util.Comparator; @@ -240,11 +240,10 @@ void refreshArray() throws IOException { @Override public long getLongValue() { throwIfEmpty(); - Instant dt = dates[0].toInstant(); if (isNanos) { - return ChronoUnit.NANOS.between(java.time.Instant.EPOCH, dt); + return Converters.convertDateNanosToLong(dates[0]); } - return dt.toEpochMilli(); + return Converters.convertDateMillisToLong(dates[0]); } @Override @@ -587,13 +586,13 @@ private static boolean[] grow(boolean[] array, int minSize) { @Override public long getLongValue() { throwIfEmpty(); - return values[0] ? 1L : 0L; + return Converters.convertBooleanToLong(values[0]); } @Override public double getDoubleValue() { throwIfEmpty(); - return values[0] ? 1.0D : 0.0D; + return Converters.convertBooleanToDouble(values[0]); } @Override @@ -675,12 +674,12 @@ public final String getValue() { @Override public long getLongValue() { - return Long.parseLong(get(0)); + return Converters.convertStringToLong(get(0)); } @Override public double getDoubleValue() { - return Double.parseDouble(get(0)); + return Converters.convertStringToDouble(get(0)); } @Override diff --git a/server/src/main/java/org/elasticsearch/script/Converters.java b/server/src/main/java/org/elasticsearch/script/Converters.java index fa96c4dc5b827..79e749725daae 100644 --- a/server/src/main/java/org/elasticsearch/script/Converters.java +++ b/server/src/main/java/org/elasticsearch/script/Converters.java @@ -48,31 +48,31 @@ public class Converters { static final Converter LONG; static { - BIGINTEGER = new Converter<>() { + BIGINTEGER = new Converter() { @Override public BigIntegerField convert(Field sourceField) { if (sourceField instanceof LongField) { - return LongToBigInteger((LongField) sourceField); + return convertLongToBigIntegerField((LongField) sourceField); } if (sourceField instanceof DoubleField) { - return DoubleToBigInteger((DoubleField) sourceField); + return convertDoubleToBigIntegerField((DoubleField) sourceField); } if (sourceField instanceof StringField) { - return StringToBigInteger((StringField) sourceField); + return convertStringToBigIntegerField((StringField) sourceField); } if (sourceField instanceof DateMillisField) { - return LongToBigInteger(DateMillisToLong((DateMillisField) sourceField)); + return convertLongToBigIntegerField(convertDateMillisToLongField((DateMillisField) sourceField)); } if (sourceField instanceof DateNanosField) { - return LongToBigInteger(DateNanosToLong((DateNanosField) sourceField)); + return 
convertLongToBigIntegerField(convertDateNanosToLongField((DateNanosField) sourceField)); } if (sourceField instanceof BooleanField) { - return LongToBigInteger(BooleanToLong((BooleanField) sourceField)); + return convertLongToBigIntegerField(convertBooleanToLongField((BooleanField) sourceField)); } throw new InvalidConversion(sourceField.getClass(), getFieldClass()); @@ -89,31 +89,31 @@ public Class getTargetClass() { } }; - LONG = new Converter<>() { + LONG = new Converter() { @Override public LongField convert(Field sourceField) { if (sourceField instanceof DoubleField) { - return DoubleToLong((DoubleField) sourceField); + return convertDoubleToLongField((DoubleField) sourceField); } if (sourceField instanceof StringField) { - return StringToLong((StringField) sourceField); + return convertStringToLongField((StringField) sourceField); } if (sourceField instanceof DateMillisField) { - return DateMillisToLong((DateMillisField) sourceField); + return convertDateMillisToLongField((DateMillisField) sourceField); } if (sourceField instanceof DateNanosField) { - return DateNanosToLong((DateNanosField) sourceField); + return convertDateNanosToLongField((DateNanosField) sourceField); } if (sourceField instanceof BigIntegerField) { - return BigIntegerToLong((BigIntegerField) sourceField); + return convertBigIntegerToLongField((BigIntegerField) sourceField); } if (sourceField instanceof BooleanField) { - return BooleanToLong((BooleanField) sourceField); + return convertBooleanToLongField((BooleanField) sourceField); } throw new InvalidConversion(sourceField.getClass(), getFieldClass()); @@ -134,9 +134,9 @@ public Class getTargetClass() { // No instances, please private Converters() {} - static BigIntegerField LongToBigInteger(LongField sourceField) { + static BigIntegerField convertLongToBigIntegerField(LongField sourceField) { FieldValues fv = sourceField.getFieldValues(); - return new BigIntegerField(sourceField.getName(), new DelegatingFieldValues<>(fv) { + return new BigIntegerField(sourceField.getName(), new DelegatingFieldValues(fv) { @Override public List getValues() { return values.getValues().stream().map(BigInteger::valueOf).collect(Collectors.toList()); @@ -149,48 +149,48 @@ public BigInteger getNonPrimitiveValue() { }); } - static BigIntegerField DoubleToBigInteger(DoubleField sourceField) { + static BigIntegerField convertDoubleToBigIntegerField(DoubleField sourceField) { FieldValues fv = sourceField.getFieldValues(); - return new BigIntegerField(sourceField.getName(), new DelegatingFieldValues<>(fv) { + return new BigIntegerField(sourceField.getName(), new DelegatingFieldValues(fv) { @Override public List getValues() { - return values.getValues().stream().map( - dbl -> BigInteger.valueOf(dbl.longValue()) - ).collect(Collectors.toList()); + return values.getValues().stream().map(Converters::convertDoubleToBigInteger).collect(Collectors.toList()); } @Override public BigInteger getNonPrimitiveValue() { - return BigInteger.valueOf(values.getLongValue()); + return convertDoubleToBigInteger(values.getDoubleValue()); } }); } - static BigIntegerField StringToBigInteger(StringField sourceField) { + static BigIntegerField convertStringToBigIntegerField(StringField sourceField) { FieldValues fv = sourceField.getFieldValues(); return new BigIntegerField(sourceField.getName(), new DelegatingFieldValues(fv) { - protected BigInteger parseNumber(String str) { - try { - return new BigInteger(str); - } catch (NumberFormatException e) { - return new BigDecimal(str).toBigInteger(); - } - } - @Override 
public List getValues() { - // TODO(stu): this may throw - return values.getValues().stream().map(this::parseNumber).collect(Collectors.toList()); + // This may throw NumberFormatException, should we catch and truncate the List? (#76951) + return values.getValues().stream().map(Converters::convertStringToBigInteger).collect(Collectors.toList()); } @Override public BigInteger getNonPrimitiveValue() { - return parseNumber(values.getNonPrimitiveValue()); + return convertStringToBigInteger(values.getNonPrimitiveValue()); + } + + @Override + public long getLongValue() { + return getNonPrimitiveValue().longValue(); + } + + @Override + public double getDoubleValue() { + return getNonPrimitiveValue().doubleValue(); } }); } - static LongField BigIntegerToLong(BigIntegerField sourceField) { + static LongField convertBigIntegerToLongField(BigIntegerField sourceField) { FieldValues fv = sourceField.getFieldValues(); return new LongField(sourceField.getName(), new DelegatingFieldValues(fv) { @Override @@ -200,12 +200,22 @@ public List getValues() { @Override public Long getNonPrimitiveValue() { - return values.getLongValue(); + return convertBigIntegerToLong(values.getNonPrimitiveValue()); + } + + @Override + public long getLongValue() { + return convertBigIntegerToLong(values.getNonPrimitiveValue()); + } + + @Override + public double getDoubleValue() { + return convertBigIntegerToLong(values.getNonPrimitiveValue()); } }); } - static LongField BooleanToLong(BooleanField sourceField) { + static LongField convertBooleanToLongField(BooleanField sourceField) { FieldValues fv = sourceField.getFieldValues(); return new LongField(sourceField.getName(), new DelegatingFieldValues(fv) { @Override @@ -215,12 +225,22 @@ public List getValues() { @Override public Long getNonPrimitiveValue() { - return getLongValue(); + return convertBooleanToLong(values.getNonPrimitiveValue()); + } + + @Override + public long getLongValue() { + return convertBooleanToLong(values.getNonPrimitiveValue()); + } + + @Override + public double getDoubleValue() { + return convertBooleanToLong(values.getNonPrimitiveValue()); } }); } - static LongField DateMillisToLong(DateMillisField sourceField) { + static LongField convertDateMillisToLongField(DateMillisField sourceField) { FieldValues fv = sourceField.getFieldValues(); return new LongField(sourceField.getName(), new DelegatingFieldValues(fv) { @Override @@ -230,12 +250,22 @@ public List getValues() { @Override public Long getNonPrimitiveValue() { - return values.getNonPrimitiveValue().toInstant().toEpochMilli(); + return convertDateMillisToLong(values.getNonPrimitiveValue()); + } + + @Override + public long getLongValue() { + return convertDateMillisToLong(values.getNonPrimitiveValue()); + } + + @Override + public double getDoubleValue() { + return convertDateMillisToLong(values.getNonPrimitiveValue()); } }); } - static LongField DateNanosToLong(DateNanosField sourceField) { + static LongField convertDateNanosToLongField(DateNanosField sourceField) { FieldValues fv = sourceField.getFieldValues(); return new LongField(sourceField.getName(), new DelegatingFieldValues(fv) { protected long nanoLong(JodaCompatibleZonedDateTime dt) { @@ -249,12 +279,22 @@ public List getValues() { @Override public Long getNonPrimitiveValue() { - return ChronoUnit.NANOS.between(Instant.EPOCH, values.getNonPrimitiveValue().toInstant()); + return convertDateNanosToLong(values.getNonPrimitiveValue()); + } + + @Override + public long getLongValue() { + return convertDateNanosToLong(values.getNonPrimitiveValue()); + 
} + + @Override + public double getDoubleValue() { + return convertDateNanosToLong(values.getNonPrimitiveValue()); } }); } - static LongField DoubleToLong(DoubleField sourceField) { + static LongField convertDoubleToLongField(DoubleField sourceField) { FieldValues fv = sourceField.getFieldValues(); return new LongField(sourceField.getName(), new DelegatingFieldValues(fv) { @Override @@ -269,7 +309,7 @@ public Long getNonPrimitiveValue() { }); } - static LongField StringToLong(StringField sourceField) { + static LongField convertStringToLongField(StringField sourceField) { FieldValues fv = sourceField.getFieldValues(); return new LongField(sourceField.getName(), new DelegatingFieldValues(fv) { @Override @@ -279,21 +319,68 @@ public List getValues() { @Override public Long getNonPrimitiveValue() { - return Long.parseLong(values.getNonPrimitiveValue()); + return convertStringToLong(values.getNonPrimitiveValue()); } @Override public long getLongValue() { - return Long.parseLong(values.getNonPrimitiveValue()); + return convertStringToLong(values.getNonPrimitiveValue()); } @Override public double getDoubleValue() { - return getLongValue(); + // conversion is to LongField, doesn't make sense to parse a Double out of the String here. + return convertStringToLong(values.getNonPrimitiveValue()); } }); } + public static long convertBigIntegerToLong(BigInteger bigInteger) { + return bigInteger.longValue(); + } + + public static double convertBigIntegerToDouble(BigInteger bigInteger) { + return bigInteger.doubleValue(); + } + + public static long convertBooleanToLong(boolean bool) { + return bool ? 1L : 0L; + } + + public static double convertBooleanToDouble(boolean bool) { + return bool ? 1.0d : 0.0d; + } + + public static long convertDateMillisToLong(JodaCompatibleZonedDateTime dt) { + return dt.toInstant().toEpochMilli(); + } + + public static long convertDateNanosToLong(JodaCompatibleZonedDateTime dt) { + return ChronoUnit.NANOS.between(Instant.EPOCH, dt.toInstant()); + } + + public static BigInteger convertDoubleToBigInteger(double dbl) { + return BigDecimal.valueOf(dbl).toBigInteger(); + } + + // String + public static BigInteger convertStringToBigInteger(String str) { + try { + return new BigInteger(str); + } catch (NumberFormatException e) { + return new BigDecimal(str).toBigInteger(); + } + } + + public static double convertStringToDouble(String str) { + return Double.parseDouble(str); + } + + public static long convertStringToLong(String str) { + return Long.parseLong(str); + } + + /** * Helper for creating {@link Converter} classes which delegates all un-overridden methods to the underlying * {@link FieldValues}. diff --git a/server/src/test/java/org/elasticsearch/script/ConvertersTests.java b/server/src/test/java/org/elasticsearch/script/ConvertersTests.java new file mode 100644 index 0000000000000..ee3ed995c569d --- /dev/null +++ b/server/src/test/java/org/elasticsearch/script/ConvertersTests.java @@ -0,0 +1,283 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.script; + +import org.elasticsearch.common.geo.GeoPoint; +import org.elasticsearch.test.ESTestCase; + +import java.math.BigInteger; +import java.time.Instant; +import java.time.ZoneOffset; +import java.util.List; +import java.util.stream.Collectors; +import java.util.stream.DoubleStream; +import java.util.stream.LongStream; + +public class ConvertersTests extends ESTestCase { + public void testLongToBigIntegerToLong() { + long[] raw = { randomLong(), Long.MIN_VALUE, Long.MAX_VALUE, ((long) Integer.MIN_VALUE - 1), ((long) Integer.MAX_VALUE + 1), + -1L, 0L, 1L }; + Field src = new Field.LongField("", new FieldValues() { + @Override + public boolean isEmpty() { + return false; + } + + @Override + public int size() { + return raw.length; + } + + @Override + public List getValues() { + return LongStream.of(raw).boxed().collect(Collectors.toList()); + } + + @Override + public Long getNonPrimitiveValue() { + return raw[0]; + } + + @Override + public long getLongValue() { + return raw[0]; + } + + @Override + public double getDoubleValue() { + return raw[0]; + } + }); + + Field dst = src.as(Field.BigInteger); + + List expected = LongStream.of(raw).mapToObj(BigInteger::valueOf).collect(Collectors.toList()); + assertEquals(expected, dst.getValues()); + assertEquals(expected.get(0), dst.getValue(null)); + // dst has data so a junk default value should be ignored + assertEquals(raw[0], dst.getLong(10)); + assertEquals((double) raw[0], dst.getDouble(10.0d), 0.1d); + + Field dstLong = dst.as(Field.Long); + assertEquals(LongStream.of(raw).boxed().collect(Collectors.toList()), dstLong.getValues()); + assertEquals(Long.valueOf(raw[0]), dstLong.getValue(null)); + assertEquals(raw[0], dstLong.getLong(10)); + assertEquals((double) raw[0], dstLong.getDouble(10.0d), 0.1d); + } + + public void testDoubleTo() { + double[] raw = { Double.MAX_VALUE, Double.MIN_VALUE, ((double) Float.MAX_VALUE) * 10d, ((double) Float.MIN_VALUE), 0.1d, + Long.MAX_VALUE, Long.MIN_VALUE }; + Field src = new Field.DoubleField("", new FieldValues() { + @Override + public boolean isEmpty() { + return false; + } + + @Override + public int size() { + return raw.length; + } + + @Override + public List getValues() { + return DoubleStream.of(raw).boxed().collect(Collectors.toList()); + } + + @Override + public Double getNonPrimitiveValue() { + return raw[0]; + } + + @Override + public long getLongValue() { + return (long) raw[0]; + } + + @Override + public double getDoubleValue() { + return raw[0]; + } + }); + + Field dst = src.as(Field.BigInteger); + BigInteger maxDouble = new BigInteger("17976931348623157" + "0".repeat(292)); + List expected = List.of(maxDouble, BigInteger.ZERO, new BigInteger("34028234663852886" + "0".repeat(23)), + BigInteger.ZERO, BigInteger.ZERO, + new BigInteger("9223372036854776000"), // Long.MAX_VALUE: 9223372036854775807 + new BigInteger("-9223372036854776000")); // Long.MIN_VALUE: -9223372036854775808 + assertEquals(expected, dst.getValues()); + assertEquals(expected.get(0), dst.getValue(null)); + assertEquals(Long.MAX_VALUE, dst.getLong(10)); + assertEquals(Double.MAX_VALUE, dst.getDouble(10.0d), 0.1d); + + Field lng = src.as(Field.Long); + List lngExpected = List.of(Long.MAX_VALUE, 0L, Long.MAX_VALUE, 0L, 0L, Long.MAX_VALUE, Long.MIN_VALUE); + assertEquals(lngExpected, lng.getValues()); + assertEquals(Long.valueOf(Long.MAX_VALUE), lng.getValue(null)); + assertEquals(Long.MAX_VALUE, lng.getLong(10)); + assertEquals(Double.MAX_VALUE, lng.getDouble(10.0d), 0.1d); + } + + public void 
testStringToBigInteger() { + List raw = List.of(Long.MAX_VALUE + "0", randomLong() + "", Long.MIN_VALUE + "0", Double.MAX_VALUE + "", + Double.MIN_VALUE + ""); + Field src = new Field.StringField("", new ListFieldValues<>(raw)); + + Field dst = src.as(Field.BigInteger); + BigInteger maxDouble = new BigInteger("17976931348623157" + "0".repeat(292)); + List expected = List.of(new BigInteger(raw.get(0)), new BigInteger(raw.get(1)), new BigInteger(raw.get(2)), maxDouble, + BigInteger.ZERO); + assertEquals(expected, dst.getValues()); + assertEquals(expected.get(0), dst.getValue(null)); + assertEquals(-10L, dst.getLong(10)); // overflow + assertEquals(9.223372036854776E19, dst.getDouble(10.0d), 0.1d); + } + + public void testStringToLong() { + long rand = randomLong(); + List raw = List.of(rand + "", Long.MAX_VALUE + "", Long.MIN_VALUE + "", "0", "100"); + Field src = new Field.StringField("", new ListFieldValues<>(raw)); + + Field dst = src.as(Field.Long); + assertEquals(List.of(rand, Long.MAX_VALUE, Long.MIN_VALUE, 0L, 100L), dst.getValues()); + assertEquals(Long.valueOf(rand), dst.getValue(null)); + assertEquals(rand, dst.getLong(10)); // overflow + assertEquals(rand + 0.0d, dst.getDouble(10.0d), 0.9d); + } + + public void testBooleanTo() { + List raw = List.of(Boolean.TRUE, Boolean.FALSE); + Field src = new Field.BooleanField("", new ListFieldValues<>(raw)); + + Field dst = src.as(Field.BigInteger); + assertEquals(List.of(BigInteger.ONE, BigInteger.ZERO), dst.getValues()); + assertEquals(BigInteger.ONE, dst.getValue(null)); + assertEquals(1L, dst.getLong(10L)); + assertEquals(1.0d, dst.getDouble(1234.0d), 0.1d); + + Field dstLong = src.as(Field.Long); + assertEquals(List.of(1L, 0L), dstLong.getValues()); + assertEquals(Long.valueOf(1), dstLong.getValue(null)); + assertEquals(1L, dstLong.getLong(10L)); + assertEquals(1.0d, dstLong.getDouble(1234.0d), 0.1d); + + List rawRev = List.of(Boolean.FALSE, Boolean.TRUE); + src = new Field.BooleanField("", new ListFieldValues<>(rawRev)); + dst = src.as(Field.BigInteger); + + assertEquals(List.of(BigInteger.ZERO, BigInteger.ONE), dst.getValues()); + assertEquals(BigInteger.ZERO, dst.getValue(null)); + assertEquals(0L, dst.getLong(10L)); + assertEquals(0.0d, dst.getDouble(1234.0d), 0.1d); + + dstLong = src.as(Field.Long); + assertEquals(List.of(0L, 1L), dstLong.getValues()); + assertEquals(Long.valueOf(0), dstLong.getValue(null)); + assertEquals(0L, dstLong.getLong(10L)); + assertEquals(0.0d, dstLong.getDouble(1234.0d), 0.1d); + } + + public void testInvalidFieldConversion() { + Field src = new Field.GeoPointField("", new ListFieldValues<>(List.of(new GeoPoint(0, 0)))); + InvalidConversion ic = expectThrows(InvalidConversion.class, () -> src.as(Field.BigInteger)); + assertEquals("Cannot convert from [GeoPointField] using converter [BigIntegerField]", ic.getMessage()); + + ic = expectThrows(InvalidConversion.class, () -> src.as(Field.Long)); + assertEquals("Cannot convert from [GeoPointField] using converter [LongField]", ic.getMessage()); + } + + public void testDateMillisTo() { + long[] rawMilli = { 1629830752000L, 0L, 2040057952000L, -6106212564000L}; + List raw = List.of( + new JodaCompatibleZonedDateTime(Instant.ofEpochMilli(rawMilli[0]), ZoneOffset.ofHours(-7)), + new JodaCompatibleZonedDateTime(Instant.ofEpochMilli(rawMilli[1]), ZoneOffset.ofHours(-6)), + new JodaCompatibleZonedDateTime(Instant.ofEpochMilli(rawMilli[2]), ZoneOffset.ofHours(0)), + new JodaCompatibleZonedDateTime(Instant.ofEpochMilli(rawMilli[3]), ZoneOffset.ofHours(-5)) + ); + Field 
src = new Field.DateMillisField("", new ListFieldValues<>(raw)); + + List expectedBigInteger = LongStream.of(rawMilli).mapToObj(BigInteger::valueOf).collect(Collectors.toList()); + Field dstBigInteger = src.as(Field.BigInteger); + assertEquals(expectedBigInteger, dstBigInteger.getValues()); + assertEquals(expectedBigInteger.get(0), dstBigInteger.getValue(null)); + assertEquals(rawMilli[0], dstBigInteger.getLong(-1000L)); + assertEquals((double) rawMilli[0], dstBigInteger.getDouble(-1234.5d), 1.1d); + + Field dstLong = src.as(Field.Long); + assertEquals(LongStream.of(rawMilli).boxed().collect(Collectors.toList()), dstLong.getValues()); + assertEquals(LongStream.of(rawMilli).boxed().collect(Collectors.toList()), dstLong.getValues()); + assertEquals(Long.valueOf(rawMilli[0]), dstLong.getValue(-100L)); + assertEquals(rawMilli[0], dstLong.getLong(-100L)); + assertEquals((double) rawMilli[0], dstLong.getDouble(-1234.5d), 1.1d); + } + + public void testDateNanoTo() { + long[] rawNanos = { 1629830752000123L, 0L, 2040057952000456L, -6106212564000789L}; + List raw = List.of( + new JodaCompatibleZonedDateTime(Instant.EPOCH.plusNanos(rawNanos[0]), ZoneOffset.ofHours(-7)), + new JodaCompatibleZonedDateTime(Instant.EPOCH.plusNanos(rawNanos[1]), ZoneOffset.ofHours(-6)), + new JodaCompatibleZonedDateTime(Instant.EPOCH.plusNanos(rawNanos[2]), ZoneOffset.ofHours(0)), + new JodaCompatibleZonedDateTime(Instant.EPOCH.plusNanos(rawNanos[3]), ZoneOffset.ofHours(-5)) + ); + Field src = new Field.DateNanosField("", new ListFieldValues<>(raw)); + + List expectedBigInteger = LongStream.of(rawNanos).mapToObj(BigInteger::valueOf).collect(Collectors.toList()); + Field dstBigInteger = src.as(Field.BigInteger); + assertEquals(expectedBigInteger, dstBigInteger.getValues()); + assertEquals(expectedBigInteger.get(0), dstBigInteger.getValue(null)); + assertEquals(rawNanos[0], dstBigInteger.getLong(-1000L)); + assertEquals((double) rawNanos[0], dstBigInteger.getDouble(-1234.5d), 1.1d); + + Field dstLong = src.as(Field.Long); + assertEquals(LongStream.of(rawNanos).boxed().collect(Collectors.toList()), dstLong.getValues()); + assertEquals(LongStream.of(rawNanos).boxed().collect(Collectors.toList()), dstLong.getValues()); + assertEquals(Long.valueOf(rawNanos[0]), dstLong.getValue(-100L)); + assertEquals(rawNanos[0], dstLong.getLong(-100L)); + assertEquals((double) rawNanos[0], dstLong.getDouble(-1234.5d), 1.1d); + } + + static class ListFieldValues implements FieldValues { + final List values; + + ListFieldValues(List values) { + this.values = values; + } + + @Override + public boolean isEmpty() { + return values.isEmpty(); + } + + @Override + public int size() { + return values.size(); + } + + @Override + public List getValues() { + return values; + } + + @Override + public T getNonPrimitiveValue() { + return values.get(0); + } + + @Override + public long getLongValue() { + return 0; + } + + @Override + public double getDoubleValue() { + return 0; + } + } +} From 4432b39112f185e01ecbb3b2b928856b3fe7083e Mon Sep 17 00:00:00 2001 From: Howard Date: Thu, 26 Aug 2021 22:36:26 +0800 Subject: [PATCH 021/128] [DOCS] Fix formatting for `snapshot_meta` thread pool (#76973) --- docs/reference/modules/threadpool.asciidoc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/reference/modules/threadpool.asciidoc b/docs/reference/modules/threadpool.asciidoc index 6575c138747b7..e307a84f27499 100644 --- a/docs/reference/modules/threadpool.asciidoc +++ b/docs/reference/modules/threadpool.asciidoc @@ -44,7 +44,7 @@ There are 
several thread pools, but the important ones include: `snapshot_meta`:: For snapshot repository metadata read operations. Thread pool type is `scaling` with a keep-alive of `5m` and a max of `min(50, (`<>` pass:[ * ]3))`. + `# of allocated processors`>>`* 3))`. `warmer`:: For segment warm-up operations. Thread pool type is `scaling` with a From 1056c857ee41683e37e5996f85cd581dc8b7539a Mon Sep 17 00:00:00 2001 From: Adam Locke Date: Thu, 26 Aug 2021 13:16:55 -0400 Subject: [PATCH 022/128] [DOCS] Update combined fields wording (#76893) * [DOCS] Update combined fields wording * Clarifications from review feedback --- .../query-dsl/combined-fields-query.asciidoc | 59 ++++++++++--------- .../query-dsl/multi-match-query.asciidoc | 4 +- 2 files changed, 33 insertions(+), 30 deletions(-) diff --git a/docs/reference/query-dsl/combined-fields-query.asciidoc b/docs/reference/query-dsl/combined-fields-query.asciidoc index 42d1f45b0368b..9ea16bf8ef0a7 100644 --- a/docs/reference/query-dsl/combined-fields-query.asciidoc +++ b/docs/reference/query-dsl/combined-fields-query.asciidoc @@ -5,14 +5,14 @@ ++++ The `combined_fields` query supports searching multiple text fields as if their -contents had been indexed into one combined field. It takes a term-centric -view of the query: first it analyzes the query string into individual terms, +contents had been indexed into one combined field. The query takes a term-centric +view of the input string: first it analyzes the query string into individual terms, then looks for each term in any of the fields. This query is particularly useful when a match could span multiple text fields, for example the `title`, -`abstract` and `body` of an article: +`abstract`, and `body` of an article: [source,console] --------------------------------------------------- +---- GET /_search { "query": { @@ -23,31 +23,36 @@ GET /_search } } } --------------------------------------------------- +---- The `combined_fields` query takes a principled approach to scoring based on the simple BM25F formula described in http://www.staff.city.ac.uk/~sb317/papers/foundations_bm25_review.pdf[The Probabilistic Relevance Framework: BM25 and Beyond]. When scoring matches, the query combines term and collection statistics across -fields. This allows it to score each match as if the specified fields had been -indexed into a single combined field. (Note that this is a best attempt -- -`combined_fields` makes some approximations and scores will not obey this -model perfectly.) +fields to score each match as if the specified fields had been indexed into a +single, combined field. This scoring is a best attempt; `combined_fields` makes +some approximations and scores will not obey the BM25F model perfectly. +// tag::max-clause-limit[] [WARNING] .Field number limit =================================================== -There is a limit on the number of fields times terms that can be queried at -once. It is defined by the `indices.query.bool.max_clause_count` -<> which defaults to 4096. +By default, there is a limit to the number of clauses a query can contain. This +limit is defined by the +<> +setting, which defaults to `4096`. For `combined_fields` queries, the number of +clauses is calculated as the number of fields multiplied by the number of terms. =================================================== +// end::max-clause-limit[] ==== Per-field boosting -Individual fields can be boosted with the caret (`^`) notation: +Field boosts are interpreted according to the combined field model. 
For example, +if the `title` field has a boost of 2, the score is calculated as if each term +in the title appeared twice in the synthetic combined field. [source,console] --------------------------------------------------- +---- GET /_search { "query": { @@ -57,11 +62,8 @@ GET /_search } } } --------------------------------------------------- - -Field boosts are interpreted according to the combined field model. For example, -if the `title` field has a boost of 2, the score is calculated as if each term -in the title appeared twice in the synthetic combined field. +---- +<1> Individual fields can be boosted with the caret (`^`) notation. NOTE: The `combined_fields` query requires that field boosts are greater than or equal to 1.0. Field boosts are allowed to be fractional. @@ -149,7 +151,7 @@ term-centric: `operator` and `minimum_should_match` are applied per-term, instead of per-field. Concretely, a query like [source,console] --------------------------------------------------- +---- GET /_search { "query": { @@ -160,12 +162,15 @@ GET /_search } } } --------------------------------------------------- +---- -is executed as +is executed as: - +(combined("database", fields:["title" "abstract"])) - +(combined("systems", fields:["title", "abstract"])) +[source,txt] +---- ++(combined("database", fields:["title" "abstract"])) ++(combined("systems", fields:["title", "abstract"])) +---- In other words, each term must be present in at least one field for a document to match. @@ -178,8 +183,8 @@ to scoring based on the BM25F algorithm. [NOTE] .Custom similarities =================================================== -The `combined_fields` query currently only supports the `BM25` similarity -(which is the default unless a <> -is configured). <> are also not allowed. +The `combined_fields` query currently only supports the BM25 similarity, +which is the default unless a <> +is configured. <> are also not allowed. Using `combined_fields` in either of these cases will result in an error. =================================================== diff --git a/docs/reference/query-dsl/multi-match-query.asciidoc b/docs/reference/query-dsl/multi-match-query.asciidoc index 0b38b25ad80e1..bf9a4721a34cb 100644 --- a/docs/reference/query-dsl/multi-match-query.asciidoc +++ b/docs/reference/query-dsl/multi-match-query.asciidoc @@ -67,9 +67,7 @@ index settings, which in turn defaults to `*`. `*` extracts all fields in the ma are eligible to term queries and filters the metadata fields. All extracted fields are then combined to build a query. -WARNING: There is a limit on the number of fields times terms that can be queried -at once. It is defined by the `indices.query.bool.max_clause_count` <> -which defaults to 4096. +include::combined-fields-query.asciidoc[tag=max-clause-limit] [[multi-match-types]] [discrete] From a127154042a8bb36199d97ececf2962661b395b7 Mon Sep 17 00:00:00 2001 From: Joe Gallo Date: Thu, 26 Aug 2021 14:07:06 -0400 Subject: [PATCH 023/128] [DOCS] Fix the terms enum API docs for search_after (#76991) --- docs/reference/search/terms-enum.asciidoc | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/reference/search/terms-enum.asciidoc b/docs/reference/search/terms-enum.asciidoc index daa542c6ffd4a..fdf4f201aa3a4 100644 --- a/docs/reference/search/terms-enum.asciidoc +++ b/docs/reference/search/terms-enum.asciidoc @@ -98,8 +98,8 @@ Defaults to false. query rewrites to `match_none`. 
[[terms-enum-search_after-param]] -`string`:: +`search_after`:: (Optional, string) The string after which terms in the index should be returned. Allows for a form of -pagination if the last result from one request is passed as the search_after +pagination if the last result from one request is passed as the `search_after` parameter for a subsequent request. From 718b1635e2989f49f59a07e78dad887b82218ba2 Mon Sep 17 00:00:00 2001 From: Jack Conradson Date: Thu, 26 Aug 2021 12:09:43 -0700 Subject: [PATCH 024/128] Add a direct sub classes data structure to the Painless lookup (#76955) This change has two main components. The first is to have method/field resolution for compile-time and run-time use the same code path for now. This removes copying of member methods between super and sub classes and instead does a resolution through the class hierarchy. This allows us to correctly implement the next change. The second is a data structure that allows for the lookup of direct sub classes for all allow listed classes/interfaces within Painless. --- .../painless/lookup/PainlessLookup.java | 86 ++++++++----- .../lookup/PainlessLookupBuilder.java | 106 ++++++++-------- .../elasticsearch/painless/LookupTests.java | 116 ++++++++++++++++++ .../org.elasticsearch.painless.lookup | 35 ++++++ 4 files changed, 258 insertions(+), 85 deletions(-) create mode 100644 modules/lang-painless/src/test/java/org/elasticsearch/painless/LookupTests.java create mode 100644 modules/lang-painless/src/test/resources/org/elasticsearch/painless/org.elasticsearch.painless.lookup diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookup.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookup.java index dfab5dc8b324c..464ae43c8072f 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookup.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookup.java @@ -9,6 +9,10 @@ package org.elasticsearch.painless.lookup; import java.lang.invoke.MethodHandle; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashSet; +import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Set; @@ -25,6 +29,7 @@ public final class PainlessLookup { private final Map> javaClassNamesToClasses; private final Map> canonicalClassNamesToClasses; private final Map, PainlessClass> classesToPainlessClasses; + private final Map, Set>> classesToDirectSubClasses; private final Map painlessMethodKeysToImportedPainlessMethods; private final Map painlessMethodKeysToPainlessClassBindings; @@ -34,6 +39,7 @@ public final class PainlessLookup { Map> javaClassNamesToClasses, Map> canonicalClassNamesToClasses, Map, PainlessClass> classesToPainlessClasses, + Map, Set>> classesToDirectSubClasses, Map painlessMethodKeysToImportedPainlessMethods, Map painlessMethodKeysToPainlessClassBindings, Map painlessMethodKeysToPainlessInstanceBindings) { @@ -41,6 +47,7 @@ public final class PainlessLookup { Objects.requireNonNull(javaClassNamesToClasses); Objects.requireNonNull(canonicalClassNamesToClasses); Objects.requireNonNull(classesToPainlessClasses); + Objects.requireNonNull(classesToDirectSubClasses); Objects.requireNonNull(painlessMethodKeysToImportedPainlessMethods); Objects.requireNonNull(painlessMethodKeysToPainlessClassBindings); @@ -49,6 +56,7 @@ public final class PainlessLookup { this.javaClassNamesToClasses = javaClassNamesToClasses; this.canonicalClassNamesToClasses = 
Map.copyOf(canonicalClassNamesToClasses); this.classesToPainlessClasses = Map.copyOf(classesToPainlessClasses); + this.classesToDirectSubClasses = Map.copyOf(classesToDirectSubClasses); this.painlessMethodKeysToImportedPainlessMethods = Map.copyOf(painlessMethodKeysToImportedPainlessMethods); this.painlessMethodKeysToPainlessClassBindings = Map.copyOf(painlessMethodKeysToPainlessClassBindings); @@ -75,6 +83,10 @@ public Set> getClasses() { return classesToPainlessClasses.keySet(); } + public Set> getDirectSubClasses(Class superClass) { + return classesToDirectSubClasses.get(superClass); + } + public Set getImportedPainlessMethodsKeys() { return painlessMethodKeysToImportedPainlessMethods.keySet(); } @@ -142,16 +154,12 @@ public PainlessMethod lookupPainlessMethod(Class targetClass, boolean isStati targetClass = typeToBoxedType(targetClass); } - PainlessClass targetPainlessClass = classesToPainlessClasses.get(targetClass); String painlessMethodKey = buildPainlessMethodKey(methodName, methodArity); + Function objectLookup = isStatic ? + targetPainlessClass -> targetPainlessClass.staticMethods.get(painlessMethodKey) : + targetPainlessClass -> targetPainlessClass.methods.get(painlessMethodKey); - if (targetPainlessClass == null) { - return null; - } - - return isStatic ? - targetPainlessClass.staticMethods.get(painlessMethodKey) : - targetPainlessClass.methods.get(painlessMethodKey); + return lookupPainlessObject(targetClass, objectLookup); } public PainlessField lookupPainlessField(String targetCanonicalClassName, boolean isStatic, String fieldName) { @@ -170,22 +178,12 @@ public PainlessField lookupPainlessField(Class targetClass, boolean isStatic, Objects.requireNonNull(targetClass); Objects.requireNonNull(fieldName); - PainlessClass targetPainlessClass = classesToPainlessClasses.get(targetClass); String painlessFieldKey = buildPainlessFieldKey(fieldName); + Function objectLookup = isStatic ? + targetPainlessClass -> targetPainlessClass.staticFields.get(painlessFieldKey) : + targetPainlessClass -> targetPainlessClass.fields.get(painlessFieldKey); - if (targetPainlessClass == null) { - return null; - } - - PainlessField painlessField = isStatic ? 
- targetPainlessClass.staticFields.get(painlessFieldKey) : - targetPainlessClass.fields.get(painlessFieldKey); - - if (painlessField == null) { - return null; - } - - return painlessField; + return lookupPainlessObject(targetClass, objectLookup); } public PainlessMethod lookupImportedPainlessMethod(String methodName, int arity) { @@ -230,7 +228,7 @@ public PainlessMethod lookupRuntimePainlessMethod(Class originalTargetClass, Function objectLookup = targetPainlessClass -> targetPainlessClass.runtimeMethods.get(painlessMethodKey); - return lookupRuntimePainlessObject(originalTargetClass, objectLookup); + return lookupPainlessObject(originalTargetClass, objectLookup); } public MethodHandle lookupRuntimeGetterMethodHandle(Class originalTargetClass, String getterName) { @@ -239,7 +237,7 @@ public MethodHandle lookupRuntimeGetterMethodHandle(Class originalTargetClass Function objectLookup = targetPainlessClass -> targetPainlessClass.getterMethodHandles.get(getterName); - return lookupRuntimePainlessObject(originalTargetClass, objectLookup); + return lookupPainlessObject(originalTargetClass, objectLookup); } public MethodHandle lookupRuntimeSetterMethodHandle(Class originalTargetClass, String setterName) { @@ -248,10 +246,13 @@ public MethodHandle lookupRuntimeSetterMethodHandle(Class originalTargetClass Function objectLookup = targetPainlessClass -> targetPainlessClass.setterMethodHandles.get(setterName); - return lookupRuntimePainlessObject(originalTargetClass, objectLookup); + return lookupPainlessObject(originalTargetClass, objectLookup); } - private T lookupRuntimePainlessObject(Class originalTargetClass, Function objectLookup) { + private T lookupPainlessObject(Class originalTargetClass, Function objectLookup) { + Objects.requireNonNull(originalTargetClass); + Objects.requireNonNull(objectLookup); + Class currentTargetClass = originalTargetClass; while (currentTargetClass != null) { @@ -268,17 +269,38 @@ private T lookupRuntimePainlessObject(Class originalTargetClass, Function currentTargetClass = currentTargetClass.getSuperclass(); } + if (originalTargetClass.isInterface()) { + PainlessClass targetPainlessClass = classesToPainlessClasses.get(Object.class); + + if (targetPainlessClass != null) { + T painlessObject = objectLookup.apply(targetPainlessClass); + + if (painlessObject != null) { + return painlessObject; + } + } + } + currentTargetClass = originalTargetClass; + Set> resolvedInterfaces = new HashSet<>(); while (currentTargetClass != null) { - for (Class targetInterface : currentTargetClass.getInterfaces()) { - PainlessClass targetPainlessClass = classesToPainlessClasses.get(targetInterface); + List> targetInterfaces = new ArrayList<>(Arrays.asList(currentTargetClass.getInterfaces())); + + while (targetInterfaces.isEmpty() == false) { + Class targetInterface = targetInterfaces.remove(0); + + if (resolvedInterfaces.add(targetInterface)) { + PainlessClass targetPainlessClass = classesToPainlessClasses.get(targetInterface); + + if (targetPainlessClass != null) { + T painlessObject = objectLookup.apply(targetPainlessClass); - if (targetPainlessClass != null) { - T painlessObject = objectLookup.apply(targetPainlessClass); + if (painlessObject != null) { + return painlessObject; + } - if (painlessObject != null) { - return painlessObject; + targetInterfaces.addAll(Arrays.asList(targetInterface.getInterfaces())); } } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupBuilder.java 
b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupBuilder.java index a7390b5415870..b819b1e134048 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupBuilder.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupBuilder.java @@ -42,11 +42,14 @@ import java.security.SecureClassLoader; import java.security.cert.Certificate; import java.util.ArrayList; +import java.util.Arrays; import java.util.Collections; import java.util.HashMap; +import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Objects; +import java.util.Set; import java.util.function.Supplier; import java.util.regex.Pattern; @@ -189,6 +192,7 @@ public static PainlessLookup buildFromWhitelists(List whitelists) { // of the values of javaClassNamesToClasses. private final Map> canonicalClassNamesToClasses; private final Map, PainlessClassBuilder> classesToPainlessClassBuilders; + private final Map, Set>> classesToDirectSubClasses; private final Map painlessMethodKeysToImportedPainlessMethods; private final Map painlessMethodKeysToPainlessClassBindings; @@ -198,6 +202,7 @@ public PainlessLookupBuilder() { javaClassNamesToClasses = new HashMap<>(); canonicalClassNamesToClasses = new HashMap<>(); classesToPainlessClassBuilders = new HashMap<>(); + classesToDirectSubClasses = new HashMap<>(); painlessMethodKeysToImportedPainlessMethods = new HashMap<>(); painlessMethodKeysToPainlessClassBindings = new HashMap<>(); @@ -1255,7 +1260,7 @@ public void addPainlessInstanceBinding( } public PainlessLookup build() { - copyPainlessClassMembers(); + buildPainlessClassHierarchy(); setFunctionalInterfaceMethods(); generateRuntimeMethods(); cacheRuntimeHandles(); @@ -1286,71 +1291,66 @@ public PainlessLookup build() { javaClassNamesToClasses, canonicalClassNamesToClasses, classesToPainlessClasses, + classesToDirectSubClasses, painlessMethodKeysToImportedPainlessMethods, painlessMethodKeysToPainlessClassBindings, painlessMethodKeysToPainlessInstanceBindings); } - private void copyPainlessClassMembers() { - for (Class parentClass : classesToPainlessClassBuilders.keySet()) { - copyPainlessInterfaceMembers(parentClass, parentClass); - - Class childClass = parentClass.getSuperclass(); - - while (childClass != null) { - if (classesToPainlessClassBuilders.containsKey(childClass)) { - copyPainlessClassMembers(childClass, parentClass); - } - - copyPainlessInterfaceMembers(childClass, parentClass); - childClass = childClass.getSuperclass(); - } - } - - for (Class javaClass : classesToPainlessClassBuilders.keySet()) { - if (javaClass.isInterface()) { - copyPainlessClassMembers(Object.class, javaClass); - } - } - } - - private void copyPainlessInterfaceMembers(Class parentClass, Class targetClass) { - for (Class childClass : parentClass.getInterfaces()) { - if (classesToPainlessClassBuilders.containsKey(childClass)) { - copyPainlessClassMembers(childClass, targetClass); - } - - copyPainlessInterfaceMembers(childClass, targetClass); + private void buildPainlessClassHierarchy() { + for (Class targetClass : classesToPainlessClassBuilders.keySet()) { + classesToDirectSubClasses.put(targetClass, new HashSet<>()); } - } - private void copyPainlessClassMembers(Class originalClass, Class targetClass) { - PainlessClassBuilder originalPainlessClassBuilder = classesToPainlessClassBuilders.get(originalClass); - PainlessClassBuilder targetPainlessClassBuilder = classesToPainlessClassBuilders.get(targetClass); + for (Class 
subClass : classesToPainlessClassBuilders.keySet()) { + List> superInterfaces = new ArrayList<>(Arrays.asList(subClass.getInterfaces())); - Objects.requireNonNull(originalPainlessClassBuilder); - Objects.requireNonNull(targetPainlessClassBuilder); + // we check for Object.class as part of the allow listed classes because + // it is possible for the compiler to work without Object + if (subClass.isInterface() && superInterfaces.isEmpty() && classesToPainlessClassBuilders.containsKey(Object.class)) { + classesToDirectSubClasses.get(Object.class).add(subClass); + } else { + Class superClass = subClass.getSuperclass(); + + // this finds the nearest super class for a given sub class + // because the allow list may have gaps between classes + // example: + // class A {} // allowed + // class B extends A // not allowed + // class C extends B // allowed + // in this case C is considered a direct sub class of A + while (superClass != null) { + if (classesToPainlessClassBuilders.containsKey(superClass)) { + break; + } else { + // this ensures all interfaces from a sub class that + // is not allow listed are checked if they are + // considered a direct super class of the sub class + // because these interfaces may still be allow listed + // even if their sub class is not + superInterfaces.addAll(Arrays.asList(superClass.getInterfaces())); + } - for (Map.Entry painlessMethodEntry : originalPainlessClassBuilder.methods.entrySet()) { - String painlessMethodKey = painlessMethodEntry.getKey(); - PainlessMethod newPainlessMethod = painlessMethodEntry.getValue(); - PainlessMethod existingPainlessMethod = targetPainlessClassBuilder.methods.get(painlessMethodKey); + superClass = superClass.getSuperclass(); + } - if (existingPainlessMethod == null || existingPainlessMethod.targetClass != newPainlessMethod.targetClass && - existingPainlessMethod.targetClass.isAssignableFrom(newPainlessMethod.targetClass)) { - targetPainlessClassBuilder.methods.put(painlessMethodKey.intern(), newPainlessMethod); + if (superClass != null) { + classesToDirectSubClasses.get(superClass).add(subClass); + } } - } - for (Map.Entry painlessFieldEntry : originalPainlessClassBuilder.fields.entrySet()) { - String painlessFieldKey = painlessFieldEntry.getKey(); - PainlessField newPainlessField = painlessFieldEntry.getValue(); - PainlessField existingPainlessField = targetPainlessClassBuilder.fields.get(painlessFieldKey); + Set> resolvedInterfaces = new HashSet<>(); + + while (superInterfaces.isEmpty() == false) { + Class superInterface = superInterfaces.remove(0); - if (existingPainlessField == null || - existingPainlessField.javaField.getDeclaringClass() != newPainlessField.javaField.getDeclaringClass() && - existingPainlessField.javaField.getDeclaringClass().isAssignableFrom(newPainlessField.javaField.getDeclaringClass())) { - targetPainlessClassBuilder.fields.put(painlessFieldKey.intern(), newPainlessField); + if (resolvedInterfaces.add(superInterface)) { + if (classesToPainlessClassBuilders.containsKey(superInterface)) { + classesToDirectSubClasses.get(superInterface).add(subClass); + } else { + superInterfaces.addAll(Arrays.asList(superInterface.getInterfaces())); + } + } } } } diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/LookupTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/LookupTests.java new file mode 100644 index 0000000000000..a0ebbb5b25024 --- /dev/null +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/LookupTests.java @@ -0,0 +1,116 @@ +/* + * 
Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.painless; + +import org.elasticsearch.painless.lookup.PainlessLookup; +import org.elasticsearch.painless.lookup.PainlessLookupBuilder; +import org.elasticsearch.painless.spi.WhitelistLoader; +import org.elasticsearch.test.ESTestCase; +import org.junit.Before; + +import java.util.Collections; +import java.util.Set; + +public class LookupTests extends ESTestCase { + + protected PainlessLookup painlessLookup; + + @Before + public void setup() { + painlessLookup = PainlessLookupBuilder.buildFromWhitelists(Collections.singletonList( + WhitelistLoader.loadFromResourceFiles(PainlessPlugin.class, "org.elasticsearch.painless.lookup") + )); + } + + public static class A { } // in whitelist + public static class B extends A { } // not in whitelist + public static class C extends B { } // in whitelist + public static class D extends B { } // in whitelist + + public interface Z { } // in whitelist + public interface Y { } // not in whitelist + public interface X extends Y, Z { } // not in whitelist + public interface V extends Y, Z { } // in whitelist + public interface U extends X { } // in whitelist + public interface T extends V { } // in whitelist + public interface S extends U, X { } // in whitelist + + public static class AA implements X { } // in whitelist + public static class AB extends AA implements S { } // not in whitelist + public static class AC extends AB implements V { } // in whitelist + public static class AD implements X, S, T { } // in whitelist + + public void testDirectSubClasses() { + Set> directSubClasses = painlessLookup.getDirectSubClasses(Object.class); + assertEquals(4, directSubClasses.size()); + assertTrue(directSubClasses.contains(A.class)); + assertTrue(directSubClasses.contains(Z.class)); + assertTrue(directSubClasses.contains(AA.class)); + assertTrue(directSubClasses.contains(AD.class)); + + directSubClasses = painlessLookup.getDirectSubClasses(A.class); + assertEquals(2, directSubClasses.size()); + assertTrue(directSubClasses.contains(D.class)); + assertTrue(directSubClasses.contains(C.class)); + + directSubClasses = painlessLookup.getDirectSubClasses(B.class); + assertNull(directSubClasses); + + directSubClasses = painlessLookup.getDirectSubClasses(C.class); + assertTrue(directSubClasses.isEmpty()); + + directSubClasses = painlessLookup.getDirectSubClasses(D.class); + assertTrue(directSubClasses.isEmpty()); + + directSubClasses = painlessLookup.getDirectSubClasses(Z.class); + assertEquals(5, directSubClasses.size()); + assertTrue(directSubClasses.contains(V.class)); + assertTrue(directSubClasses.contains(U.class)); + assertTrue(directSubClasses.contains(S.class)); + assertTrue(directSubClasses.contains(AA.class)); + assertTrue(directSubClasses.contains(AD.class)); + + directSubClasses = painlessLookup.getDirectSubClasses(Y.class); + assertNull(directSubClasses); + + directSubClasses = painlessLookup.getDirectSubClasses(X.class); + assertNull(directSubClasses); + + directSubClasses = painlessLookup.getDirectSubClasses(V.class); + assertEquals(2, directSubClasses.size()); + assertTrue(directSubClasses.contains(T.class)); + assertTrue(directSubClasses.contains(AC.class)); + + directSubClasses = 
painlessLookup.getDirectSubClasses(U.class); + assertEquals(1, directSubClasses.size()); + assertTrue(directSubClasses.contains(S.class)); + + directSubClasses = painlessLookup.getDirectSubClasses(T.class); + assertEquals(1, directSubClasses.size()); + assertTrue(directSubClasses.contains(AD.class)); + + directSubClasses = painlessLookup.getDirectSubClasses(S.class); + assertEquals(2, directSubClasses.size()); + assertTrue(directSubClasses.contains(AC.class)); + assertTrue(directSubClasses.contains(AD.class)); + + directSubClasses = painlessLookup.getDirectSubClasses(AA.class); + assertEquals(1, directSubClasses.size()); + assertTrue(directSubClasses.contains(AC.class)); + + directSubClasses = painlessLookup.getDirectSubClasses(AB.class); + assertNull(directSubClasses); + + directSubClasses = painlessLookup.getDirectSubClasses(AC.class); + assertTrue(directSubClasses.isEmpty()); + + directSubClasses = painlessLookup.getDirectSubClasses(AD.class); + assertTrue(directSubClasses.isEmpty()); + } +} diff --git a/modules/lang-painless/src/test/resources/org/elasticsearch/painless/org.elasticsearch.painless.lookup b/modules/lang-painless/src/test/resources/org/elasticsearch/painless/org.elasticsearch.painless.lookup new file mode 100644 index 0000000000000..b6a5adc6208b7 --- /dev/null +++ b/modules/lang-painless/src/test/resources/org/elasticsearch/painless/org.elasticsearch.painless.lookup @@ -0,0 +1,35 @@ +class java.lang.Object { +} + +class org.elasticsearch.painless.LookupTests$A { +} + +class org.elasticsearch.painless.LookupTests$C { +} + +class org.elasticsearch.painless.LookupTests$D { +} + +class org.elasticsearch.painless.LookupTests$Z { +} + +class org.elasticsearch.painless.LookupTests$V { +} + +class org.elasticsearch.painless.LookupTests$U { +} + +class org.elasticsearch.painless.LookupTests$T { +} + +class org.elasticsearch.painless.LookupTests$S { +} + +class org.elasticsearch.painless.LookupTests$AA { +} + +class org.elasticsearch.painless.LookupTests$AC { +} + +class org.elasticsearch.painless.LookupTests$AD { +} \ No newline at end of file From 12dda6fb366071736f3115ff0822377ec9777ec4 Mon Sep 17 00:00:00 2001 From: debadair Date: Thu, 26 Aug 2021 12:44:50 -0700 Subject: [PATCH 025/128] [DOCS] Add ILM error/troubleshooting info. Closes #75849 (#76957) * [DOCS] Add ILM error/troubleshooting info. Closes #75849 * Apply suggestions from code review Co-authored-by: James Rodewig <40268737+jrodewig@users.noreply.github.com> * Updated xref & fixed whitespace issues Co-authored-by: James Rodewig <40268737+jrodewig@users.noreply.github.com> --- docs/reference/ilm/error-handling.asciidoc | 68 +++++++++++++++++++++- 1 file changed, 67 insertions(+), 1 deletion(-) diff --git a/docs/reference/ilm/error-handling.asciidoc b/docs/reference/ilm/error-handling.asciidoc index 500ac99ec129f..d125c557323e9 100644 --- a/docs/reference/ilm/error-handling.asciidoc +++ b/docs/reference/ilm/error-handling.asciidoc @@ -1,7 +1,7 @@ [role="xpack"] [testenv="basic"] [[index-lifecycle-error-handling]] -== Resolve lifecycle policy execution errors +== Troubleshooting {ilm} errors When {ilm-init} executes a lifecycle policy, it's possible for errors to occur while performing the necessary index operations for a step. @@ -147,3 +147,69 @@ POST /my-index-000001/_ilm/retry {ilm-init} subsequently attempts to re-run the step that failed. You can use the <> to monitor the progress. + +[discrete] +=== Common {ilm-init} errors + +Here's how to resolve the most common errors reported in the `ERROR` step. 
+ +TIP: Problems with rollover aliases are a common cause of errors. +Consider using <> instead of managing rollover with aliases. + +[discrete] +==== Rollover alias [x] can point to multiple indices, found duplicated alias [x] in index template [z] + +The target rollover alias is specified in an index template's `index.lifecycle.rollover_alias` setting. +You need to explicitly configure this alias _one time_ when you +<>. +The rollover action then manages setting and updating the alias to +<> to each subsequent index. + +Do not explicitly configure this same alias in the aliases section of an index template. + +[discrete] +==== index.lifecycle.rollover_alias [x] does not point to index [y] + +Either the index is using the wrong alias or the alias does not exist. + +Check the `index.lifecycle.rollover_alias` <>. +To see what aliases are configured, use <>. + +[discrete] +==== Setting [index.lifecycle.rollover_alias] for index [y] is empty or not defined + +The `index.lifecycle.rollover_alias` setting must be configured for the rollover action to work. + +Update the index settings to set `index.lifecycle.rollover_alias`. + +[discrete] +==== Alias [x] has more than one write index [y,z] + +Only one index can be designated as the write index for a particular alias. + +Use the <> API to set `is_write_index:false` for all but one index. + +[discrete] +==== index name [x] does not match pattern ^.*-\d+ + +The index name must match the regex pattern `^.*-\d+` for the rollover action to work. +The most common problem is that the index name does not contain trailing digits. +For example, `my-index` does not match the pattern requirement. + +Append a numeric value to the index name, for example `my-index-000001`. + +[discrete] +==== CircuitBreakingException: [x] data too large, data for [y] + +This indicates that the cluster is hitting resource limits. + +Before continuing to set up {ilm-init}, you'll need to take steps to alleviate the resource issues. +For more information, see <>. + +[discrete] +==== High disk watermark [x] exceeded on [y] + +This indicates that the cluster is running out of disk space. +This can happen when you don't have {ilm} set up to roll over from hot to warm nodes. + +Consider adding nodes, upgrading your hardware, or deleting unneeded indices. \ No newline at end of file From c605cdf0c6002ffec0aa58f0d2201af1bb4f23a7 Mon Sep 17 00:00:00 2001 From: Yang Wang Date: Fri, 27 Aug 2021 10:31:58 +1000 Subject: [PATCH 026/128] [Test] More robust assertions for watcher execution (#76977) Since the test is really for making sure the serialised authentication header can work after cluster upgrade, it is sufficient to just assert that the watcher execute successfully once regardless of the total number of execution. 
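Put differently, the busy-assert only has to observe one successful execution and then hold on to
that observation. A rough sketch of that latch-and-assert shape (the status-fetching helper here is
made up; the real response parsing is in the diff below):

    import java.util.concurrent.atomic.AtomicBoolean;

    public class WatchEventuallyExecutesSketch extends org.elasticsearch.test.ESTestCase {

        public void testWatchEventuallyExecutes() throws Exception {
            final AtomicBoolean executed = new AtomicBoolean();
            assertBusy(() -> {
                if ("executed".equals(fetchExecutionState())) {
                    executed.set(true); // latch the first success so later retries cannot lose it
                }
                assertTrue("watch has not executed yet", executed.get());
            });
        }

        // Hypothetical stand-in for reading status.execution_state from
        // GET /_watcher/watch/<id>; the real test parses the REST response map.
        private String fetchExecutionState() {
            return "executed";
        }
    }
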
--- .../xpack/restart/FullClusterRestartIT.java | 16 +++++++++++----- 1 file changed, 11 insertions(+), 5 deletions(-) diff --git a/x-pack/qa/full-cluster-restart/src/test/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java b/x-pack/qa/full-cluster-restart/src/test/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java index d01d4ba23c7f6..47975872ffe10 100644 --- a/x-pack/qa/full-cluster-restart/src/test/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java +++ b/x-pack/qa/full-cluster-restart/src/test/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java @@ -45,6 +45,7 @@ import java.util.Locale; import java.util.Map; import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicBoolean; import java.util.stream.Collectors; import static org.elasticsearch.core.TimeValue.timeValueSeconds; @@ -53,7 +54,6 @@ import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.everyItem; -import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.hasEntry; import static org.hamcrest.Matchers.hasItems; @@ -202,11 +202,9 @@ public void testWatcher() throws Exception { } } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/63088") @SuppressWarnings("unchecked") public void testWatcherWithApiKey() throws Exception { final Request getWatchStatusRequest = new Request("GET", "/_watcher/watch/watch_with_api_key"); - getWatchStatusRequest.addParameter("filter_path", "status"); if (isRunningAgainstOldCluster()) { final Request createApiKeyRequest = new Request("PUT", "/_security/api_key"); @@ -247,11 +245,19 @@ public void testWatcherWithApiKey() throws Exception { final Map status = (Map) getWatchStatusResponse.get("status"); final int version = (int) status.get("version"); + final AtomicBoolean versionIncreased = new AtomicBoolean(); + final AtomicBoolean executed = new AtomicBoolean(); assertBusy(() -> { final Map newGetWatchStatusResponse = entityAsMap(client().performRequest(getWatchStatusRequest)); final Map newStatus = (Map) newGetWatchStatusResponse.get("status"); - assertThat((int) newStatus.get("version"), greaterThan(version + 2)); - assertEquals("executed", newStatus.get("execution_state")); + if (false == versionIncreased.get() && version < (int) newStatus.get("version")) { + versionIncreased.set(true); + } + if (false == executed.get() && "executed".equals(newStatus.get("execution_state"))) { + executed.set(true); + } + assertThat("version increased: [" + versionIncreased.get() + "], executed: [" + executed.get() + "]", + versionIncreased.get() && executed.get(), is(true)); }); } finally { stopWatcher(); From 4886753dec7df83dd34ba04b4b7775de9c612cf3 Mon Sep 17 00:00:00 2001 From: Dan Hermann Date: Fri, 27 Aug 2021 07:32:02 -0500 Subject: [PATCH 027/128] [DOCS] Final pipelines may not change target index (#76997) --- docs/reference/index-modules.asciidoc | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/docs/reference/index-modules.asciidoc b/docs/reference/index-modules.asciidoc index 63932c4d5a72a..2678715e85ae3 100644 --- a/docs/reference/index-modules.asciidoc +++ b/docs/reference/index-modules.asciidoc @@ -327,11 +327,14 @@ Defaults to `*`, which matches all fields eligible for [[index-final-pipeline]] `index.final_pipeline`:: - The final <> pipeline for this index. 
Index requests - will fail if the final pipeline is set and the pipeline does not exist. - The final pipeline always runs after the request pipeline (if specified) and - the default pipeline (if it exists). The special pipeline name `_none` - indicates no ingest pipeline will run. +The final <> pipeline for this index. Indexing requests +will fail if the final pipeline is set and the pipeline does not exist. +The final pipeline always runs after the request pipeline (if specified) and +the default pipeline (if it exists). The special pipeline name `_none` +indicates no ingest pipeline will run. ++ +NOTE: You can't use a final pipelines to change the `_index` field. If the +pipeline attempts to change the `_index` field, the indexing request will fail. [[index-mapping-dimension-fields-limit]] `index.mapping.dimension_fields.limit`:: From c4aad2965f5e7c122c0bacdb79f7bbdc2b211e79 Mon Sep 17 00:00:00 2001 From: Dan Hermann Date: Fri, 27 Aug 2021 07:35:11 -0500 Subject: [PATCH 028/128] [DOCS] Map iteration support in ForEach processor (#76972) --- .../ingest/processors/foreach.asciidoc | 211 +++++++++++++++--- 1 file changed, 184 insertions(+), 27 deletions(-) diff --git a/docs/reference/ingest/processors/foreach.asciidoc b/docs/reference/ingest/processors/foreach.asciidoc index 7a8c29ff24a89..3cdf0319aabc9 100644 --- a/docs/reference/ingest/processors/foreach.asciidoc +++ b/docs/reference/ingest/processors/foreach.asciidoc @@ -4,39 +4,70 @@ Foreach ++++ -Processes elements in an array of unknown length. +Runs an ingest processor on each element of an array or object. -All processors can operate on elements inside an array, but if all elements of an array need to -be processed in the same way, defining a processor for each element becomes cumbersome and tricky -because it is likely that the number of elements in an array is unknown. For this reason the `foreach` -processor exists. By specifying the field holding array elements and a processor that -defines what should happen to each element, array fields can easily be preprocessed. +All ingest processors can run on array or object elements. However, if the +number of elements is unknown, it can be cumbersome to process each one in the +same way. -A processor inside the foreach processor works in the array element context and puts that in the ingest metadata -under the `_ingest._value` key. If the array element is a json object it holds all immediate fields of that json object. -and if the nested object is a value is `_ingest._value` just holds that value. Note that if a processor prior to the -`foreach` processor used `_ingest._value` key then the specified value will not be available to the processor inside -the `foreach` processor. The `foreach` processor does restore the original value, so that value is available to processors -after the `foreach` processor. - -Note that any other field from the document are accessible and modifiable like with all other processors. This processor -just puts the current array element being read into `_ingest._value` ingest metadata attribute, so that it may be -pre-processed. - -If the `foreach` processor fails to process an element inside the array, and no `on_failure` processor has been specified, -then it aborts the execution and leaves the array unmodified. +The `foreach` processor lets you specify a `field` containing array or object +values and a `processor` to run on each element in the field. 
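
A quick way to experiment with this behaviour is the simulate pipeline API. The sketch below drives
it from the low-level Java REST client; the `localhost:9200` endpoint and the `values` field are
illustrative assumptions, not requirements of the processor.

[source,java]
----
import org.apache.http.HttpHost;
import org.apache.http.util.EntityUtils;
import org.elasticsearch.client.Request;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.RestClient;

public class ForeachSimulateSketch {
    public static void main(String[] args) throws Exception {
        // Assumes a locally running node; adjust host and port to your cluster.
        try (RestClient client = RestClient.builder(new HttpHost("localhost", 9200, "http")).build()) {
            Request simulate = new Request("POST", "/_ingest/pipeline/_simulate");
            // Uppercase every element of the illustrative "values" array.
            simulate.setJsonEntity(
                "{"
                    + "\"pipeline\": {\"processors\": [{\"foreach\": {"
                    + "  \"field\": \"values\","
                    + "  \"processor\": {\"uppercase\": {\"field\": \"_ingest._value\"}}"
                    + "}}]},"
                    + "\"docs\": [{\"_source\": {\"values\": [\"foo\", \"bar\", \"baz\"]}}]"
                    + "}");
            Response response = client.performRequest(simulate);
            System.out.println(EntityUtils.toString(response.getEntity()));
        }
    }
}
----

The response echoes the document with each element of `values` uppercased, which is the
per-element rewrite described above.
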
[[foreach-options]] .Foreach Options [options="header"] |====== | Name | Required | Default | Description -| `field` | yes | - | The array field -| `processor` | yes | - | The processor to execute against each field -| `ignore_missing` | no | false | If `true` and `field` does not exist or is `null`, the processor quietly exits without modifying the document +| `field` | yes | - | Field containing array or object +values. +| `processor` | yes | - | Ingest processor to run on each +element. +| `ignore_missing` | no | false | If `true`, the processor silently +exits without changing the document if the `field` is `null` or missing. include::common-options.asciidoc[] |====== +[discrete] +[[foreach-keys-values]] +==== Access keys and values + +When iterating through an array or object, the `foreach` processor stores the +current element's value in the `_ingest._value` <> field. `_ingest._value` contains the entire element value, including +any child fields. You can access child field values using dot notation on the +`_ingest._value` field. + +When iterating through an object, the `foreach` processor also stores the +current element's key as a string in `_ingest._key`. + +You can access and change `_ingest._key` and `_ingest._value` in the +`processor`. For an example, see the <>. + +[discrete] +[[foreach-failure-handling]] +==== Failure handling + +If the `foreach` processor fails to process an element and no `on_failure` +processor is specified, the `foreach` processor silently exits. This leaves the +entire array or object value unchanged. + +[discrete] +[[foreach-examples]] +==== Examples + +The following examples show how you can use the `foreach` processor with +different data types and options: + +* <> +* <> +* <> +* <> + +[discrete] +[[foreach-array-ex]] +===== Array + Assume the following document: [source,js] @@ -64,7 +95,7 @@ When this `foreach` processor operates on this sample document: -------------------------------------------------- // NOTCONSOLE -Then the document will look like this after preprocessing: +Then the document will look like this after processing: [source,js] -------------------------------------------------- @@ -74,7 +105,11 @@ Then the document will look like this after preprocessing: -------------------------------------------------- // NOTCONSOLE -Let's take a look at another example: +[discrete] +[[foreach-array-objects-ex]] +===== Array of objects + +Assume the following document: [source,js] -------------------------------------------------- @@ -111,7 +146,7 @@ so the following `foreach` processor is used: -------------------------------------------------- // NOTCONSOLE -After preprocessing the result is: +After processing the result is: [source,js] -------------------------------------------------- @@ -128,6 +163,130 @@ After preprocessing the result is: -------------------------------------------------- // NOTCONSOLE +For another array of objects example, see +{plugins}/ingest-attachment-with-arrays.html[attachment processor +documentation]. + +[discrete] +[[foreach-object-ex]] +===== Object + +You can also use the `foreach` processor on object fields. For example, +the following document contains a `products` field with object values. 
+ +[source,js] +-------------------------------------------------- +{ + "products" : { + "widgets" : { + "total_sales" : 50, + "unit_price": 1.99, + "display_name": "" + }, + "sprockets" : { + "total_sales" : 100, + "unit_price": 9.99, + "display_name": "Super Sprockets" + }, + "whizbangs" : { + "total_sales" : 200, + "unit_price": 19.99, + "display_name": "Wonderful Whizbangs" + } + } +} +-------------------------------------------------- +// NOTCONSOLE + +The following `foreach` processor changes the value of `products.display_name` +to uppercase. + +[source,js] +-------------------------------------------------- +{ + "foreach": { + "field": "products", + "processor": { + "uppercase": { + "field": "_ingest._value.display_name" + } + } + } +} +-------------------------------------------------- +// NOTCONSOLE + +When run on the document, the `foreach` processor returns: + +[source,js] +-------------------------------------------------- +{ + "products" : { + "widgets" : { + "total_sales" : 50, + "unit_price" : 1.99, + "display_name" : "" + }, + "sprockets" : { + "total_sales" : 100, + "unit_price" : 9.99, + "display_name" : "SUPER SPROCKETS" + }, + "whizbangs" : { + "total_sales" : 200, + "unit_price" : 19.99, + "display_name" : "WONDERFUL WHIZBANGS" + } + } +} +-------------------------------------------------- +// NOTCONSOLE + +The following `foreach` processor sets each element's key to the +value of `products.display_name`. If `products.display_name` contains an empty string, +the processor deletes the element. + +[source,js] +-------------------------------------------------- +{ + "foreach": { + "field": "products", + "processor": { + "set": { + "field": "_ingest._key", + "value": "{{_ingest._value.display_name}}" + } + } + } +} +-------------------------------------------------- +// NOTCONSOLE + +When run on the previous document, the `foreach` processor returns: + +[source,js] +-------------------------------------------------- +{ + "products" : { + "Wonderful Whizbangs" : { + "total_sales" : 200, + "unit_price" : 19.99, + "display_name" : "Wonderful Whizbangs" + }, + "Super Sprockets" : { + "total_sales" : 100, + "unit_price" : 9.99, + "display_name" : "Super Sprockets" + } + } +} +-------------------------------------------------- +// NOTCONSOLE + +[discrete] +[[failure-handling-ex]] +===== Failure handling + The wrapped processor can have a `on_failure` definition. For example, the `id` field may not exist on all person objects. Instead of failing the index request, you can use an `on_failure` @@ -159,5 +318,3 @@ block to send the document to the 'failure_index' index for later inspection: In this example, if the `remove` processor does fail, then the array elements that have been processed thus far will be updated. - -Another advanced example can be found in the {plugins}/ingest-attachment-with-arrays.html[attachment processor documentation]. From 48f3784a6df01d638281d2f559ffbe87860d9758 Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Mon, 30 Aug 2021 13:39:51 +0200 Subject: [PATCH 029/128] Add Sort By Shard Count and Failed Shard Count to Get Snapshots API (#77011) It's in the title. As requested by the Kibana team, adding these two additional sort columns. 
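For reference, a rough sketch of how a client can ask for the new ordering through the
low-level REST client (the repository name and wildcard are placeholders; `failed_shard_count`
works the same way):

    import org.elasticsearch.client.Request;
    import org.elasticsearch.client.Response;
    import org.elasticsearch.client.RestClient;

    public class GetSnapshotsSortedSketch {
        // List all snapshots of a repository, sorted by the new shard_count value.
        static Response snapshotsByShardCount(RestClient client) throws Exception {
            Request request = new Request("GET", "/_snapshot/my_repository/*");
            request.addParameter("sort", "shard_count");
            return client.performRequest(request);
        }
    }
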
relates #74350 --- .../apis/get-snapshot-api.asciidoc | 6 +++++ .../http/snapshots/RestGetSnapshotsIT.java | 10 ++++++++ .../snapshots/GetSnapshotsIT.java | 10 ++++++++ .../snapshots/get/GetSnapshotsRequest.java | 19 ++++++++++++++- .../get/TransportGetSnapshotsAction.java | 24 +++++++++++++++++++ .../AbstractSnapshotIntegTestCase.java | 6 +++++ 6 files changed, 74 insertions(+), 1 deletion(-) diff --git a/docs/reference/snapshot-restore/apis/get-snapshot-api.asciidoc b/docs/reference/snapshot-restore/apis/get-snapshot-api.asciidoc index 60cf9b34d53c9..bb104f44a979a 100644 --- a/docs/reference/snapshot-restore/apis/get-snapshot-api.asciidoc +++ b/docs/reference/snapshot-restore/apis/get-snapshot-api.asciidoc @@ -120,6 +120,12 @@ Allows setting a sort order for the result. Defaults to `start_time`, i.e. sorti `index_count`:: Sort snapshots by the number of indices they contain and break ties by snapshot name. + +`shard_count`:: + Sort snapshots by the number of shards they contain and break ties by snapshot name. + +`failed_shard_count`:: + Sort snapshots by the number of shards that they failed to snapshot and break ties by snapshot name. ==== `size`:: diff --git a/qa/smoke-test-http/src/test/java/org/elasticsearch/http/snapshots/RestGetSnapshotsIT.java b/qa/smoke-test-http/src/test/java/org/elasticsearch/http/snapshots/RestGetSnapshotsIT.java index 70da8e782b887..1ce13280f594f 100644 --- a/qa/smoke-test-http/src/test/java/org/elasticsearch/http/snapshots/RestGetSnapshotsIT.java +++ b/qa/smoke-test-http/src/test/java/org/elasticsearch/http/snapshots/RestGetSnapshotsIT.java @@ -86,6 +86,16 @@ private void doTestSortOrder(String repoName, Collection allSnapshotName GetSnapshotsRequest.SortBy.START_TIME, order ); + assertSnapshotListSorted( + allSnapshotsSorted(allSnapshotNames, repoName, GetSnapshotsRequest.SortBy.SHARDS, order), + GetSnapshotsRequest.SortBy.SHARDS, + order + ); + assertSnapshotListSorted( + allSnapshotsSorted(allSnapshotNames, repoName, GetSnapshotsRequest.SortBy.FAILED_SHARDS, order), + GetSnapshotsRequest.SortBy.FAILED_SHARDS, + order + ); } public void testResponseSizeLimit() throws Exception { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/GetSnapshotsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/GetSnapshotsIT.java index 06a00a6c7162c..0c6d1f4d1d9fa 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/GetSnapshotsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/GetSnapshotsIT.java @@ -78,6 +78,16 @@ private void doTestSortOrder(String repoName, Collection allSnapshotName GetSnapshotsRequest.SortBy.START_TIME, order ); + assertSnapshotListSorted( + allSnapshotsSorted(allSnapshotNames, repoName, GetSnapshotsRequest.SortBy.SHARDS, order), + GetSnapshotsRequest.SortBy.SHARDS, + order + ); + assertSnapshotListSorted( + allSnapshotsSorted(allSnapshotNames, repoName, GetSnapshotsRequest.SortBy.FAILED_SHARDS, order), + GetSnapshotsRequest.SortBy.FAILED_SHARDS, + order + ); } public void testResponseSizeLimit() throws Exception { diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/GetSnapshotsRequest.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/GetSnapshotsRequest.java index 5da8fa14e3350..5c172fce80bf9 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/GetSnapshotsRequest.java +++ 
b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/GetSnapshotsRequest.java @@ -46,6 +46,8 @@ public class GetSnapshotsRequest extends MasterNodeRequest public static final Version NUMERIC_PAGINATION_VERSION = Version.V_7_15_0; + private static final Version SORT_BY_SHARD_COUNTS_VERSION = Version.V_8_0_0; + public static final int NO_LIMIT = -1; /** @@ -136,6 +138,9 @@ public void writeTo(StreamOutput out) throws IOException { out.writeBoolean(verbose); if (out.getVersion().onOrAfter(PAGINATED_GET_SNAPSHOTS_VERSION)) { out.writeOptionalWriteable(after); + if ((sort == SortBy.SHARDS || sort == SortBy.FAILED_SHARDS) && out.getVersion().before(SORT_BY_SHARD_COUNTS_VERSION)) { + throw new IllegalArgumentException("can't use sort by shard count with node version [" + out.getVersion() + "]"); + } out.writeEnum(sort); out.writeVInt(size); order.writeTo(out); @@ -320,7 +325,9 @@ public enum SortBy { START_TIME("start_time"), NAME("name"), DURATION("duration"), - INDICES("index_count"); + INDICES("index_count"), + SHARDS("shard_count"), + FAILED_SHARDS("failed_shard_count"); private final String param; @@ -343,6 +350,10 @@ public static SortBy of(String value) { return DURATION; case "index_count": return INDICES; + case "shard_count": + return SHARDS; + case "failed_shard_count": + return FAILED_SHARDS; default: throw new IllegalArgumentException("unknown sort order [" + value + "]"); } @@ -388,6 +399,12 @@ public static After from(@Nullable SnapshotInfo snapshotInfo, SortBy sortBy) { case INDICES: afterValue = String.valueOf(snapshotInfo.indices().size()); break; + case SHARDS: + afterValue = String.valueOf(snapshotInfo.totalShards()); + break; + case FAILED_SHARDS: + afterValue = String.valueOf(snapshotInfo.failedShards()); + break; default: throw new AssertionError("unknown sort column [" + sortBy + "]"); } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/TransportGetSnapshotsAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/TransportGetSnapshotsAction.java index 2da538f628e76..39b1ae3486823 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/TransportGetSnapshotsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/TransportGetSnapshotsAction.java @@ -484,6 +484,12 @@ private static SnapshotsInRepo buildSimpleSnapshotInfos( private static final Comparator BY_INDICES_COUNT = Comparator.comparingInt(sni -> sni.indices().size()) .thenComparing(SnapshotInfo::snapshotId); + private static final Comparator BY_SHARDS_COUNT = Comparator.comparingInt(SnapshotInfo::totalShards) + .thenComparing(SnapshotInfo::snapshotId); + + private static final Comparator BY_FAILED_SHARDS_COUNT = Comparator.comparingInt(SnapshotInfo::failedShards) + .thenComparing(SnapshotInfo::snapshotId); + private static final Comparator BY_NAME = Comparator.comparing(sni -> sni.snapshotId().getName()); private static SnapshotsInRepo sortSnapshots( @@ -508,6 +514,12 @@ private static SnapshotsInRepo sortSnapshots( case INDICES: comparator = BY_INDICES_COUNT; break; + case SHARDS: + comparator = BY_SHARDS_COUNT; + break; + case FAILED_SHARDS: + comparator = BY_FAILED_SHARDS_COUNT; + break; default: throw new AssertionError("unexpected sort column [" + sortBy + "]"); } @@ -546,6 +558,18 @@ private static SnapshotsInRepo sortSnapshots( order ); break; + case SHARDS: + isAfter = filterByLongOffset(SnapshotInfo::totalShards, Integer.parseInt(after.value()), 
snapshotName, repoName, order); + break; + case FAILED_SHARDS: + isAfter = filterByLongOffset( + SnapshotInfo::failedShards, + Integer.parseInt(after.value()), + snapshotName, + repoName, + order + ); + break; default: throw new AssertionError("unexpected sort column [" + sortBy + "]"); } diff --git a/test/framework/src/main/java/org/elasticsearch/snapshots/AbstractSnapshotIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/snapshots/AbstractSnapshotIntegTestCase.java index d5d6abfdb8c45..de42a76eef945 100644 --- a/test/framework/src/main/java/org/elasticsearch/snapshots/AbstractSnapshotIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/snapshots/AbstractSnapshotIntegTestCase.java @@ -698,6 +698,12 @@ public static void assertSnapshotListSorted(List snapshotInfos, @N case INDICES: assertion = (s1, s2) -> assertThat(s2.indices().size(), greaterThanOrEqualTo(s1.indices().size())); break; + case SHARDS: + assertion = (s1, s2) -> assertThat(s2.totalShards(), greaterThanOrEqualTo(s1.totalShards())); + break; + case FAILED_SHARDS: + assertion = (s1, s2) -> assertThat(s2.failedShards(), greaterThanOrEqualTo(s1.failedShards())); + break; default: throw new AssertionError("unknown sort column [" + sort + "]"); } From 111100ebfdd79a1d64a236621b230ee77ac89196 Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Mon, 30 Aug 2021 14:59:04 +0200 Subject: [PATCH 030/128] Adjust GetSnapshotsRequest BwC Serialization after Backport (#77019) Once #77018 is merged, we can merge this to enabled the new sort columns in 7.16+. --- .../action/admin/cluster/snapshots/get/GetSnapshotsRequest.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/GetSnapshotsRequest.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/GetSnapshotsRequest.java index 5c172fce80bf9..158acb9092aed 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/GetSnapshotsRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/GetSnapshotsRequest.java @@ -46,7 +46,7 @@ public class GetSnapshotsRequest extends MasterNodeRequest public static final Version NUMERIC_PAGINATION_VERSION = Version.V_7_15_0; - private static final Version SORT_BY_SHARD_COUNTS_VERSION = Version.V_8_0_0; + private static final Version SORT_BY_SHARD_COUNTS_VERSION = Version.V_7_16_0; public static final int NO_LIMIT = -1; From ea007902ef45c9842414239be99a6d56289e9f75 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Istv=C3=A1n=20Zolt=C3=A1n=20Szab=C3=B3?= Date: Mon, 30 Aug 2021 16:11:34 +0200 Subject: [PATCH 031/128] [DOCS] Adds anomaly job health alert type docs (#76659) Co-authored-by: Lisa Cawley --- .../ml-configuring-alerts.asciidoc | 335 ++++++++++++++++-- .../ml/images/ml-anomaly-alert-type.jpg | Bin 105391 -> 0 bytes .../ml/images/ml-health-check-config.jpg | Bin 0 -> 72237 bytes docs/reference/ml/images/ml-rule.jpg | Bin 0 -> 120839 bytes 4 files changed, 303 insertions(+), 32 deletions(-) delete mode 100644 docs/reference/ml/images/ml-anomaly-alert-type.jpg create mode 100644 docs/reference/ml/images/ml-health-check-config.jpg create mode 100644 docs/reference/ml/images/ml-rule.jpg diff --git a/docs/reference/ml/anomaly-detection/ml-configuring-alerts.asciidoc b/docs/reference/ml/anomaly-detection/ml-configuring-alerts.asciidoc index cebb40ac0bbcd..6dd13006f4601 100644 --- a/docs/reference/ml/anomaly-detection/ml-configuring-alerts.asciidoc +++ 
b/docs/reference/ml/anomaly-detection/ml-configuring-alerts.asciidoc @@ -5,42 +5,61 @@ beta::[] {kib} {alert-features} include support for {ml} rules, which run scheduled -checks on an {anomaly-job} or a group of jobs to detect anomalies with certain -conditions. If an anomaly meets the conditions, an alert is created and the -associated action is triggered. For example, you can create a rule to check an -{anomaly-job} every fifteen minutes for critical anomalies and to notify you in -an email. To learn more about {kib} {alert-features}, refer to +checks for anomalies in one or more {anomaly-jobs} or check the +health of the job with certain conditions. If the conditions of the rule are met, an +alert is created and the associated action is triggered. For example, you can +create a rule to check an {anomaly-job} every fifteen minutes for critical +anomalies and to notify you in an email. To learn more about {kib} +{alert-features}, refer to {kibana-ref}/alerting-getting-started.html#alerting-getting-started[Alerting]. +The following {ml} rules are available: -[[creating-anomaly-alert-rules]] +{anomaly-detect-cap} alert:: + Checks if the {anomaly-job} results contain anomalies that match the rule + conditions. + +{anomaly-jobs-cap} health:: + Monitors job health and alerts if an operational issue occurred that may + prevent the job from detecting anomalies. + +TIP: If you have created rules for specific {anomaly-jobs} and you want to +monitor whether these jobs work as expected, {anomaly-jobs} health rules are +ideal for this purpose. + + +[[creating-ml-rules]] == Creating a rule You can create {ml} rules in the {anomaly-job} wizard after you start the job, -from the job list, or under **{stack-manage-app} > {alerts-ui}**. On the *Create -rule* window, select *{anomaly-detect-cap} alert* under the {ml} section, then -give a name to the rule and optionally provide tags. - -Specify the time interval for the rule to check detected anomalies. It is -recommended to select an interval that is close to the bucket span of the -associated job. You can also select a notification option by using the _Notify_ -selector. An alert remains active as long as anomalies are found for a -particular {anomaly-job} during the check interval. When there is no anomaly -found in the next interval, the `Recovered` action group is invoked and the -status of the alert changes to `OK`. For more details, refer to the -documentation of +from the job list, or under **{stack-manage-app} > {alerts-ui}**. + +On the *Create rule* window, give a name to the rule and optionally provide +tags. Specify the time interval for the rule to check detected anomalies or job +health changes. It is recommended to select an interval that is close to the +bucket span of the job. You can also select a notification option with the +_Notify_ selector. An alert remains active as long as the configured conditions +are met during the check interval. When there is no matching condition in the +next interval, the `Recovered` action group is invoked and the status of the +alert changes to `OK`. For more details, refer to the documentation of {kibana-ref}/create-and-manage-rules.html#defining-rules-general-details[general rule details]. - + +Select the rule type you want to create under the {ml} section and continue to +configure it depending on whether it is an +<> or an +<> rule. 
+
[role="screenshot"]
-image::images/ml-anomaly-alert-type.jpg["Creating a rule for an anomaly detection alert"]
-
-Select the {anomaly-job} or the group of {anomaly-jobs} that is checked against
-the rule. If you assign additional jobs to the group, the new jobs are
-automatically checked the next time the conditions are checked.
+image::images/ml-rule.jpg["Creating a new machine learning rule"]

-You can select the result type of the {anomaly-job} that is checked against the
-rule. In particular, you can create rules based on bucket, record, or influencer
-results.
+
+[[creating-anomaly-alert-rules]]
+=== {anomaly-detect-cap} alert
+
+Select the job that the rule applies to.
+
+You must select a type of {ml} result. In particular, you can create rules based
+on bucket, record, or influencer results.

[role="screenshot"]
image::images/ml-anomaly-alert-severity.jpg["Selecting result type, severity, and test interval"]

@@ -72,14 +91,61 @@ the sample results by providing a valid interval for your data. The generated
preview contains the number of potentially created alerts during the relative
time range you defined.

+As the last step in the rule creation process,
+<> that occur when the conditions
+are met.
+
+
+[[creating-anomaly-jobs-health-rules]]
+=== {anomaly-jobs-cap} health
+
+Select the job or group that
+the rule applies to. If you assign more jobs to the group, they are
+included the next time the rule conditions are checked.
+
+You can also use a special character (`*`) to apply the rule to all your jobs.
+Jobs created after the rule are automatically included. You can exclude jobs
+that are not critically important by using the _Exclude_ field.
+
+Enable the health check types that you want to apply. All checks are enabled by
+default. At least one check needs to be enabled to create the rule. The
+following health checks are available:
+
+_Datafeed is not started_::
+ Notifies if the corresponding {dfeed} of the job is not started but the job is
+ in an opened state. The notification message recommends the necessary
+ actions to solve the error.
+_Model memory limit reached_::
+ Notifies if the model memory status of the job reaches the soft or hard model
+ memory limit. Optimize your job by following
+ <> or consider
+ <>.
+_Data delay has occurred_::
+ Notifies when the job missed some data. You can define the threshold for the
+ amount of missing documents you get alerted on by setting
+ _Number of documents_. You can control the lookback interval for checking
+ delayed data with _Time interval_. Refer to the
+ <> page to see what to do about delayed data.
+_Errors in job messages_::
+ Notifies when the job messages contain error messages. Review the
+ notification; it contains the error messages, the corresponding job IDs and
+ recommendations on how to fix the issue. This check looks for job errors
+ that occur after the rule is created; it does not look at historic behavior.
+
+[role="screenshot"]
+image::images/ml-health-check-config.jpg["Selecting health checkers"]
+
+As the last step in the rule creation process,
+<> that occur when the conditions
+are met.
+
[[defining-actions]]
== Defining actions

-As a next step, connect your rule to actions that use supported built-in
-integrations by selecting a connector type. Connectors are {kib} services or
-third-party integrations that perform an action when the rule conditions are
-met.
+Connect your rule to actions that use supported built-in integrations by
+selecting a connector type. Connectors are {kib} services or third-party
+integrations that perform an action when the rule conditions are met.

[role="screenshot"]
image::images/ml-anomaly-alert-actions.jpg["Selecting connector type"]

@@ -88,7 +154,10 @@ For example, you can choose _Slack_ as a connector type and configure it to
send a message to a channel you selected. You can also create an index connector
that writes the JSON object you configure to a specific index. It's also possible
to customize the notification messages. A list of variables is available to include
-in the message, like job ID, anomaly score, time, or top influencers.
+in the message, like job ID, anomaly score, time, top influencers, {dfeed} ID,
+memory status and so on based on the selected rule type. Refer to
+<> to see the full list of available variables by rule type.
+
[role="screenshot"]
image::images/ml-anomaly-alert-messages.jpg["Customizing your message"]

@@ -101,3 +170,205 @@ The name of an alert is always the same as the job ID of the associated
{anomaly-job} that triggered it. You can mute the notifications for a particular
{anomaly-job} on the page of the rule that lists the individual alerts. You can
open it via *{alerts-ui}* by selecting the rule name.
+
+
+[[action-variables]]
+== Action variables
+
+You can add different variables to your action. The following variables are
+specific to the {ml} rule types.
+
+
+[[anomaly-alert-action-variables]]
+=== {anomaly-detect-cap} alert action variables
+
+Every {anomaly-detect} alert has the following action variables:
+
+`context`.`anomalyExplorerUrl`::
+URL to open in the Anomaly Explorer.
+
+`context`.`isInterim`::
+Indicates if top hits contain interim results.
+
+`context`.`jobIds`::
+List of job IDs that triggered the alert.
+
+`context`.`message`::
+A preconstructed message for the alert.
+
+`context`.`score`::
+Anomaly score at the time of the notification action.
+
+`context`.`timestamp`::
+The bucket timestamp of the anomaly.
+
+`context`.`timestampIso8601`::
+The bucket timestamp of the anomaly in ISO8601 format.
+
+`context`.`topInfluencers`::
+The list of top influencers.
++
+.Properties of `context.topInfluencers`
+[%collapsible%open]
+====
+`influencer_field_name`:::
+The field name of the influencer.
+
+`influencer_field_value`:::
+The entity that influenced, contributed to, or was to blame for the anomaly.
+
+`score`:::
+The influencer score. A normalized score between 0-100 which shows the
+influencer's overall contribution to the anomalies.
+====
+
+`context`.`topRecords`::
+The list of top records.
++
+.Properties of `context.topRecords`
+[%collapsible%open]
+====
+`by_field_value`:::
+The value of the by field.
+
+`field_name`:::
+Certain functions require a field to operate on, for example, `sum()`. For those
+functions, this value is the name of the field to be analyzed.
+
+`function`:::
+The function in which the anomaly occurs, as specified in the detector
+configuration. For example, `max`.
+
+`over_field_name`:::
+The field used to split the data.
+
+`partition_field_value`:::
+The field used to segment the analysis.
+
+`score`:::
+A normalized score between 0-100, which is based on the probability of the
+anomalousness of this record.
+====
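For illustration only, assuming the mustache-style `{{ }}` variable substitution that {kib} connector action messages use, a customized message for this rule type might look like `[{{context.jobIds}}] anomaly score {{context.score}} at {{context.timestampIso8601}}: {{context.message}} ({{context.anomalyExplorerUrl}})`; an index connector document can reference the same placeholders, and each one maps to a variable listed above.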
+
+[[anomaly-jobs-health-action-variables]]
+=== {anomaly-jobs-cap} health action variables
+
+Every health check has two main variables: `context.message` and
+`context.results`. The properties of `context.results` may vary based on the
+type of check. You can find the possible properties for all the checks below.
+
+==== _Datafeed is not started_
+
+`context.message`::
+A preconstructed message for the alert.
+
+`context.results`::
+Contains the following properties:
++
+.Properties of `context.results`
+[%collapsible%open]
+====
+`datafeed_id`:::
+The {dfeed} identifier.
+
+`datafeed_state`:::
+The state of the {dfeed}. It can be `starting`, `started`,
+`stopping`, `stopped`.
+
+`job_id`:::
+The job identifier.
+
+`job_state`:::
+The state of the job. It can be `opening`, `opened`, `closing`,
+`closed`, or `failed`.
+====
+
+==== _Model memory limit reached_
+
+`context.message`::
+A preconstructed message for the rule.
+
+`context.results`::
+Contains the following properties:
++
+.Properties of `context.results`
+[%collapsible%open]
+====
+`job_id`:::
+The job identifier.
+
+`memory_status`:::
+The status of the mathematical model. It can have one of the following values:
+
+* `soft_limit`: The model used more than 60% of the configured memory limit and
+ older unused models will be pruned to free up space. In categorization jobs no
+ further category examples will be stored.
+* `hard_limit`: The model used more space than the configured memory limit. As a
+ result, not all incoming data was processed.
+
+`model_bytes`:::
+The number of bytes of memory used by the models.
+
+`model_bytes_exceeded`:::
+The number of bytes over the high limit for memory usage at the last allocation
+failure.
+
+`model_bytes_memory_limit`:::
+The upper limit for model memory usage.
+
+`log_time`:::
+The timestamp of the model size statistics according to server time. Time
+formatting is based on the {kib} settings.
+
+`peak_model_bytes`:::
+The peak number of bytes of memory ever used by the model.
+====
+
+==== _Data delay has occurred_
+
+`context.message`::
+A preconstructed message for the rule.
+
+`context.results`::
+Contains the following properties:
++
+.Properties of `context.results`
+[%collapsible%open]
+====
+`annotation`:::
+The annotation corresponding to the data delay in the job.
+
+`end_timestamp`:::
+Timestamp of the latest finalized buckets with missing documents. Time
+formatting is based on the {kib} settings.
+
+`job_id`:::
+The job identifier.
+
+`missed_docs_count`:::
+The number of missed documents.
+====
+
+==== _Errors in job messages_
+
+`context.message`::
+A preconstructed message for the rule.
+
+`context.results`::
+Contains the following properties:
++
+.Properties of `context.results`
+[%collapsible%open]
+====
+`timestamp`:::
+Timestamp of the latest finalized buckets with missing documents.
+
+`job_id`:::
+The job identifier.
+
+`message`:::
+The error message.
+
+`node_name`:::
+The name of the node that runs the job.
+====
\ No newline at end of file
diff --git a/docs/reference/ml/images/ml-anomaly-alert-type.jpg b/docs/reference/ml/images/ml-anomaly-alert-type.jpg
deleted file mode 100644
index eff726155c32717ada1aa7ebbb4b00764b03d28b..0000000000000000000000000000000000000000
GIT binary patch
(binary JPG image data omitted)
zNkW5UkZuuZp~Krc|C#?d_uYHuobP=1-S^$uSiM(OSJkSuc2)JS)>>QQByj;?yl~#& zJU~JM0Q%q`Ktutax?vu!0AOSUoB{xV3Lqt61|T3s0{#K99st=-8URj!cL0E-#Q>Dx zof%A*pOgOnEveyi$nP{H?MFqeD>?=S;QfkIpo@!dkh@)1l1H&4^s{E@%M2Fb`%Wr@%9Z;4bu?%*|{o6|0tFb68u>u z*h@pm(&)0Fj$fdQprZ6?>C-}B>p*8$Rnv3l|J4@EX$bvmkfEWW(xGzFet~W>XH-;F zWKPS<$jVBAHKc;VeS;mtqE|3CVyQWd6nC z2l>ZXH3k2<{?Wic8u&*8|7hSJ4g8~l|NAuXZ?xm$3qqVw5X%6>Z9w@l2$lRmyyPz^ zCw&@F);2IA`vJkg>nAPxlVlv_OPoppD#_MwQ9s_oYDW1+*jbQ~_58lN|~sQVM) z(Oq=(_xC1c1OOl3;6PJ7Z9z*bYeC9o5Z*BX)PNMg?C2EauXXwI#UFkCe*fA3A0PYU zKVTpLj7a_H>yP5UM=(2sun0twwqW(^&OuH-Al(Y4FHZh}!624=1C|Si2K)b@M?so3 z5F8*#ul=Ciexq?e=<7deGEfq*&Q((#03Z_qvFb5L$3Qm#ICdH=7rf!*2KIC8GDx5H zb@uQD>02PJ>E!O{4AQM2E$QRq_k-R7X>qW=KLF6Ng0%AWKlSPMC)&~R`XBu}I=cNq z|0WmM5*)-ik05_<$MBz-|IQB|?+{RTKd&Fq`x^ zKtIzzc)14ZX#HO97zk4TY8&EZ_D8w9i@_iL-*mrf@q3*c9_E*Rr-Qt%{4t)J?mB;z z2ZosbF@6_cJ?-D?y9S>BLw27au=HPi-8~HcXdCQ)6+GPhCX18fzsK+5tM%_bgDn3T zo3qO~-QVlG_?rD8uYa)iAH4j%f3B0C`f>5r`=dPQ#+5(X27{CH_kR3?FaM!KPsa;C z=H<_}z&St{xCs6U0%m|LAP<}YKaBt&*d+{{zW|`)=N}&E;pQGJs0FS#7eNDGCuvDR z+0(LT0N}^^{ZR)1b|-(XOA>aff0y}#{i~b<*H8Yx%dT?+Kpi-bxTybKCQ=9h43+>; zd&?;#@WyZbk^Q)l12g~wzzT2y{D3eZ4(<@LfFhsK0qK4 z3c!HdKn#!oJOCa6SpWja2a14Fpc1G9nt?W;8yEmS0%O1oumCIrn*bWv2aZTcNGM6@ zNLWa?NCZekNhC;QNt8)6Npwjrl9-ZMkz6BjBk?5(A%T(HAxR)fBgrDkB`GGUAZZ|J zBk3a~+A`2jkB1*`Aw!X&$q3|h-udkA)mdK2~_F_1M(0T{=3t6Le?ktm%U3 zlIe=+I_T!;u=FhS()0%O&h&8lZ2DUIVfrr&)C{5wS`79KHyJV*su%_twiu}yPcZ5* zIx-|Pq(o#|lv-3-)Lk@Fv`6&dgxCp-6R{`iPOOP>iW!LA6e|(? zBu*y|6%P>46CXWEaZ>rD*U4umKb|5zrEtpqRQ9Ps2~r6~2@i>95<`;Yk}8tEl6jI7 zQpcpUq;5!+NTH$%kr9!xmPwWAks+K`IPHD<#p#(d%x5l~i9XYK=Bw-} zStr?S*-<$6+40r3qzL z<;%*6%DpP&D%vV=m1dRis*0+?s+FocYEo)mYOmDR)Wy_Y)L*DCX$WaJYUFAxXbNgx z(|oSE06h+MgyumP&kCP)KKt_Qs@6#@Pp#KlU$jqa2Wr=7ALyv+MC!DjgPhYl7k}=9 zF0-zgZkF!cd7<-e=S$9`^%V3X^jh`F^$qn?^(PGY44e##4bT^qE<|4FG(2W_#W2fo z@#3kAffpN%NR2KSr5k;^BznpBQk^kiY+#&jJbPK}a=_AF{dCfOz{re{oHraf0! zt~gwIb!Gpm_SKZDGiKsuA!hC74Cc1x#pVYV=Pc4J7A$2fBP|E4xUD>_>a8iQ&8=Tr z@7d_uJhEA~Rj`e*9k&y+3$yF9=eGB@f9t^D;OJ0wjr^L$wO7}0j>e98j(gV)u0Oqw zcG7XmblP-2>-^Ap-9^JC&1Kb9!!^xy%}v8C-EG|+>Ym}g<)Pz|<$>|k_e6Leco};Y zdJ(-Xyvu#4e6IO4_%i!?`F8s8`-S-p`Ahi6_%8&g20RKt2O0*x3?d1#3#t!h1%cAL z5b=4b~~T%uEAf6|$x$4SIw*W?c=iYYk{C?5De7*BPmzBkf8~Ywi=G1Ig5sClFB4wk3w;Y0 zicE_-iKGR{@G1_U;`Jv0OtGD}ncSny_53*OIx4BQXuc2SLzivQrp!S`@ zyPEe3?`u9NeyAN(8m#}Q^09G9eW+y^I@~sLZlrtEV02*Y(%8_r+4$sy?Zm>Q^W@r; z_Y`LO#`Mw5txx2i;%6CWGv;{b^5##@moF$TyhWWuy<5DpIJ4xmwD~#cGj93r3f)S^ zs=#XTn(SKh`uX*b8#WuuoBo?eTQS>A+fToUf2rO%yYmihiC)G8Vu)W8ce!>8zsY}V z-@Cjwx9_umbP$i_!WMm3{@!zFakzREilf9m#-GGD5)28Sh(1K3zhj`|Pb>fdKS^Cd zlkis%C(u~|0Lw=Jpt1Q4U;nJ}(=_}q3WTFSso(B@qJP8SKfhB1fc#bfxOxi!E`s+b z;Qa)c7|wz=WH4Pl3y{jm{gHs*t{_)A*`E{%Z5;?rGKj=o5I<6$1OWU5k$6-`B;sF! 
zSZ5jlTK9j$^*?Gez6OBjH)VCrFTOjy`&&nU`uq*^g9X3K_}4oqxtyH*j~2h&iLC$= z6(o(+5JDmZkTQ`#m`I47fB+~nIS6RL>vtX`q!2Q43Q8(!8d|VI4I^kxhd@ZlAmrpf z4sj%r;P(I-6FKwoGg=fZCXSRs0j#pOQ*)_=wJX}#Ooz}Sa@PZ+scG0bIJvk*Pl$=1 zJSDH7sHCi-s&h{Fyq>7D=j80->gMj@85A6HBQz}h=AFCuVq)*d#iyk| ze3X&-_(@h?{)>W_g+;}$Dyyn%YU}D78rwTMySjUN`}&7RM#sh{Ca0!Ri%XxES60{7 zH!xp!zwPZGV80*!kc$L>{3g~PlKn+4CQvR?GBOAm13fuQW?MfLNd{&9gA>R%TT?1bb;O9+G<{6|YgLG}Ar zfB#OL1wAFA#A)Cdgan*S5GDW$5LR0FJg7^trW!hCO>c980z$ch!c~{A=bW==Yeik9 zfAk67E?NK|w3)}z)f0iGM*}!M9|B38*zJ^r}Du;f{v);Q|jJs%fEieAmc~%o$;Q#By_Su0O$Fk_hjg{l4!blQW#=jL$n7l!1 zTTdF%y!pB#_^BkoL#Rq}=nN(0P5HnVfnsG788CaS4hoSp#}(Vguavd67PFzUvnBXI zkG|lTBXY>M#dOen%d8;I>Rhh2ihGv+>~l?a{4=IzH+<@*&Gu;T9z-3u4``|0Q(C^DoGku189vhgRfY$~igrta8KKH~5YP+bI5Fi2 zTD(dI*S#vX-L234m3L5S zT(1dCFk>_qn!M+Jr;$Q!YDqzFy>9(XIqnK zCKi4S>fMwC1%K^21mZZ5Q_R@>_G#XXtk_sT+oEKfNgqKbsbaTjyG$OoXU}MS(l-&T zM4$~6W+6+H0b8Zi8tJgeqLjFevr>vioWEm;%-QlpU##8X)ZhN##9XiY&{c)!I9L2r zfBut1AbOlI7$uD_ZsCsVgH~K*pXY^XEkz6~B)o-Oh8UywGxmC+F zeHjmSJv%P~GGx~jaNi{iA?spOaB6N|3<|Y ztsk{EQ7jJ}OC$oU4i#M=icv_cXiq1@)Y6j0w&8-`of{{i_vHuqupFKCtkFANr2AIR z20SvMY|G07&vlNaYQN_Z*HuYSI3y{?gd8Scl&K#|jD$lx5gxirS-H?&^E;z9OB4GK6(H8#-`wWz~7@w5A4`5ce+m6)2 zUd?yinB%N%tgNn$kmw31=M;%hvl)P&lom|(FuU5nk-8;XcT3J3?&yG(zG}O+J=<&; z+737uav@rv^4?(kJllF^7DI;&N0^!Ban8~SD{uR{QVQOSw9POHbVcCCzVGeUTH&IjK$1rVgc&xH(f3rGGe_V$d`Y`)= zYL=B^_w6WGUW;AFg<08wr8*oN%YGoHCmHGurGQysdhT8-49)Xsp+G+flFkkQY>ND3 zK;HMaO>(7{d7iy$LO@nj4i5NGk#^J zx!JSib_S_$;;AIh3L+}h9gZwuG-$Ey<0yeCi7xJr8SFuixl&egLLsfktgqA-Th0x+ zQ=CqdMLqNzmz2pS7G-c8tCr}xRx2Xl3{RRKD;iN}K_4yg-wSVG`u(IV zE=5SM;swp6_Z##)g1m3FS0{)7+7O97MTERjjTji+T)|*O6i{Pp^A@n*hW!luB2cuz1m;}3YoAEs< znIk8o;j+~RTF3BkZ!439xVvq49>^}ZsSC^yfvFzdkrruqo3kbd>?+1>+YKd?h-8Gx zp-QhRA{G!d+jaFrZZXa$BZ@8UY-hBV4NnblKD+(N?CiTD?gF+VTSB>eUVN{kF6(8I z|BZ=~I80d8Bo9I(jiKL$fBW6BEkBNB3_{mk0!uc3{u0^0LUgu~2ed%3LFl8!p1bAh z5R9Sr2+lCqle%*A(wL7^bM>=Gkr)@}fGw@E{`d##VhiP4O-Bk*@5c`mkyY~@M1Vew zPbOd=u_YxtZ!CB1b9fBZ#}+kTF}CS29+X%+B8KB|r5HWY!EGE{1~Y=6D0yx4(n`nU zL@xZyyG$i1=95`&>eXq!L7ygaP0k9Of$8Amh=3C!AqN_}Sl4BIPm>WN7PmbsVq|k= z$oosVj$YE;_aYZRzoht-UdMaAu{;T0#jrJhWMFCO0k4+11=Co{jy$C!_3-BY6Tc#< zrdlP=yK6^Ey~xLbO6foz!`>L^dXKcN8wt*~ zu?!K85m*PE3swHx2OW_sdnyN6;`IV*FmC(Iedh|Q?F5tShx;nZ$9$^`*KDf8EaM%Yf%|ms(4EYA*`G=={RMm28 zLu%8MALUxd z`K5=vI)7YkX2rWuEdBj;7z_R(5pb26DlMPi55tC{kPyWpt!5ue!`hMQ$QO^8Ifb>` z48Cji+E128<|W)R(PahxHzyL}2w^1(0`HR?L2edI5FQ%XBkiGR4uXMsHUa-L-)07l zi}MDNv3`5J_;b?#LlOgVlrXPJ?ZJI-utP)BJ;X5EMJDL}lI4X&W5FfZ#ZP;mv-bS9 z_$&x_LXihuNFu;h#<3uSODk2vAC=?9)80l`KSY3b4Ixt%#4pwIk0{NE!1LK1JjRa* zbbGzbD+Dbi2J}60_&cwNz(?N$0=5n`49UZQ<<>f{6KlT36 z%RlqrpY`$2e(>M4PZGxnE1IN4pzqWOe7=zg497`KO7sx^QgW&gb6QK6t#=Z37R_I> z_{^ep+P=;)sC3PwO|hV`D~M;-swa?Ed$*Ib*-+42+!knMmWzL&x*_od9h#)hhvDd8 zk5a@&mRNZ;D(2<*6c7-qcGe%B2nPptze|>->Q9rEc=_M(s2}-&-Wd>hrY(?(7fNBg6bUsUU#sfy8fAZ|fP4%VQOMPK15&@y)l&%{sxr zn_7SD^=nz0H-gd1Lbs$%bX_-9Kct}E^Y3(O9&d_iZ*S+4miF}(vCNI(d|1^dTtQRx zirQ5KpfCbFOzZ;|91!|Vye_Xz?A8#UP9ir$_dGk+uDU#ao&hH2^kO~D`k5@P4BPr);Jkp);+)Sr?zKLu>!vCk z{-%~$gtiOC*E)|S#|}R;8~iLG(0$O&Fx|61inUnKa99|C8H|gf_3pN)VPo5}dXj2< zY~2s`4;5^Dt3K+xi1ZPFp;r-W(M*>qIwN>?K??mi+W<{oPaYi{r|xGb=C6+R8ufn}vukFrKH< zpei&CtOv1SF_0H8{$ELu@83tkqa_PA9JF7+u_cRia;=TdsbPK#QR(H@)*jPl)#(Y7 z3L3h&8TSi5N=C~?I&H2r)Ll{HI1s@_r$)`S2!7O*OqrVRv7Sgc(SlWCNGNqgTuJG& ze)*_ay2i;XLUXcuhDu6Cs`{wmMW)d$LtYtf_Q+2b2OhQzokSoeiUs=?y<>&7Svps* zdcMg=wDemOTF99}U(5i{o!;P(YvB~-+4U)(g$DP_u?VTyZBZ0*YG6Mb<65zR zqzemb&D7+_geE#Ry4G>u+bStysJ2-z6*;Lne!GUE%B4eBr?^uS;(}1qbuIE zWgq_pem&)mLuFabq5~(Ef5~=2-C&8K;$=k{Hrt|cyhg1kcrUo-USVUXBd>(;mRZl7 z$mef|vrNKTyw^?@wL)$IxqT$KD4a5Oz7QsfPk}liAz0*MZ4=sNAqR(qH` 
zK9S+YQi*{EB4=zJJ{-tB&Nv^yEI+Jvf5h9_xZ`&g zL*s%G35SZvg$RZiB%M#L!xn;NP4%uh#r~pxQkBBwW5ckKPt;m9?%L6XckpmF)3*Dc zgXQnBz6qpQprUM{-( z_{#V?T6Wr7WUscHU6g&|e?OP)Y#)>(%!@!nju}}}>T+NY^HKLIb-+4-PF04MQ*>?} zjf4vJ+z3a-bQj5tV$Ek1&paqnHZ?J0j_=+>73-T;XPYEE6rtxk4Vv9VJ& zKemGx{~2QsC2`_!@8kA79KjyGaCIJM#|dHy#K0hqCYuN(6pNQ%7^vtA z{glGtu=^d{hi+eiV}h@565ujW^eI1r^ozU%>9&8q{y$JH@5Wq_wZmV0kH2lUNEUN} zdOlvbrB66@c&Wu}0{WR2e3rJLqvWGea|R3qtbV~ zwsL*Uast&CWcA620Gm798MJy1+N9v9GQqj=(p}&yDY$gXn25ldP)&*J?7Me^TiNjM zn_$$$ZDt~X3I4S~sp?6)1Bl$wiFv}!Pw<6vNSvL(lp+z>9S3J5sVEUx+`JSoeRTT? zJ{>eMX!t_WLJkD!lwZ0GDb(eNY<)-Q1An%2>IAS%GDLtwZ{G-s+XKT=TI1l@#*w^) zmIc>eS?B+msmGA+yuTc@(uWAFmi7?H42eM3&@Viz*YbAixe&h&ZtYj5*1;yBpN{

X?UbicA4f5!U%XO(O)c}mu(u^g)skTo}FkgFFS2pW2v{G5C0^-=kEs`dH3 zS>(5?5ABlpbXiTT*nfDD;h-D(?;cz1IbIWi-XYMsG!X_mLob?rd4#$-h)d!w$A_j6 zfpcAUVFF2qQAMkrIEhhgK7mG*>ozdzVEJvKdSb4otjXInINiM(xQ z<<;YXGCqi}_G=OWM%+2<_K_Y|zz!RJB!!jeyosO7m5I*ooQ}O8T+U$adHAT$>s}0% zZWE_ZevIJclXkf`U%Sm7H{g>n(3oll?l7;*=+O9WVdU_hmxuR`lq*ku&RpWtSSaJ#M5@=#r`828IE-2>EiM}tyggk9bdJc9 zn_ULXAj1{?S(+>u_%_c`*dRI|HJ=Q#EA*VBtU{b06UFF$x%XVRbZ%xN!-7=84RI+= zFH4Q~KGPS@nb(!l%i8N5{70y=atx#gS@$6Yqo#|ERct_Q>!B`Xmx;za@$lnE6p4&9 z5*9ALbRV{YM%^pe9z;EnZ<3+&81`TnK0oRg{DcB= zhEBA)(#rT)dWSIGhjh7^KucU_`WlAt4hkuSIieW&J-9hX6riLYtNKF@#`QTLl;!cc zi*}dg!fjCeHNced74Oh!Rg(DnC989!e>ir z3gN`yoKs;X_9HNj+^_`giL&5cpU<^Edgk20;|Dj;iyVnd!P2?B#r_7)3=Y9q<9&8~ z255E%54BkK!4AsHBRB^?x}0)+d1UA-;{YEYgph#dSC5za3w@{aRPWEMTvSpJi-kK5 zPNjEfGEBgjyq+6}dM$cLejalhRZ!d9+$`VvPFDNsAd`dHjuRC^p^W>O(A&*bNj~&_ z9V4^ousw_3wZx)OGXn7$N`2_i_8U10HT8&&{Q6!5 zY<+))3&j^hy%M|_k`UZ2Tw%2X=RQWUx*yWNb<8y`=(thWy=$EOb!BcP&|Hg&?Cu%1 z2XZV;?z9fCHHGmx1jew+gYK-9`|>H(Mi_?ku^j|Kzv@c}?~&4BlyEGqAk(%@LpQco zWqyFUBsszY|4n%WSF9d#?7%}#+$sBX0KdCBV+bvxiP`jHSD1`_lp(NC4I=GTSA(p znZr7i9~3!zP#=#KUUZmF-$!8`(V2^cl42s@W!Lr`Yd>#=KDe)Lh&qbocUMrXKmFcA zgt0l*h*~@C+!0&r+-uugV}~SBvf@-Ea-_KY$6G`LREHdG7n_)p{J@9cR( z8td!R7L}b`MTL`fFB@MRy4I)}GFrnh{dH-nCFw}m3T75Rauqz&2JUoo7sk=Yx{ti0 z8@=&h%4!<%=>?NYXoi9MdTq5wJ^Q@973R+VV|>XqYyoO#3aLFVEi}H>%*Am(Br>S& zCfUtJ2YcgCg*+-$(?dzcexIooNKi(qiJjsXXg-_=JX8-xL#vHGJSm2m4zFer0bLf+ zBmyEbq+ni)ua(q&e5K$01XDjNWlB_cYi}<_{X83?!=Z8~p&5!o&<&zqjtHVd(mk@& zvlRB`QUiRhABE1uZM}?ky|*8YX{xOFeqZW6JG1*2s7pB?%oLk>q!DJ=BfwMbfYocZ zgI`N3u_PC-m_07O%$jj3jcqR8>b^NdtTbXdFp3New9BNs-E5_GGTQw<9`+R51VIUzLzDm^TEe;};>kT$ z6aD{^7pdwC^0ROueBuTO>rKt?xy{0R^)7+q+9m>dFJ8)rfI>K1g8s7~0Y}T4fUq7q z(=h2%uUVOvYqtSQUtcRkY?8#j@BhucMgBjW8X7ImyuvDVSdaBJk9serxpuFLEAbvX z_SAVHz{c^_t3aQ9xe`iI=WqqKH6*$e#zh1)&%oz}abOVbJ+Fnq@0bL_cL9_g!YeP&{x@GF~N ze9Z&gBGX8n-5)UUo2Qz#pv4T&4#>j6>)??Yw=w>DL z7#;6~oPI#yK@l8$adHB86hD!r(YNf{#ReitDzO;k>Zc#U+rJTE2lVFNCYE3vN;GI{ zi0l&h*PSE9|DP8BDP8@#8G%UtUz+srXyF=?FTU6Hl&X?G>2@kY=K^Mp_vIdVOnCU) zF=6;`9}{%hzRw&P!`lp7jzztjXM>{jdQfmqoGvD$_OU{#@ddPI;$i2seO6brtvw^= z#f2lD>%Ov@npah9UzAaNR5yC_Q+P5?_$i# z)}ap1n{bAsTSM2>;wA1kW3$oo#+aJA4)DOqxc(+Ad0$SU<&o$o!$?U_Svx_g71>+V zBGTkx4qHP@(QfiP?>4#o+3uz{ls~W8% zl~m&50|B!k+e;M~IaE(UrAx^qdE-PWl*kz~$Ss}joaMix8}1F|ZK1K9 z^1As5nJ8}{g_E5LiLt)VdrlbHP!b5Qcr-o@-=tl?rELp#oZh(7!@07;q#oIoSu#E5ST&L zE1mRC6q3E?{@LwV3X)t0&Rm&NgXpoRO6W3NFi^;5o8Xs|UGH8zbHfQDdRMLPQIE<@ z{8oA9qI>PD5ij!^J5wWkzrmq=fa5N&RD`MmYgzixH=lxo`o5blL#|r%+!WjHLGyIi zW=v;xXy~fzxx+5cv9)APM}(PoFDg8){u-HQnb39=GmW8?Wq1M+7m?X|f8-4l!50^d ziJ(tO=yDB%$rP_O;irJf${QFp`n-1WrLMDF2mb7{s$2E!hD5#Nv@ zNWHHXlOMQh>4WYmn;8vwc}3ia2ysx%Se*-pl37WDlrx0v+Qd71CH$La4iudE6V|DHnxbm4r=pqgp{Ze*&_nk^DSyXC^5DQc zU{EzUtTfAm5%)T`zU-#*g&JoR)kfXApZUJ3#FF?>{4)dh{Z06}8o2PLr^E*WC+rG3 zv}=b>DXy7EzTo(?ZRyf7#|W<`wW5j@tpKuI=lH9=qE_d%j>*1$aVY9y`ZQOJxEiia@cPzm2&#whKV;vcuL&){4> z5@gqIvK7o`ZDwRCtRxj@AKbYA=?{W!qjye|*^ zf@d8%Zd77}OwfCYjPo4<@Z!l0j(wExQrZ`lZ;=GA3VP0MM|?a8qx4Wa--!Tdjzg9Z z{QhfoyXT&Vwi7`P zR)(PqC+?UxH^+BDUxVH+U4+xaw8olG7P&X>rx%BX8WY-#*aQvOBc2&wdOKEX)cmFyLnkTw zLBRW#Ttm4v8i`|}-7gTRV@Q@)WO9d-!&HblA@30y|ap3{OPdq7%!dx|=l{pU!vYQ*+I3Zq8#R;9KAG zaAfHVdL0atu5^@bQJNThaw+dCb0O&^lduTyo(- z$07K9@1}IjgZrHy5rA9Zyx1MlUsvt!bSVi;4SU`gl0QACl;M^vww&q=J#DUWR$1QO z?vBpl;B?7?0`e~Onig7a;Y*xiK^|6`f&SFmjUrL-acZOYwx1r{VCosRXn0ebG3!JG zGVbjkl;`XXG7`G^#qck``OSpDbY8B{F`{cT=1%)D&sg{HWfZ=iw|<{{%GNWf;d9P( zo3-eO@Z_e*{V!z%>+;PMoJbTuc!)TH9~~l>?AeF_c%(Q2{f6yBAckK8p`GzvC`w^| z1uW6RXS?8wkw^1hUvXPUJMTph*FK`4*90iB>0kh4W^8pii%0sJQE2FE&kR+g{_rav zS@y-ncJ?0org}s`a&G*C`!#nWu>2&&&-3U+1rjBUJ^;^pply79j0kW;|B}BX^Tv)x 
z-dyRI5$gS#lh{(rca87| z)hI;W_HGXaZhMzgI$(lJ{`EfTGFuW6pcaTlt`g=6gmNsXi$CQ?e(UZ(6hOt{TBD-- zYd@?$4)o4K_ldy49=to{*KD%Iq;yi21kW!M_?OJ1ecHb-<9~5bwI1^03m||Lgg%*% zhocSWr_GP{ZIPn{*7by!IIXdox8@@bLs4ViLgU|fl6>}P^*q$)1#N8@(>mV^)yKU^ z+`U^KHBJPsBU2`z{52BFFj_Z_@m$RH`JM+bL$4VVg|Y;e9XMEkeWD&(;M&qi= zT0@d_hlcr?Lk=9F4O}N2ICsMQN95{0!x0v_c~7&^p)+aehc%V zmuYpqdeL98HR2Zx^-IUzj){&-zu#4>pPDywoq@;jv*Y6ka@e3wMNQfoY^aw{fd`b2 zA#vtPT~3!zxw=Xye}3J8w(i5o^bh)3lqt|={G`h5`J)`c;!pw$h z%wuWY*-=U542^V~nehBm@4fq}X(YB(+p5RZWeTTt9UTL_$QXrscmohu z3i>~pYqtB5$=UhV@qK@qe|r0bd*G^l1D^+ND%_GsbS8H+I~k z=ElE_=uh)*$PL^>JSD__8IZuAWv$xcIA6n-{-+V8{F#>(xW)W8OF#P2^U!ECkJhW` z#f`OB3+XA2x{qx?=Ea}<5t2>IV$NIShES>7&eN?%cW4R&Me? z4ObwyQzPiq!S~u3@HEy60^jMULE9&2auF{YL4H;X1udO|prx}QA2h2x*e=BTBfww& z_>>y_4T)S&Fj)RUUcDb%*w?}KsNetv~w_FI&WR|9CL>+ub#-RQGoxVa&9!0 z*-DlN?>+sz^EWiemFiPBMlA;BNs!(Q(L|tH?|ulBEKH-uPPnsCquC-ei{pNE$$OS7 z$-_6ClNW64yJ(xG`_HDG?qwj0S!{{HDBTZ6QepirW9(~J)TE@RZJJ^(TfV+r94bg} zK*InjrPGM<8M2M-nCA-%T8zCF42_FocvLv7u3I?cc|%r}b85S6$?E#`*CeJUw*?cE zV;T?7{$|>?b&bW)wnd3zTpJebXfG$PTA)X4P851K%zIA+e18+u+cPNe?zAzVJESV_ z4bFJcj6WE+VyBM8R4hSsH@EWYFDQznqGwEYgb~FVpnZS z=+0ApDSc_XoyPT0_4EgNE`4EnlG2|qJB9av+fn@7F)RnVs#AbVQ|QBfBSVa&*k}!P zeo{ejU7hI{ZE?v?APjQiE&WN;unc#?=BA)P;%H zJ{AN`Z|LR%7@yE^va~H*-jqH%;{iD<_&U*lB6{{eT)0AB0}@i_XJ7A8nTFYrr|BLm zBx8CIs4NV%CPX$%IUYg5cV%f|ycn5nu0=oQFp2X+u*gNPYzIbnIa+=BqbZr1`%F7! zH`Hv-Vg^JEPi@Ue6iGLfuU*ROFDu|BrD~(hW%D!_#9_hHS^!Q5li3NqHz!p;rmj;I zHrU?i5Wn+UU0~791ZW(q)?d7x;rc-2dPwr}7s2EO@!d4RPxz%nmb07-CI(G!Kz;ob z|Az|xoY;bQzSGZwes(J8x0CLp@Xu-<;6f1t*Kb}v=t_97J&*Ul1FqkUBt3#oEn=@7 zj;A@LIk7iccJCG8I!xn86?|n*<`#hiW;A-~MdNA$K@E%OnC|+d`e@WBB(JY{GvP$e zj3{7juf|v#1@H#>hwkaH62sq4(^$uo~#{eJLzzp`kpz6EvwwLo1v0A~d#k zpIzwR)E@O6=Xx~zu%NjX!{ZMyk+q|3pu=WtwhM3tByE&h7_ALjW~=8&bl&rd(NZ}r z?qcHz&Mdz-E$(&K+ngF6(IH`XtGfJFE+p(paIbk!DxDmN?P#Fa-&>##+IoYx>ApE6 zPTxax@z~C4?L!%!S(te-YztJRhVZ4!OnzO{uBlKtA-jjS07yjW-+GV=UT-~?Q#hUs z2cf9|9Twhhe8dkksT(FpxLZ4P#KF9t4-_hv_9zL|PD=gv$5?(vV$z_zaCW6$R<-+bI`K^F-W*6Uogu0e^t?Y1wN?$zYTOOcJfv zjf_3a>=3wLn-u=S^L~P?NXoHzjOqYu>HwETvgHfiG!>>h!bL8Ex51ao&I&w*PYTo_ z_u)7>==9PN<&$Oba5cPJN|?c?q%`a^yv2hdXOg%bnJv)2t$zfr!m<6G_W2lo8m!y` zoe~PsF<@Nb!F|>HLHCE@-uJ>>B54M^Reh3`at4yghJC_vXCw{E2=P9*)62m)5Sjx* zy#e?JQr2ck>Ru3qr)ajE6OYKA`jT84;^g#lkZ!E? z(UehpbLDM8&`EoT`W7_zUc2-t_e?A<;*cJQ zd;A<|x~Feq!oKaWw2?O}9=r%n;r3Y`E$aXhSSf3w-VkQ&Sum9C##phI@&%b_O4Dl4 z$bop_QK|UQ+2=eT>GaC9ly2#}v;A&%%9qU2S3k)j6(!b;nFt?RSTQL*vy}U{F|Nh0 zJe8u@qOT4G$GIkYm@8ROp$EcwCAgbxn`PB)KBu+|GsYJ@Mm(>pp|^2ohh`e5sAjf; zZ-JS^`MlxRG&~KW*ac!L6srRA=Zphh+F(;_sJg0bE^KYoC3(6jX)Droo;PV;L7GO9 z<3!PJTi|J$oP11^875%(OsykF5F1WW73n1P>A+;5cxpq+SGw=nr|(~TPFf*UGCec; zxS&PU_q#_)&KtCHe*Wq)Gaaiq_L^GYgBy;ustZaN#Z4H?D(^(FZ#2RaYHW;nbh+Wt zH9T*PbunatB3~b6wr85p7s$;@hp|oXFq&|M?99OY`SHoVyY*s9&o|m zQUxM>E8qj2W~9`$3S0_~c{qv}cL{^&a=4A8nfU>M2IB~n8`F=*lNOpvb<9Ur#$WsT z5PA_mz-n z*eeB{Vh(50WgA-_D~^nZfFpC4uGnaCMc*YE99ecHgM`es$^jdAF z@{l)1PcdI6ssFqXBEP+@r?$loO~f|)p)@2q;Cvt?SF%A@pa}(NTAMMy(q;yDk*6Z$ zN_Bmm{^$C)O)%rA)KAK(xY4?-|xqx3!I;h)7qEULs8d1QaP! 
zqbN;8M5POn-b6$Mf&`+{YbXK&LR6$EB_d5~q<85p)P&v>N?1b3bC!F*<$3l#=l%Bk zopYV*`p)@&5LkaCyXyYcoa&&?@+GL33#3XJHG%W* ziCNY5Q86H3P8l0^czE;1`8v+z%CVpkx$qQRL{j(R^@dbB#7{=kB?^aPp&|-*1s_&3 z+sm{36iBXxA`t%BE4@WF?tVW#-YAoSYWeH#esn9QX6P;_wYiXQr^^r3h)A*|5!1L8 z%Uq-z?-x8Gn0@?qF$sp|Qk~ zhHE}vwO?{qtHdrUJixfFKw3UBKi{vM9~XV;{KpT|L!NFIf29BIU-_l9OP(M{3JKzQ z9qu`BOv6Df$nu#96>>+hu6_NSv_5c$yC;kw*jdSydhCqsIXmSG%rS`~dRTRdCJauQ zTv1<2?A-13!cO?Nkhf^_aijVh{M}`!k`rNMPn8KQ4CT z&c-oCd(g^vZ2zEv(S!T64$CatEh_R>`>;S@ZWFbhjT)U8vB^9kNGBt*d$#=C-(#2m zdhX3bg@dB3q|ehu)ZHC%)B6iG$JNUOPM=ymRlN9?q$_dILcey?x{^be4Fv9Yd4E10btStaH|4+ucqDrr#IGh{U4PINne+(Fj84klj|OqXN^p;#1+HU3 zpFp1NZ(#8+C)~>p6|wu=+keok8UN=erE6d@-&O9&QQ4=Ms4t*L2T6a>M1m0YpN@(L z|Fb#WaS(3#j!pf8#xl-@Os$_?xQYb~p3D7O1`)$mBkFn7Uyf>wO0E8&@j}W`%OOsc zR00)t=m}qW{_>{_?khViY)Sro$VcB-3^JY;A zKTm47XO4Q-x7mw+XkfhKDvVA7mHjw-n^LN+%L~yoR0Xa$F)g#kmofRYsNF9h62bO@ zIC^wiE*1vdVx!)|d`BpPiTH~kaAMPsTo$QVTh%DG#tuWb{gDaV)UL~xfu4WRh(!`1 z6pgheSmBL7oCng3eX0(;87D86QLWEAhGpC zkXuk6lz1OPix30`^QpaPR&{#9OjM9G@#vjpSd9nXK)l*ocKVUviSD21y63_Jiu6y< zIS5J51kq6^pry_m$B3RypsbrH!QBw5OOo#JbmuBk@HG?J6!=xq!;d{!{^}>ibpp|8 zp=bZgL-sSNjmofa=udeJl==|SEl3WEFzL;T*#X$CkjJe#-tPX6XA{cHaSMDrUF~Df zEKH7joJe6J2}OdQ@IEj{4`MhkgSddHS*yBh@X9&y-WYY`4>+q1fA{Q_>L{%+evIU; z8?5bEI8P{lXUR|PleH1lm*6%7J&`fH|3faM8qyjdY1XPLTSs7$V{K3h^`h&xBkZN! z;H6!c3JrS1pgFv?jd%)3bBB7?`pLy8X!q^bWIOujXKEdD^?b(oIn1JRUH_OP9Zy&X ztw0pjl6pR<1HwZQbR-4fQH|(uz70(1{kibay-dc&rdw4?Xj=v-?um?vJ;?~lX6PZ2{8Mj`w1c+^FZXWryN^Bz-m#7<#6`i^wk!d+<-QUn5s zP0gntuG-L6Cr;ecO%Z2}f2U47z3xzXPb=*jBLmI0JBH-9VSA_!H^6g8)n7$(yISUj zBX~U&e}2}?2%wvJV|U^gR5W2pugSqHk#{mwAe|a$^aB-vJ?^tL!QTL3t3R$^1P^X* z*@G}WWZ@mvt?w(&d->4(hL6WW=~c5GY7_q}X|3z@uJjF9C(m)(Ik~u6Bx$sX9icj0 zRI4!u3sz21H6vq#=ZF<)npmc57e~}g*w}kdtlIF3dq2DNYf7n{cHU}|imqp-j-X4o z+K{wxv$m|`B%Q22;HQ#@)IdPKeQmm4fO1f`Qv3{?{loWlwr z3;@?T44d0MgwJrTnX`w-`<9{(SC%2;DeaBer5__b_LQW%yHpg1p@Af)7$nlYEO{N;H&7af@e)qSi+-%&`Gp|KO?` zJXaTH(6054Lrkhwn}+!8BlZe%_D$9qU?nH01-Bor?ms)G$?xW;+WO_W-rV4fzJJl; z&25b1VA`ZbO{dkqWXZL=S~Pyji)ac*ci`+khxaK22I`>K*F>v8WFpDN-Y#Y6BU{^VoS*Rwv0@2;-4`ai2eH z;kKRm2B&>U(z!FtOTbV6-k2F$q??gI(BN0#Cp5&eJ{sc+n?cUM z#~s#=DhmU?Q5qHPH*PiD`EmRxy*`FoEfb`UV67kdNSs2&>`IpNM|YGh7u`3mbj{kH z5qiw{YMeTIiWRa7*+gn*4jpRA=Xg0R5nT5z(l`O(g`N|mETt6nm^)zUgqwKC;B zJB(k~8`gnt?j5rtu(YkeI=qgs9?;}myN9rfp3?xsB4KhO1F}|T9x_inrd0}ubQ}Vl zcEkfo!Te_u?=(a~iFesTl(X1LGaDWl#3R7OZ!T81IoxQdR&mbUuy*7H%R3rG((Uc= zGM~Gm=EJvB>6APsUSI!hRh5b}v9OaA8P88sKt@ZoQYKez{(=1ztbuX5@TL4)rtj=w zCr;m2p%$3}a`K)<0{F&uD|4ha=kD^7r&1(~T#tN!LfTZ2vxj1)^ zQPw=e)ZU%*R>?Rsf0fzMz0^Y~xN`FhEBGBi9fLEq%1?EDUdFM6W;P5l@_+4XEU2*O zZx1t}kxurddnJsrWO#FLH2`E|U;mF}^@rq&2D#@I&>aA}`!Kn0^a|lh)KD2vKv)l%0P1m&P!0TM`ve_ zqSF1SQJnkM52)wGzZz@XPQ%enmOfn;t9-F#9OsqA5R_?vQQpnOw;)cBXQ`PJ&@En=a}fG@y6HsY^Lu&jPwcb*V?-SWu@LpvzI5Wx>dB?9CQsc4FcK;k;L_0RPG`hN50Qy*1tJ|8l>@=wkD(`|{>(2aWM zeyl?OwH?S`?n*^`O!ohOYNE|g=56xrl@r}7 z0PDbW;6A~q4U9Cn*r=2N&cx`(-Z{f3 z9&(3n&kfoOJ!VUsFSjnb(w_9sG}nn@^-a8mQCK(4WushTTNeiRenvlFfZ@sv#V*O< zC>UyAbfrZ|p7d^pO(E9E7zkE-2oJM175!YaqtNA5KCs>$&6CD{olojxwIm~Bj5kiX z4IPsue3YxMF{=h+2@GyZuMEvF|Nffl`1$~6q$=hMr&OpBoVR1Hlk5G$W@+0gQb)P9 z`bgqo!!#G*@qRT(i6m5%Gr77W`#HaXIo?A1Szmb8vR1^;t?iqUr`qp-f62t&@Oc$N z`~m$Mesl(U42f6HLz1GP_?JjqwgZu%`FrMdCMToFCt)PNA5^>A*aLUzg_{29AZ zqYs9J2ZbwA4Ukfcys~1fxwf+R6in2JcOEeJKiZFLqVmv_PlE}95}^3w`Q0t*W!--o z%DN;oAi?hgf~O9sEL`3h@5x)v@hc!MxM!;R4RY`vO( zJDm&j!R`JEUuH;RAys?mm_SJKHFi<4h}9p=Vat+Zit080!T?mU7?Ed*i(yp3!M#{^ zbIcx+(*|xwrF$p8GGkw5Wm@GoMyq4vZg8I;^Rg{Sr&e7{mfC z&t9N6O0&n{5Z$3W;q&lpv*B_UP(`4UXsPYtmve}>A?W5&5v^*x4!uaZofTm7$v@w)ameq_L6W^=5P`53OPTV z(AzA%6vGPZp`P3VA2S_brDXYuZfQd}y7^uDH 
zI1_<8;ylS2RnG~BJ{*3+eqo_*NI^|HBV13^Jlix;&!Szg-}pLID&P1e4aR{e_fnmK zSkqWhPX{=EG>7}$ESVvDKyO#X4GcgOak}GYZ{^oyrcKZ|MoJ{Qw-~j-c-@d?C$ovV zjhbha$<+eB9IG#CQ#(%Fy%8i(JQj7OK-6ux)5rsopQA)({J%^R78=3in)C%}Ea=0z}$h;G~vC164a^d9fIOoiN= zLp-~=aW?-}B7^>!)N}D(2`y?21wuL6XYZZJ}~gP zew@xlGAZPcvM0*!Z(kr6|D1W#;`Kse3iiIZb8wS!d;Ud4b?v3kU#fiz&c20!>C1S~ zf87w(2eE<%L$>REGPWfpB^DbSMMeE~Q9K{hwQt_M%W3j}<_lP@mT{jC3}L^-Zr>`K z+26#1r4-1FU?~MX5VT4FQ?36(fBG{O>VNe2sTX|3FQUHw?bU^OBI{#qK`U~bDdT9+ zroem1EE(QQz3z>yb|cX9{&n33#VdI08Ffj@>q3&m;(RI(DJ8Uag>U@~L)I}_^>(L- zWNxlf4`T)9PM%O7hh)(N4f`h8wyf(PGf-37WYkuF?s=0xAuOpsc+KIMI19ed(lg$K)|i!Oa9)A3-5Io&gT$U|g=-H(>kQt3GlRUSMpH&njo zI(X-M)GV&^(OVsS_p|&H*^e>{kDUnqc`EN&6{?#l(3u=Fs3{8Q5^OSw1q8Yla(H&f zhgD2IG&C>p4$eX(?VUV4N|?Fc!GDr-l%5}skt0@yLS5y;UYLc+QaKUwq}EIV3;iwh z&8ph!@Wy@9(F*T}ij85Fpi`a3D{53^2O) zwT3TqGWC`vJxN5h!*h{ShkfITpWIbQ$*%F!Md&4paz023Xj=yzALa>Xq7^}a6w4TR z){SEdZawKwQV4QRQNMO_SZ2GGhvvO3yvGq(x(oawu+ae0T0~T%oX~Rh{wwhcw9zmV z+>)RK_sAO2PNXG>cNz%9oMJzIwaKe@B{fR@K7NMIh1*7w*rQA};~{wZXlzXfATub01^ zbb!_7X0<1IH)B~fW!%rO*fh<~#Gb5jCB#}a`(ByR#v62L8$O~QjJP|Kc-V6%ZJV-B z-u-d>vh`?)N6^(}cHoBba6Kz9vbfcxy0BTPJbsQC8(O>J@2;31cJ%{^W;;>w&FPXyBL17H-n%Z93^W{_wbE3ga{Z_khLr3V!wA*}U0i2(-fS>rj)+09?X;;uiY zxfgY}!cXZE($?i8zB|YWVb@+zB*hHWt>cUNHlFk5va2dGcsJsQ>Ew2M2Z7ho zAHozbD8hrk@)~LaHr3Nd*&ybjf-VF0gstd$DP$vx1SUWbldtDqC&S9%)(F)4Lb5lO z%FcmBX1w#T{=Su^`2;}DZsE)tG=+(`tiilsaAO=+53!k?bU83aHI{r2BQSh0U;Hj4 zCXN$NJ@Z|g|2m!iqpm|;ZRzmKK!oO5gk%4%9G!9~l@%y2?>)7QB%%zw3oL;9#-(*- z(_$wklq2u9iCn!H^X=V9N0pPR!1-l1e z4`f!IiKVw-d>W-Um$BEabMhuRTbAoMmZi(e4qw-H2z~HSGl?qB{L*4T3szM4SF-@L zIk&eqjvaagwWGh~=HJDUHJ=S5|@b{yONd-OvO zOd(-lX`x;|_=~odX|^Gma%<`PuVf!@JzlWb*gW^DLm3+Q+BsV$UvS8H5h$3=lqTpl#on8-?DMH{*}5pV zA)rs!Q=9!vpP!Z2{Mo7Zto2cCB>D(Y`bCCosX0rkzH(4uO+o&lwRPv$26ooM&3P%* zvugz}p5)}YtNPTJWKZh)x%%-Lo*;ftRI!gg3Eun->Eoxs;!dC(Z~wnoE+G8Fm!%ZQUSK5435C0DOJEths z0BR1!4cs6{VM)kHgu&-2xSJsI)}ROM+Hg^~s`nk?8;*fFdbD~2?F;D-*T}KhySWMOq_|^MTX8m3f1L` z;KiQ6O^}ap;?SoIm-8!KGB1qOJ@D&W3F`{S1qrc`Hd_lS=h5p$qAc1c2QEjbjNHv| zU)C<~HhrNS^1%YnAJ=gl4)=UmLmY=(d6E9wXzaM;^H}Mo9fUE7f6V2cLbXS!OV*jo z7VSOX%Pt_)D&);K#c5QQiny7w7vTs`l6wo}L=Z3h<6MhsC`B5tvL|ENWH>4^uvhVP zE%wIqKWJpSjcMBL{7f!hSF1Lg1jkZSZtcKm-6()miBE1m2qhA0UnD0_OH&xnddTF6k608j%N@qzaJBkP^)Ts z>8bo#8ZVo4bg4v3#0FgmQmFR%S1EtH%l7!EuV`8)k&o1> zm9PoQ=}B?Ip+oJlN~*DwFM`p#5yPGzeHx;aC+~ON6_q$~tZei>FxX_q&)Z|Cl2g@Xy@p(7h(-l4))mW?;P;3- zB+lfxlFAJ-(Ix0~ob>y=bGM@LeZ|MbUjCRX_f(!*~7pB7@Xs;iH_DCTuAYd zYfMzb-SSgs?!j;0zO7@y)@fv{-M9<{%@I6X}8m zMvLm4w~Ej?C;JH^$&-Ihe)hEVc)bLWG{?3+fh5FUX$+Dat~JmYZ93dKeRA0PN&1BG zx9_jHv*>V~99^038tGFQnVFsH=(WIjr{78t&)3=kiV8`(nM(;5!shOCmXA9`)M7S% zAv>OpHFmlAxXCwG>5=D{@qjiyWF$DJ3~9;%7}I(KrXp3PxsQhlDAdn)C1K?77?vACh`N~H=ZQFpHs zW9+T(E;_R+OS&BdH^NF#kqU;sb1h~R)qn_&bXF>H%p$h}$8aFRh*Od0xil5k_S9`0L7an{` z%wc}pU%mfn^;Nm?8-()(cu!tv81)j6bT6Ii$)Z%QI5kr0`DNKG{<@g^`4675>{i<1 zKxDG$set63pfsyVtEq{csqDg%TOa!S!c0whPn>Bu@m%rO&IfSIrRkx5+4+krl^@6? 
zNHYaKxx$*wSs#u+mAHAnT~Zzsel6jSQRlcomI|g){TSW~luuSRA|`KQe2P14xs%Cj zV;di`OipY}raZN>DjsfpC2&FMRqHdo=AVtADz4t|-Qqv~m`M`Q*j$i=00ym>D~pT& zvRMCH%%Q^N-xuZYb=)5V>3mbrPtDRqhh*~HN=ydJ%|Aavf@%%mkU4o3%l+6mhPczG z`AIZm!^4gD(Uo7li#$z<4a&|gc6vM=Jpo0a_{yMXfhB^i&qML>O&vD!yg^39-1a?- z6lc~op~W%YZ(5sI>3KEWr$A->%QCOAaa(|t52gXeifp)z@F#`;fUs){np1QJC=4EX zYt&OfZ|=8c)#A6y=S>GNQ>pvgx)I%`fafJmt5l|=h|ykfR|po>+8XyH$JKos!n%M@ zi<{Kbnw}#gKDrxieM|53yX4qn(Pv+b#IHAOUWhK`*CkmL2TenG5uUS2E_iA42eHih1yL56~J*PU3wmacrXh&nhG@O>4e)I** zB$^{z9(}AAvvvieqG)h$9fCbu{CG>@Goh|xr1*J;`%FnZd0uBrA|=6~EOy|eItOfA zxfwN%B710eY(19sPZss?4wTYtOZdclx*bMKdl=2f$D~06-x#KGImi*2A6KV zG~h_Nm{IcT-3ucM2Zo$T%G;3iB<2yYE$9g|o56YsitUK{jSui(d`_(h`KlPK}`wk=pfKUExm`ulw#PDhF#aBKQEW*#b=144b<6$7M}HY(S*! z^CHzoCM(ITEzJP+Ga{|657C#B#dm(Xdb>X@p#)F|!QkkPLw%4EZUuM{0|?mwwqZM{ z-9{2iVhKa%XJm+Cz)S{fZud{7QgX(Ozh1SyO*{5d-K-hpFqbcFL}jr6#)nD(qz%kU zf-%FPv!fU zS(h40Jp;(y3A0w+(S2Ug_t?&Ku$=km%TQ>D6H^RYRdicF>AS%278kYk51MZhD1-)n z<`gi8cMbt*D24@da-&tdax+#SB~?#-oJ-@&1L6ZlKm>_%ZiNzW zW5<`s8b_rRNs>V`^mGujnEm`{%sfWYoH6 zt<|r;L292Q0z-6bNXN=-R%)%0&JV2PW?PDBUWu)xmb31a%Hy*EX zCp;%lG~b_#9levZ-=RHv2nyQ}Z-ZQaSQ%y`m^edD15n~|+3(@B*M<>L zxe&SCe9ZHsTP%0vgS&TNc7-Z*cN=HpRXF#ia}MRHKlyqad7#c4FmP52m#w9D`w|a- zW-U9u`!-qs5qn?Td@+NV?Ma6AMYJmTHM|VYHsLT{s0$|Q51LX;%q|AU-C|m=jAR7b z?hce$+sW7rT34&OUGx;J+t`RVcdmX}!Y1`)%k&-%4+i&w5F~_90PEz1H=1VA0LJfK z>v{SLuUmPj>6b@!4xIKE77jJ$*6ft7$bYhK3j(A6RV|2PY&T4LjZOddYIrQ;Pl%%X z@RJu&+Zb@QXqWZe2Pt}s6uF$$XK?2`iE6GEmLu$^w64?9mp=hl6StY2HRU7X4BjSn zR_@mskikc+0L3BQtsS3`3|;M0S3j?LH$uNDdj8tWpaP!6>_bsgPyxUKy9Lt78TVgY zI2c@qm{|9o0(}$g(un6F9@~gokF}x(I)So^{1oY_Y{&n>z_Uo$A7O!E%Z~+jnx~SHWj-^%_JTn=1_=D}WgqHVgO+R#-8w{)zLab8`7(XOkmG7V z8B$OHgA4{(A0S=Uusdt?AS&faRW!}dzSH1qLhT<#E{(7qQfDY`s0~3sYTzS~`LO{m z)nA6sx{k?qV*ksI#H!JaoBgF_=wEtO@W43lQH>NxBmGm;x_{}GiCdJyI=3V2FOBE_ zxnD)bI+?H63;r?&kiT@xK$x5XqoHf zuvk&;Z46*CzzSG$zKsM24mz-eOdcnftv!}T@C1GBJRG5u=X2dPhQA#EuW1%F#`l_~<`Y6-q?*{g4NgR=gR#ASYN6*Ft zM)8;pFpca)>(OU^ay3vi2MrNo8AiCzLZe^ECgtAn`~Z9zZHix6%d>2A@$lg(w)6d7 zJ|P4&i%QYhn9m1R9!gL_FPkHX1=*l&4^+;vu{1%0%OD{yxI>R;=TlY!X-n#c7gb)_gItpcYJ`;BrK0Q@?xnQlSk?aR^s-BVk z+VZmOw7(`iRSJGN?&J$^o6CnFzq#Q7(it^&WsVIDV}`(Zvb2+0d&DZu+_6rn4taKy zodx`9ob$}D?k~|j)42?bgwX~a$5BB3&g(HCdX~P)(6k8#s1SErmC@W=?2Xrb=qe+& zWbl<--&8pSysc&m_jR3OEo3q@Bk{K5i$EmmN@Vh3C0b10jq0$ zh|6=|YxzGtG>@JW^ERZt6~D@&AfoW@*S!*WZ~T|quRoloK%&{~R~Yg3#=SO!p^MbZ zE}Ax_f#)+G3+CwV5GHE6bz&5ZDQl6A3>?l2_AnHT2fRKVR>2)d@tiYgS7%PGcB(nl zT@F%>b;N!hR}L$BR2xTFNGPVK>5j6sT~gWT9Vg*7IX3-RGa%qMVf`OCP1G z2jo%YfFlyOKCu=1`J(|#=3tdioR(H?uA*Zo&*!F2<2$OOMF`KMPxU9MKOrpKbKI@i zGZQX0-2O$g*0-*0|FBsm$l95yJ_bF%9$U;CH2^10Z z!n2Jt&5lH=>lOk2l*_I%88EG5XRGy$w*eNRQ^?cyjpgw7;zE0@_YwoDQmv)63V|5QZ`eEMMlR~RqO7(C(9hY zT~}Z+m^orpm{e5Vd0XbY-97cvqfHjXI0-^!qCY%OQ9mlA z3I=tBKZw3>6j5<$+H!5=nZn533_SHEx+a#=;M-y%$nUnac+PC`PWT~5&@`-6BDNmP z0|9FYYRC~5=Fun0w!+d!!^$;mI>(lAp|xIHzOL^YG)g(Q=gl3&lRy1>9d9&%!>lJC z+8|VpvZ0Q0IMGHdtGY^1{fYiPLE4sT`jAb#0fh}-A5?N~sX-M(BF~B9Rs|<#m!Fpe zPCvWu!&JHs8^1wilq9YZ;$GncIp*Y|YL&8NNk6d|S52YdCeloX^l<g8)7Wc1YNrTh;TWVw0Nn&b5tq)-<;=m^?8E^dp$?SHeGwEe@UH+6z;VqFV zFA=%!>1*-Fg5RBiMX5?qIIPG$#5M{?z>RN2bm&|ufG4&QVy4xI^=R?E44H02-z)q- zD}}DbnVYvS4}A8#DA{neSgm=6s0XH|HQ~3`cMg@|J3rEWKNN$%xT_bd=B!2aM`g+a z?L2gw8&$;9NAJe#yGfL9k8nT!nr!SV;P`o4>o9cuNR7e=j1x1+VSv9q2{xO8btb@A zSFP%M104?M?0KbDUvN4;`dtYHE>frSJ6jJJ+o44BHie zaPGYy|4+&`iGpr~J0wc;0&txuYD$8=x(Ox5gVjT;h2r;_>QC*-_>x(a2K3Eh``a|S zpR6S$-;=+RzI`M?Wmq}9IO#%C1~o+{fWzhwZ6g&)avL(lzPlozI+6bW#=)EDyQr^9ll4an(UTo~-E3d8z zduw^^Cy2d`HqJ1xejrwob4fpwNH4FfA=y0>*P7C+nJuhcUwBk)mz6=bTNir}zowNX zHje8{#70GmR37HOc}XDyJES#&Q!I%>5$H$Qf{^Ek>!*oTKA-)F4Za1Rn$MpSgr>Qb 
zxk#;F*RP8#Ws`1A>7)82HV8U>Bg7KILO@SdFte}nSjeA21NQNIikkS@j2C>;1 z-3ZIu5wFB2Uw`2VHUSIJa*(4)=c~S~Q29!Tsj=$fg_ccxE>uajJnMi%*im$WR>VM9lu!drLXyc~pg!1^@bvX@V_nJfP zA*Q?CF0+k>({a*k75S}l{WFPvHmD9CYt%BD?$d24_j03F?rLeGsZ&kZwn5nQ8TO#% z?3WC}mCg%o3W`h*Urg>|X@kZg%=Nr|(&qz&P ziAD#NW9!lU$-=xS&UtWy;knV4hj1dIU+&I8TrG3LDhX@nw-uGYx(A*=IG8@4Lg4pj zDs|+9@V}9M>=83o98LBIZXDjFaDD}uX|A3WHHnuLIWbaOe%n$-L4cj|Q5OBn)~}7L z=KhI=nC}-FU+d@BG9AhR`;DM3FZCqAc(7&U-W&EJwr(+>)FB+@5Y@Ul9$Izzc(;v@ z=!^F_hM!MQ4JpD#?+zVSM_nkq^-r>I|0j}gr*{<}?r}c){d)HAI2zDJy$;S~)$YTx zAdrUTMYb!!_6;BS@o9Me!?th!A2)`DSqq?6I4JoDi$+)WZphcO1N+Xd?}5%4Eo2DJ z2ZTTsD(Vf)Mm9G$?R#Fuw#yS5RmP-t>fuM)ff;kXC>R&Wf`uA^Shk$ivfr}~d^^I} z`-?kDrA#R`QQfqpF$M7Tk9pG4!5DF@iKC*7e5WI+8&#f<>1E+)Lza;-ht7ai6CWkr zH~iByT*UDH{l2#yKJ(qFN+iQ?F7xKoCJCgoHA=s3zRrl#eH+w^6{?qp7vT3_QqKhS zs%pmOt)^;1s!1%nxT%(nshpzx`b6A?Y?+Ix@*N6MOWs`(!^r3bON|Ux``zFlSjHf3 zge>UCKI|Ro~Xf2A62M(;qlO&-AF-+JDp|MR6gX=Syy{k13HNG>CWcZ!lDEMb|RYa7(!2> zC9!rH4Ad$^B65d4e0pG}ahp5(1I1-&>0^zpL-fZ94oRIUIyX&jWH!IjEsggU-Q$Y- zwQmem6`NDLDrW?Y6()*q4L14lbrywo)DC`1=l6YkYVmHEo<(XHkGRpC4KK*4K$eLj z8w;v|0>}5)G|#Ux7kK0rHEATL4Q$FKTk4D9cKj2hN=}`YJa_!*&>Mci-Z~dd&&(O> z2vk6>)`3a`czjweb-O~jUQhI?fu*I0mzNKufIw6{=nt5eINqiGxWR9}1-V7-L2@Bn zNxIiaT$!Zvoj-eg*PI!9xA}2}x0cO37oAqRTkx z?cQc&9)516;-hB7v&ttI6=SCz)uvloQ(0j(bZ;SS7yB($=DRKWrHa)7 zq|Jz%0t2dM1E00s86WwPnzup4a_(2S#bMI(YHoYYL|S}>?ao1?aZ+b z`*usZD;u0Z8XnxHmTT4=$|IOaKGOyEg#h{?X`sPvTb)cYcrucnn1E=Q%6Z@*&w1@b zGUv5PWwRBw^)-@N8&w0z1;`P4azZO35!?l_@*BIQ5@Va}RxYw1(iPvN8utIB6?T%k zKHo3paXh*AQ$D7>mlxTfTdSW9R$|U39%UtagBS)YJfT*@Cx-{)FtVA*FoGkjGKj|D zF2r`JHqWQVGF*(ghIv|lRHTk{_0C7g^Ld0*Bog2UU_b}A5&r|{)6QKoqi$z-sFP7Q z!G=;iwbCNgdHpA7*s9WzuK`X3u2DO)AEx*(_`T`ct)oGO}8aOpw>3#O_ zcA;VQFD`MDQb?A-RsP7kjfr{Z*I#vrd+&)v0nW`3exxgs2ZUOJq7OA(_*g$=1B{z9 zMsBq z<(gx+%5YjgSEJduu;ih%$hYIN%#IeH)Uq7}?mrk)ffPP4TilPpx?{NzG9Z~NR$b2j zlVwMt%Yjm`|LmHHtNE(El}Xua73KDy8-jq#yzr~b$rA6u;gM(SF&1E47=35M~evuTiVy%y*W1u z{o>i}lW%+$rDvAEs;J>~*DM|ZZqnmzK8WHeP*t%`*bSD{>0V$Z-_JfRk1g*ez1Jm)46KEhRYzF)%2#y2`S9oro*b0n3Oykcz`tzMy_%U7i@Ud#^&t1~}b6X#~zFYh+e*1r;2o~D2#a=?r{rw3PyhXSs z{6Qo1_or>uZR&UkCIVnuVcRUJM}yWm&6-!*d&mF&0N1(@_G~fGQpYLsV-INK48`Pu zX-&*N`WUi3_9IAb39Y0?Tdqkb5xFT5IJqOZ6>_<}_*+Rp%4?7nLmvy5m^FJ<%NaoE zF<@UE6;c>JKdi{>1Cez)Z$7~J>QZ!sNR*;p-~6k0iTaB8)6^y*#{M!Giv<8_p-K=p z^YDS49Zd-XnG(I~~>+|e0w>JxYrn(Whpzf^G{S?^j7C+qCY0yR;jh z&uufDcwEy{N|EE73Cl==Ila0n9x&!K> zpWtRcY_5Q50C!McU%FFO1Bdt*t?d>z^YlNaOdI$yiJ~QAY#bgjEBSHk1=erTrSI=#;-(Ty7#l_APy;=l4Pwl$6~vVRiT=t++K0cCFLa1*IZ7`oksl{dd_S7> zvVfe1I$=%$hxoQIO(9Tqc0i2yydsrPvmozw0&K$2mFZfsU^aZ>`JFD2GDn`vbGe#P zA5*VPUPQ&$3k0=7Sg3rfmcHeAYkbo={&_&kPK#5Z)}qQM^j&O;5(lqU$35e0@y6Is zdT_N%!ISkupepQslc8=4UskAM^EY^@Mbg6>j&ENLBF}|Q76zDj3ALYnIT1(jZ>8C- zo~7uMBpS2Ml2XIf>5Fc!e1?Cqdi#9-!Np>7eA)IB+eU(-nDggZ?;CI4{vYJMcT`i` z`ZkJErAP;*s(>H}NLNZCDovzIFHwqgkPZ?Oiiq?M0zyldC-DLf9uK-4cx0lD z++mEi5OsQ%FZhx}ghPrm%Y&9TB`G;&*vccC#vmduzY|`=*;PR~fZaH}=qUhsVd3{V ztKe-QeuQ^Ys!QOj?G2kd0t{&)3MrbmKK&Gpx%I8CE1mx;K?V=P3Wxgw;h?;`ET%nt zk>?2UM6G<3l{THw`xG@V=6VyRC|56h)Fw*O%5g4)8m(k;YDDYiWWId#_|5#G>G>IsOh5#r4Pl8=f-qnd( zsnYgFWd(O=m9s70RJ!TdmXptDa6~|L{rGzKH(FxdcMEW+csmUgjN^eb9uAGN=@q*? 
z%c%EV&!$KT&NW1@X9xyCi!yGE-+HNcR!cnGmtKst9?T*MIr+wr%6%ig7C$NU(^?O-&pWY{vp z)gv%)mOyu?uK0_#$N4s8ZRLe{_0zIxYn?Tn)r7d5kz9=$%WSd=_XQ)b8fj(fWV zakn#>SSCchm5(T!wHRWYE6}@KmV%~8YI%?A{`N3xHt=hF%+Ht@tNmRifwhVYq(L~H zvLBA|6F;jQ&KH%~_1<@)Q}Mbf?O+&X|LlW2qu%PC+7EM7K|Cgmv0!^WhQO#kuD-Zh zMvlOoE~dbYK#96C1+{idOUsSrhpLxBMtXG3>d^w0L(F~#dETyDiX!WoPpg^e$*5f{ zM2au`o7D?|T!U{5B7TZm6nW6k{<&W9dM0`AC+&_nXu0sxzrq-2{`)k8|C=&V{{w9N zGvQF=s`?mwt8*TpP?Y(l4-n``_uM&2t>VZ-iy3qMBMN|QaS6;{hYW-95uPLJ7`wSc z6s?VVIZ|D@OkVbC_;}3l@z+04<=crB{9qW@5pV+~6Dl?e^}!vX`+B#&L0+J8Tnsi4 zltiXCf^jWgY*T&{wQ7i(`zit!(>J{;oMga{DC5ns$6yn>L@))Sp(+jb-qy-o;Tb z%noPQBG+jV=qe$nQ(nFv&I;AQ?#>%vV<~%j7K<)Ta-hp1?3E+jF3+UaN~qtem{oK( z7L+1O{_@iZkDU3+6Lm0fa!V*}X0HMm-V$+{(t+IOo-7+E?afSu217cpY90NrnK5*Y zj-M|MYR>1HdM`7+^@T)&&YvQX_MVaOx{A<`4my@y`cAORINtss35klst$DSE$$(5- z%b^$kS)7r2B9vrgVt8`{wha9mpK(5(ptH zfYk_KgW2Q)!&jlS=sT+RO0agiiK?JusDGmguObtJzwDU=O7>^~Y^DFFfXUzgwz&($ zqE*e-JAG~h^jmG4w!i&g%+Qhj^Pl6eT=Jg~7`9c)+8q;7fRf67o$-GV%>EDIHi!`5 zx;()kc>rw$HrUxs|BPg38|wP8C)IgEUS47@`WNjFw7lP6Ba@i2I^R?g2dJY2!X_G? z!4ujQp#oS~WD!(?;P=tW*%X7v_)g1FR@qxd@7yOptNylu0iCd9NW&%S?a|#N2>WCJ zN#$okdH_lc?b0|PqCKD`@2ZEbh4Eu;k_%i=y%j5TGd1BR({EW(^SApqxHP{HzWw$z z@3E-=GRHTN4B+)vheD59MK6krKNf2Gm?N{;MPC1_mBswmz94#U%{R$B zIF^*lo8^U+nsSTN4=Ufqi2P@uxWE7Je*ls@)ceoZ*>87g07meCM8g26P`~L=|IN?; z!J+Y24+r4SsQ9n^8DsyGS9;hn4pT8ly^_XX>lg=i;txl%En`y&pT!l$hA!69{J6xa z`~1$U6z#OF0|gpnFwctII_xH34)Gww+Dx{Zqa`VbdXvfu*p(5#D(6DXT)MeI@%O*_kS~->1~Jhrg0J4?dAY9_3Jh;qNGOE5RR!zQ zH6ycV5eOR*Hw=!*5)=@6g}-R4Y0w#W=2L~`yO;D=op`>0%%ThK>Chx2Yv#D0cy1MoZX1IOwzd%>WEr*vE^;~YvryW@r2Elou9e; zvc;}s^_Mg*&<+SNS+NN+C!KfffDpuwIX8`MnZvj~DI{?^(Vh3j$u_Hieq6{bG zsN+6z>QJ^VgkMP69og=0Q*w{Q`dQhBEY=r)NcYimuCS{F z&FtLB(HDNVWJB!kPj92T3@TgW&e~;jh}R|sv-XQV9lVW=1}}evRG(J#Er6FLiXUTg zm`e$v1e3n*ObbCQ;n}(+L#267Ac0Xp{%rc4IGRWEFqg&Cc%(h}B>+A^?py}MxE|rs zce8vxKuvMHAFAH>*a=oB-{@d_Ipkfz=2EU5PT_v3*INyy41ssB!`%TS^%p))Lg7jx zE5WGS8z8P)7sf)mvoi`yN6H}^Oka=JCc6Z^&F3$q-`0GqUo9_lMU-qqoipGWdDBV$ zi4|-CPVtM(&ImmsfZ=VgDksGDvW`$iq4yYOL4~D|y5<*YT^`RJ>?i|Rx{7*(`qH2( zzz8cL4T9O_;Lo!q!yKnPpAZIuK5n3H%fqy4Tfdmdx4t;G_Y1UE?xAJ3QqcEL(&GkR z>%E9ta1TF<5v{kAesX1##9v1U!`8Z(6c+{-dw{DeKGtbIkbV)YKi+pm475=C;<|K~ z2lpCay#ZtO5Cjp^gjPUzbESfKUGJPLoEOCA+T2?*?jq0Vh@Nw=tBbkiZtV6pA;8Qo z@{Z>B>Vb@}gHP|=mp8SW{0Z*{V@(lrGF>+8ILT;9%*;1LSG!6h!JRM5wJ_wvFES$> zWrLa9+vxdW)9-O>%CG6q-UcfGGZxk&UKmZ?36FvU=qsWMPNGUUh6o_#Xv)h9H&Jp^ z1&i6!oAme*j)wr!QL7C$aNgW;PX4^zV;i~;@D6^six%<;qI0`g>(q7GSaHwp8_vzw8{E;QN`z_691|3LEXTEc^*BqLK0)1!qyX=5g4-d( zND_#Sqolyp?(w_m7D-N_(J0@VB1^4zOCo+PEMoCV@1$E(?lZI9SWTgX7M!TUJLKS= zdQ3z;Le0ujD4+o;m)@|$iw;q^hx<`p+fdCby@ndy&SY`c@rDoB3z(BS z5_*iPfS|#y!Fa$E7M=>FJ57Y@lUQ)O(U`VpYooecy~UDIzUFn@?vlFK^}jCa%J;CG z1wC%84nJpgL--pc3|J%Y#RF;uu$!=kv3oC znoGMctk%Y}+5zzhTj3;H1E7Spr=>_Y&xCei96W-kie7+pS^(5=&ifOxd#syW@6Vx6 z@5tS1tgEZ0T6~gK`5^U$RK{0kyr}fIDEXgM^&k{s9RS)mpQey#5cHPwyurOkuZEHMYI;Km+I7JXPad=^~H%z_I3f^W)_?o}xsN=!*u@O!crw)^8$=sFxu` zb_hB%3;^=aGsZ!9TMa%2o|xhon-NZEVV_EDUn7a&Yb=a zrLL=bKAeIt*Mh5_Bx3FE5zu~DTYUiyA_z9}>#W2pz{QdMq>?%JZENjyA;XfUD^db1 zxAk>z6HGgnuo_5I0~+3ed;z*d2*!BQR`hs2huy?cM<2f0v)B!x#gAG$5&MN5UR)d3 zsS+Np;%yE6y2~jGCJ0pQ$S4NtVe}FKA9&6luMP3(1nD}V!(C^pJ&U)jMk-k6rmVD< zSp2a5@D+yFGQ^gw{onnz0$v47Tth%Gotbf>h8MP)7ot7cdKo6W;Hok6(Nb^H&GFIw zbB}HBN742KjZ!etTtO%lG1G0!A^a!cuB!PV`)Muu zWV4s<>lxYws+Y{xX&usYC~pq@T*B*}CbkO_X-)wWi_;QHp(nH1p*IQ#JLsC>Qs(f! 
zf21#w>gmO{O}x5Mg}SBuQ+r z*vHrE`%$!qK%{Cd`HstD+kVZLUeS8a_qkpazK4w?u~vyui%h4n%4#RNu+Jd2X}NgM zE|@@6mttOgh4X}Yd$Rv99q>3M@{jH`Y0i65*$Uir3p+#tPAGyHa(f+{!pqgw1G<3g zM&aMSNd$~9>=|LQKl38qXNY?EX{rL-7PU(mUwhIXC7ro=Ot<1mWZ_;h?FEn{b@1@f zm%b_>y#$cpI3OZ_{=Q(IS_7ATxDhXQpQ`IJw-0mX#f8uEdE|HiT^g=Mnp)xm$yvZA zEh3R!@jgB6BG|I}NKBc4S>yDTakss)7d?P)wf359aiz!&D@q&g6f^cxBaXr)Fn~Z_ zt^^omU6OnRJ#>5gV+3uTtM6;y%^L;9+DX=3{dXTCRQLDAu*T4x##R7l3dtequ~CJbed}Cw&2ULE+bXqPirx2HpU^Mu+D>8`~S(Dr3wW z+BFu>z|xNyPXHiw2OH8IoB+N6HCPe1>;bXkC;}Fh_1HEPcbkeg_e$oS@8}N{2o)u0 za|fdD%-hI5@n)Bf4n6ftkz~Iy{6xA zRx{r?RrsB3ENO~G-(YN$_6V#7?0vf5R9q(X0J!)OVN&Nc0QX){Ve|K2N?t6Fg#1aa z<@|F~<9!wd?1QZVA&d;_U;lj(46}etKB0tfnR>PL8O-rzLPNcM3EuBG$r1Q?4+E_m zw-9vzRs(t+i3OZt$9#!@Nj1A%JCImcEx293Fh!+z@@dX!vB|)@1((S3+%C{ zOx6h`4!Pb5xB<>C0kVqU|C-$T@Gik&{(vs%^(g8$Hvq%|A#nU6BPMG6HEAu!<^yDE zIlKm&S&=-(0+Z(_!1=cN4D{FJMp1s2Ke2PsQ3em-OGYCzNtDQ6WCCM?B0bba{pn-Uy?s23^b&7jw4$?JMR46tRd(miF9z1 zM*d$?<=HW}GChaI|Nm2I+{lZM09n?5_6}18m~{UyvfF<)+*aamo}G@?AFajg&Y|5vt!n&5a~6RJtu zz&UMF-9tG!!2MOeKYjM>_L-ad&t9K>s$^V3R61=T30Maf;zL`TvE7P2BS#(9iW3jV zIdvy0JjK7^C=R*;uA8!0bmOjsgtNyU4U?T{^ToJsEin;o2ycPCA7(cmAVh zyHIY|6~-MIJNDBN%%+U82~N-_H=2v)X!sL{Qo5?qfl2$oob;`_s-KJxW!p@(r?&+F zw8^Uq@h230vgZZkLvP-65ad(8Suuj7pMyT0LV9@8SQB!3lUw+s4s0N(4XJxS-Mf=* zEY(E2?eAVnFy|g-XzYJA%k6yW7`~0hiLFTXKt z!Rh9s%S3rijvdF#^FCj^T%8qt;{mQ^*iMTVWX$c8t&+NT3fH_f_Onk7nh)p$Mj1^a zZ2^CJJc(__-LLG9B(05$|If0r8dZuJdHm;2RZpoW4>S?7o+xkwd6ww1NDN;@4XXt|#=Uc8;l6!m1W+b&Yb<>7_n#IX{w{d_aR z!O5M#Q4BZq7M3%*tTwZm@U|q-Tq1jrJDDoWF`uq8O!Lfpt-IAVJbRzq-iX4gPScfx zaEv|KlCeZ#++F=xv#C~gjAa>RoyZ$Y&w4eU8&+y!-H0ooq0n+B%E1&W29^)wjn}Mx zX2%d0nosjVSMOqd3~{%ou5jfdd!ArFx_?tkE{qRyBj&bvwQRpH-}dYqd42E@s)DOn zsgF9*1>fFwBf zsIcuH@iIL>tI?I9;Bt5&h)=j#n=|@jpO3wSC!m;qzVJ>@A>a*+wTfD|~{S z?PJAPi)m{_&u+7E_(YPc3|u<%b*3Lu1@BIbUSe(M3Cw`RWY@Jxb);?r)jqt>*(1hYqqH znayRG)3)?AiiXvzyh|wzaE+2W$yb9JLRC-mNnp}wws7|n1tHNavFB%BR3hM1F6F&- zC7H83lf?p|xJaeBD!;=j3oyLTJ+vE;+d))2c6*}eAh^+J=tZ0dgY7Re_4>NHwh1|R zSItY-ZF-yP~f&>-pls zx#M`yl*a4gq;!dsPPB3Q?LJD}Ne@4tCx{DrkD!1_zxwh!o z?#|d7sh8N9gxf-VA45Nte3Z{LoD!QixP_=mGhIFEgCqd-pM+eTs}Vr}i!_*SA6s4m zA_E#OIio1si?0^L1BH%vBb2@7KS5vHQqWr?=--}2+$KN(b3ZCz#j}?n>uzKXqMqZqDe)EYA!e0VelMEmGh=qigB(zh2XybgT9nobqN;OEHN(H;a( zU||P@;zzD74N(t0wBSo1Z9I-Q-Ogs-7jTdC?`;@WY5nxZkk$X|`BwfrS~da6~Z)PVYSC%QJP>xBvZ1s)^3p8mSVR%F4q z4{zOhcb*q3ONR~!x?MEmTTY-UWyc*6Sx-n03elOTq2k~}t`n-7lp3e^|CI%~7TqZG zf8yT04y^}t$4miKd_fqXZSvpN9%FkLwqn9x%h|D`x01fMJ!eS-vhqcKs+(2LFJF^m9=z6Pj};YKa#GeT+zp z2zEd)@xf^`h?&%#M@I-Ng5Jc!<78VF41oZ`UkWXQ=bO-+XT;w&t=o~4U7$E?zwSj_ z_k#sJrFiIUn42#u`Y~sEaVza(O3S<&GHX8>qd~9YGW9q}y{6zopZ3Y1REx(@h)wL! 
z+mk3YxZrQXVR$HP8i)b}f6N%W$UaVV^r>(Gog)BB5fiOXZ@Xqp2ssZuaIo${9MMvQ z%0#|)7mO6xfQ0@W?Wu@BVoms?AT`Dfeb#VH#d8V3DOfxx`GeKxVxF1DtLuxF)q~!# zvO1Ro9{OGUWEkv%HGcMRbF$b1w3%~%96HDxmxCJIG^q9u#@{T$eda5(klh$d|FE3n zA(sBxFvg_qo8v|6-Yt*yLxFOrHo*&>$l(k+ixYk+y8yPHX&v1jQC!;%b1R`D`B;)vdAT&hwj|IQTUbeE}H_GO&8|GWceR=XyMp0CL`8_di zwoi^=4A?*<2!f>FcKrg_Xno=e;42S;nsO9))Hr8i<_^1@nUuE8@@Z_mJDz~$abZ1y zoY5dQq6H!K($S80n-6A!icymDqW=Cmb*1wqZQG7T#g_W{ zr#ut?sU)tqIT%CI^F<^A-o+hl;q5TAI2iMy0nK|CBaw=sv-Z@V5Ox|F+&IuVmxVDD z2pYbTnaRwV2b!*pnaW@{J*g~jL^rpvzWf+zNqcX6OQ9@IZE7FyOi(l zqcUpbw#Vuw4O=*$7Scv)48&FOQJ3zhq~Vz2Qqpqw?DGvK+VIobdg4E=wG*!egI)C) zZmBi)%Gb@O%b841m8xy6H(0{W{eYJX`ybWM>xZ7_kI5c%W+BuVMX8S)V>0w=mvp^& z?hdJ=%#Pe7i!0VwR-$_O=l3uTpOB#`J>`(tHk%=R2-uPHD*jY;6VfgyZmPu@ETE2vJko)IUj}gJIvdvR+t}O)@NVnny z4D{a$30)Xe-W)Vtbr=D!(}8E0pQJMcTl7!_3qo?m2}4vy4@>=?`x`hDBp!inzU zUUU15^?AgP#hC0uMHWNdiD}UKo=PS>coF{Or|T+Q4X_CU<`2n;rCOVaVgh2hy@9Xg zasH-}GiOR4PZdpt`xKw%%XFrc(kncn57#56Gj&eiz!*1wk@5I3&Nb*x=V$i*w8?Jn zna&VMQ}%x__9in(?|S_Bn@`?L3c7W~7@`y>o#HUK48sW6qDZAq3 zPR?LP=g7`Sa`rT)vD2MzX{GLQaiKb}a99WFJfL?A?{Ed|MaomH0l~P7wR%yuC>1-y z0V*t$W&DnqRp-^EkRZ%nh<6D-> zebm6lcm;e+IYQc~F#gMa#EX1Pnex9Nvqv>v0yqq3Ith~K!f+U0eJzQ(`a~=+)(~Mj zY5qRNNafonr%Z(he+Mit7%48lrnQ#5`L#l%}}qcyYv=t71FgMA@aC z{R@-_#WP)>cR>q1Ay_jhDK1flX>y-Xmw22j?6e9F;<{6qaPz{^Eh3<2WDPnoX_~&C zUpKyfBECA}bs=F9o_XY5>OGCCLGI^bkmB5Kbe9C?iRT8MA7NwKf^~?$-k$uVWVfY{#lcL$_G264q zX1h(d%G$E#hE<#aLd}4;o}JG`&-n(&YWkX2tW{_C8MM|kKi^E1J#vw_lzUZbM1Ui3 zDD69-X#MOI(Y^En6%~<#5ds|g14K}ab=}QfjO&dz%c(c)E|B4J7hFHXE|Dhr&$e>| zF~10@6@e+8)~puD`ibp5@$$DR7wwzU)S4iHcX{kH(!-RP%@faNueFE0ou4HN5WEVA zVql8eHc>H8I^J7{6_s~}qw!k$z27ftJ~cP{*5EPl*=OVtVG9of!hH#&*K)8my^2~* zW4=%7gK5SwP7ZY`WBzw=dTg=Miq|gPd^#&ExH?m22AOrKXL8B5!hM~$>aFgQeZY51 zz$@M1ani$xB-IXfj}P0|6Z4d*Pp(1F5bS;NSWxJ7-ON(G@_A+-uglizYct#4p92Qx z=avU6-NLn?o(;Gao#T(^!q;vZ_!-Fy*V{{ zG`Zq8OVQ!;A97BFpl8NmR|s z82%MJ`t4qhfi`p`F6ms4uNUCCAzz;Ki`V7lQ*#eCtHDvO-q~NhjmnK%a*9D;W;yn1 zN~2C4GG<9q{xr3BsK2`Rw0d@5k_}2l&{%OMYI|W8Sj2A=ItOQTr=|n^wPS->1#Y)! 
zyWi!0uw5QzB}IM9_)1}?=iCumqvK-59*l3}KrNtzCS`MXe}7%SIYa5`J%gb##y>jbN@HRYB)t^SH@GyUDa%C;Kvgr;N@L0MVN;U51JlOt$=N&L$XC5+QsHhs>BDpMHM zc9PULKJwoLa!39b?J{G;5VSbUnz&xqV&J%i`+Yoyo1!Tmlc5v{sOZJWYdkX0m%Kw| z?Da#9?HKX28dwNMk9Xn5QsoV&5L&YQVMzzz564Ml1>^Z~jveCE_`n$=wWkkSk8Z$6BGqHY zR2?%?A?W;Jf46n77Nd2c=2}|uvK{T45M~@=WvO#%?ibnpaX(~CbJB!+U7PNbW}SB& zy4)jxeW8}O=xoa8K>b_grKW^fbO>YLPvI_=JEqo_-5fIw{$9_8EX0*NuL?C^ibHj-))0fvAe?nI{#yV%Xx_3m zug!Qg5hhCNwqYDY+U2_9txwaSx6tt%s6Dr&S=Z_6#f_SQHuI4I`ia`-BJ)xb-v{5H zWuqn7;Q=RMxINL3pmk!tfviP#$CG!olTWnDywfRgZsvS0*xYoYGt&DbV{^^*ZEH&P z4XgfjC$-KUA!J}_Ce9OJ{*(JQ&Y57Or7Jd|K7qcrs)lN~)U^zZOje0nstOV=#)h;nq1xRf3kjI5t zD4K3(oq(O%uj7sqWJkz{ra&T=WfjqdC)PtD&1~`S=xPT|t_xHSUuL|u6#-EjvvKpG zf9PByTHC}ilr?Al;jpdF8T>Hs`{B0?w2<*k`h}*pZ-eFAuTARPlii=m*Sj54cfczh zw-ML_1MK;-Wk9SfBw+;&4X6rqAT0z5&l(vx28TuEK5s#3fZG`xz!Ep40BQo@S5XEa z9dJ~BuL*()rWD9GV>w;Me8sXS4>N8q5SLPXtsz5FPCbyt}@PqE>p zEQIm?z8*SW<3xfe&34XbwZ|(`c#l!S{uYmYGO8te?8)r+DdVZ2;?k3^<83+aTfv+2 zo-OXX!{5>#32A)^47{lXvN1z#2-eosY6;B=HuLimZB9esc6$kVG{79zH5$!FRn;_3 zyhc_(mNHjRozlxb|NMoeV}cqaUCVk_&_3KH4^ZKt*guW01>z|hq3aWuV}o_LI}`^4 zqcp?ma*EMF;5a{`)M z8lT?pp|`u5buT8n|-uCnigW@B;W~e02^Andw z_bBE4gAK)nYHO6%l?!w4lI&T^!WpYdGCUL0Z%t%e@sK6})xNg?v2Mm+xdno7C6)3h zv*lxfN`Izg?fGTb>0Qi0;_rYZWPg4>CNjCAG$MVu_gD$xesz@-?%$f5vJUERJ~rI# zyLi)>;7VI`Sqd-Q5-_iYH2qcMF_wFq8gk;x(`Gxtnw%*y;OaC zegG&{nv_wx8*{&kYYrhA?(ja)_xm-6lyuGpx)klZy5x{V5kZHKtP(4^H~XWeTAs^-0whbMnIJM?R9p>lQE}pRJJLtG69tVEt5vUOW#c@U*iEDQlP$pQ zx-^tp0*9>lQS6JXmBdiRgs{@tEd^{yjb3AKq$}@_uacfVJjDugLi7z%y@On4{+iZj z&S-V;KO8gMbYBHok9JBj5rkJfV@ZsbTk@m}DJuMGgs}64em;-UMvlQB&FNT~CWXEf z+5+rp`UsD&{Y64%9lFm-`NMyvXKo*30K3F9j+T$};KW)YWBl?{#0hh-m}Gh+Q@N*FN;mzV!E_SU>Cr;J%arfHjpd@bAU0 zGo}5c8$KU4`2V3c_l zW%iBmZ?K--z$Ac=nSpT;xbKet7~%Ys$JIFQ@uDn5BCm3D@oLmnr)(wHWWfx)OQb4Y z?P!wg=Os*Lk7u-^V^^8$ZAQRZBz6xg*)7f$n93+kh`P_gG18|qs%)!9UF4O0TJf&V z*!O<-{xM>daeFnx2-4mLyyUx^rL6{*-5pxB3!NoPtQHqE#d`C05zD%z)yGK3tkayF0d;34u!~LP_vqMXg22pl04B-1z+{ZlTZzxMvJA}ZuvpGfZ4O^E z>qQ79WEKv_(~4104xGDuX-|zd@N6R(HTHwS4j)d4e~eSI8i#^QElyv@sG&|q3y%;JKL_MBe=DGEShp`(HvqW>K$KwjG zMRbGthkODsU@AhugSPSVH%)m{ZhioYSo!Pimjl)6>70UbJJzjdKb>ZsSUP#)d`RbT znBaKtZkNm%J~nq9frkSpR{kXKW)YH*XV&eCR*_;q`F*a0_0>}1wbLA!64ZxaghtS- zx|ak}LK!Q*qAbmHHylAj^K7Cd%d?x+GE(X{SwsXKT)#m$Hrp>F(0ZM4MyT4Pn;@3G z?kM)KcvsoLD^Rw+dE&=4@!G@o?1cypOQ(fL()FDk(LdaF!lUjb-`6C+ zwj9wfcfaBX;9z|9nlSkD`wo%eVuC=^+V@Nqw8$a`C^E>Z z;ygy+x7nuEyrRhBB1Vfgv>w)YHXi#{5lku9vV72t&pl84cuhQ`QUZNUnc4^~qCXQ+ zUz|sZg2)5%d>3t{57sgT)+_44%Q>u;11IuUDzOkPk9Hv(S=7nBeoy5pxcrzx&dI34 zg(c+L!s~&N!k7KY%Vz%O zE@34IHHn3ww;a`VA{oU;ufXW&G;x2zdeE_{F>g&h+xbJ%{=Sw#P1UmfG8x&K!FOJj zX>OyPjG-izaXC+vE4m>L1jOyIcS0RS%ZYo48$HlIy{kXnd!p39v}O3&MDCibiy=p% zclxk>iBvI>ItXj6Y=FJKq4QA#7eX|5;7@3u$4_OIKTx1rwh4Om98qhxy+R7wo!fs= z@jcQu`eH8IHJ^-2k5VVHkCnQR)zp!)a2_X;)C|-VlgK zd{r+Vp9~S$Zh={CpZok1e`*I12@_A`pk9OxZ141@C+{d`kHw7p`)am!Y{+H5Ve1}` zBCTl9=W3;(OCaqVN>z||GO&KW-^;+q7$h@L=|6jO3tai!QHi z_tZm17CYkCW_Y|!oN`+6y5jL3;s3l9qVfp6k~i=G z-YN6B^xT-ZI@*${%2hBch{I`-5my)uxSN1@*CsvJ#)*<%!9^BYB^G+; z>ViHrc`n(!P0e_yagn=M6CiN7o=QueAztyElS8|9U`Kn)3a`EnVBATM&=F(@?@wB@ zHksLjR4eA4FbV8)ERh$5$sXD7I}euR(@HHR8fi@Q`$^mXm94$rH-+J_jhq_7a(~v zjXE5+8j_rUrzo45{4r+Rlxc|Nki^!@5`h0ZXZ+^YSIKW5{u|G-y zB5mm=<2XXN=FvZTvx>qO1+sS;c0=hL2CPq#^mRkZMlPexq%j@Cof z{dxzA6smpK&%i$O+DAYBmrtGHY(Ii6C212WHVg^6TYao9CP!-FnjS*2+bs$%VXgTh zeln+@@-wJ^(%5T|-K3^qMi|_^0#XQl>MgT%r|i_~7UTNf%1~hVdUrb$F!Sz+l{+0c zA+Z&QYZE^h+2o5KyZRJgOWmpIt~}Ol4l|&0OA8vLnSC2ejslh?wEdeTZeZ=;Dao_| z*{FnqC$3^gPP-Oog{Jlg!1Gt*6w)Q}c^%GmW%muL!e9>}K%MY>4SoopK=c;DGQ58>XXc<@VYwwiE$hLhs!l 
z5@Xsnco^SzSaR^7n$UG}Uez(+txXqS75d(_f=xq}DOAH;N4#J%PO$vKwZ%^|e3}%P z_S2Tgs}B@kkVS;c^skyK6rqmClBc6gevt)=@FSK0^UCtOB|1Mu6pXfZvUY33PuzRT z{Y}CrHCecGT?5M~D@QuLCliHqBTs^~u2}Ija`^+IW+}?IbjQb{YDRp!c0_Z7Gn2vf zV$9+;+5P@bTjtvirz^~Sn%p%#!KAX~co639wdf$hJQdee1bRAqZdg=xTMfL>5KOnY z>kXm;3>x7?9zuHl0EUr#!(#32W1dkD2^JcTYkbNcQZ-g*t{lXFH$T@;k>oY32P_?K zp5~T$_{DPP=(s4wpWR&iOyX#Gu1(+IEvWxZ`Lstci0yg|Ezi3DnF>wt`NOJxnum;U zFz>+UW!vUk(?%aG8@Pl-?eV~4z{~jn%CD6(AmlN|UCF9GQ>VVZr3q4V=j3v0)|<%c znxugH1y7~brEl|0K!bpcuMo~*+kAB~GPp0p1%U;~4s$uGV~0qlz!(PC2UXK$ ztIgGUKc|(<))JH*sm#-;E`=98-7OTQS!>~Egm>I+rzaHZ>xik1%N;eP>PDC_Yq_$C zVX`5iIvE*Eo`j{_* z#noVo!@|GC#q+vExj5#<#dl;p)Is4<9Te)CNeQvmDhMK*&vH5uYhm$yQNT4eSNuoO z8+nndu7ZyWx}WGD50MGqfjRMBYoj_X!wth2<_m4=s*3G83d^PlIrOZM%&dEowSj$Ql>rhxtEBEf&C zC-35zUrQO-AW}knjoGp&@T-F^*D`bRY^aMG!R!48|IP?uVKTo$cxN*(`2LZf3op~@ z3WUVX+5tb5bVVHP&io=v0dTV~y0MT`I@J2<3E<*f%?1#Ck$xZGqk_aJ!Wb}sw}5y7 zd0NMexOZQ+`EV70^h5MA@+W8A=*baaxf+m#ci9F?k|K9}NT(A&2`f;VDO4uC=bEwe;|n;zid`_fhpUuuDnv_2*hOf4LV z`+EKG_8xL5%BlPOEB|tz-(#W|`S+ZFmicQt$R+=4Tjl?GFSdVEbie?{ z5VgGYlhf}%8d5F=#OsVa7=T|k67Cw2-M?%^FsxOg`jAbUAu;(xV%PrJLZu5(t3*uC zX+loH24{9i+xt%RYU_T445b4Z&HE&~wa;2a(O`H~w)g8-nfv8Os@Sp*ukYRo)2l4l zElc{L)FwpRt>R_v-q`u1O8oNdTQ$$#Mtw1P1L?Rr9fLxv$@EL`U(=*-u)ps*XA+=i zfDWqzAUWE6ev##`R@jvwy1VNEs^a_ql#~2-j`H8r290}$hRn$^WFOT7KW@C)N{Fg+ zkzR>-Aj)Xrbl%og&ThQMej@30LfxFKN{{Ic@=0=?8#lW?l*yck!&|JA8>~)IcijpJ z&R@@ifnS=PNW+j_{?T7#W!CUHqZ4ESzk}W`Db4Eq-4iKzBOFYsmHdm@eFol>e|Z$*JZYfG_-p3>&Hf1V&$CQ49YtB1 z#YJyu57X_=rrJ(4vg5WzP>e2!A;|BR%m2sW4UN1wH%H z9879f{>OnV!gj361uhN;`ek|IKMtXnW^3In{y%?1c5ns~7!xxZ|2W)c$FDSO>3-V1 zHX+M_lRap>cUxO(nM^FnlRr>Ji|U~8y$2j`;Ud2M9A%m7DckrlUbs8^e1m{m!Pj%v z>i3t6`^G}ch-b!Ao5$jnl1rXVrWt7dMvDBF9r&NUbR3tDx{;_OKx1VKSoa9y8eT$! zK0-#Zr4J&s$1ApntFV4|pVaU5LIKKPYm6`CuIB*6Me1h4gueWw+HjXeE*1 z!&~8m7kP4>N1Swc#P-LK%rg#W1^5(uDl5?q_ES2+c!krPRvpYx1@{(=3t%2DSnA9O zd)8UK_YBGJ9W(E@HNR&TBk!<}4{}Fjna$iUrE_V4QCX$5uVWa2CB{4gGZT)qt%zCV zoPc^SUdK5;|G_Kg)~ctR=vUok5t#3+nj@_+w|c2lIrPlWp%wn;q|2}&B%^W& zXB0?fvt2Ob;M38wGz*hDSj`smf&jERGOox(vy33ZuBxwf98K472tY_{G(L^;2F@E1 zqA>t4O|{4cH5~^9hX__&j#<%_4r({*LJr#UoULX%;3TIfWB+DD4XlJ=uzvpY$|@Ls zIs&*O356n*jMVNtsGAzD(RLzSWzW{iHcMJ|u=AxTN$sZsL=<+88Iuv2d0;>`8$<|$ zo!js%Ae44QgCa|{{on!K#T<2l7V3v-K{7I+89pcJP}N5oxBAkde5Y05-|M5_fOJlr zfYp?r9($mS9`<}X9J8ghE7X*tlEsja|Yc=T*pdQ#0F0WYX z(ajQa?wvfap|8sx(ty-{6`p(37;4dv6pVrRUb<@kRYU)-k9qsq(^Rk{@`Y)iKGrcx zpuzvkM3-CjsLHTsK|UwPcBDAX&lrL`zCWzoG>pn_oL41yP3GO&JV7Bd1Q9@HC z24NsCpJAtEbzr-@jI>)+^9D^ROGb?ew|Lh>;;P=z9eA^m-MmmS3D^@Wx);F&^OH-& zTRB~TH?B7RK7@(z47Fyhdsts?*O)5Z6(D=>+=X?wS3;J&J;BBRbB8qmFFomN&Db@= z$CT`@e2X6c)4Qj{g{hX=YH(t(W&E3YgXZ><)XmRSRFuz_V&M*wq3ucpg+k&PoX5+f zcGmI5YvOK;`0t9c6ltJm!@{;LiHk`mYthEBF8u;jsUTnh7@k3>z<>~dcJ^o(k)eoC zPUp#FwdEXQw|uf!>7o$6NW*s{VYn*ydB*Yn+1Fdk8Ce@MqhXE5dx^*j_;SuKvZ~(vH?stnlgcSg{)>d{KhJ#bxZT~J<{FXu@g=M#FMV+ zR{YS2Dg;7H%SSjDTWR2P9q)}lZU`)^qih(|QgUmI^WcuC6uph=I!b(Snh17O;qGN7 z9B5%y9sN=H%vAlMseyTI=*Eegw4NWBU0vn)G5IDS%b+^hO*u={taE?o z4wTk$I?}w#Fz&um(Oiq%QvU5lZFGN3j38ISwD-{2X2VWI?c|mUZgDzrr~|_Ma|^O% zCpm#@A)um{8eYxmGZf}tMt{nDcx%OTRrIlL`mtU&2DN@Wqzhdo;GY8H=)}3;&`69~ ztE6A-DqcyoEB$w>t4oi2FS5&D7v=2p!c?XXuu_1tgm1FLRsjngipb08IFdRXmxS`K z;xg&01n)ZN#kW2c&aPDNzj${E#&%MzMxSCk^cfX{=4=)BF9*dTstu!Ch5Sc5MQ{fR z<#44!SAOb!jij!VQfZPEbXVmZ=W)K@Q_NCO6`1i{#Z(dXPlw&UD4c1$$v}$ z{ZIY8qBrPV_shLXbqJ4i44YtipU3 zUwRritWv*}% z_7S)-dK^$lbpZRTBI4mU^bejEfCLn%t0WC*nlA3KlM^#zR}r6lAx8{=X2&82aD+M~ zZSt8@!u-$&T|n_7S6-SQxr3r>pzRHwQbnDd-9hZcAt(yE`-~;OQ)x9V9^)4QlA+<= zCdwm(U}0_p^6&rqm=-ve-_7rw5Mh9vmdlFm?^LqrT^Zf7agLA2a_y+__d&BQjeaW&uC-v(juEFyUeKs 
z%FSqrPfw~&(0(TvauR9ykB7;2bY0X|F%C85j6EsBoVe>|V9Yafiq2-(7iy`b3hmu3 zUyhuH%ak(@UkZfI?{UU`MlQt5lNa3>LGPXoM zeqJedD*Ap~SfX|ya{3zTV-L^fV&$s_lO5@aMMA|Y;>jb;I!UH#y9v29_v)ZUSog^@ zO$=qRiGmN&CA<9b;NGRKLPTTH@$h@VpMw1s)#0?z6sV7({RyUAO%^RXq|21#*M``ba>s!4V{8Y_RVAVY2HIl=e>3 z$ZdmQn(YD@O!!kh6~@t0$mG(?izZKjWQ4mV{2@ud*uR|V)W(9?h_q5)iEne%y~HEV zrj+?H`QA@a3&TK%PlYmi>yC@hW+l%89RUQILDqQ#eS@f)^13aO!qnfi{x^?AsQ!|3 zdl^sn1Q2{wO5${>%@J(6@^GXO9jZ1>*M0iNpwMG4xriNzj6Y(3j}!pjk|C;LX_EwM z2xU>7*A%DAl}_~U$I@F4<|kK*v4~6F<~n`hvmbx9r?pX^9Q%W8Rp_)gSpUPk-*`bm zzr^ZmH)*+IBWYJJ6w$JGs5YhrT5-hw$HMvR3VP)RVWV7HpDB-f??w?E*@D6E(2Yo~ zHpd8~si-OO@$H4USCmZ^qi^Dd`~VM;JE24`38)jt^C@&blmGmwpu5L+;X7`-jBjOb4KKmVkdx12p%;=>Q0LM zu#zpPbGSJ(VLbgkOxtf2eQ&ZkuK9hZNY;LNiVy%wuKS?R>?qw#r${@u~ zv%-&~AKpTg8sSpKE}K7GfuK4_3jqV(p(j`nh`Imqq(p>mqK==y=}u6ZtcFAcu7&ui zvO+$%b<`w%L86aF$Je|eQLMlsMT-T+;hwp5_=}7rC4=?4sSNM;6 z*NmT;HBC!9JIwDJHl)E9>2%ZXeQa(hO)uLmsV-~MFI?L=*gsi3VBG~T>HhHmks(>{ zM-ogk+ClViE7D@=x+W78VOZ0UF*sh9)8(9_sezBvB z)^NlZ>W!H($Sy?7TW@$923_!67?7DuJnMYg9EN)!lrtVqFtwL9r){!w%d(y>1ASh& zG$WrRpE<4J}xpx(n|>>chV(QSh&Y92|eS=7nN(8xFr**T6m1QGN8 zctFvNqg}wyfxw?PF5A0@n1|oOGU^T8 zoc+U0{ncKv#9qH|n}wJ?K+G8eiNFNVU!EQt>59`=!Ty-wzsz*$UC|Y-{|x58^=9fG zmCViCO0tMQpWb*&ka$xjB}lm`M*)8x@iaR#I0L9K>Hkt;0t4me|Axzi_eC6@`$9ro zv90k`MU%rF$l#Yn3>~_AZ;rj((A)@D;5eKNZM{GloN-D+*}_>zbz`=-e2eoVJGTyG zSYuF#YPioU1( zjlom0@7#l1x2M;xQ{FZk9m|og0d%<1WP=hnJITmZBKZqTU)Ri5LcItjpdE@}lS|q+ zVhb{(CCdSa1LBCDGz)+}lb8X|ZV;{0PIO{SSYZsoMA{xg1fJGfa+QBi4r$eOtAttRSAAKQw zW!=eMwI*MJ)uHMPtFwb_FX%;)^z}Q^@pPBOPMm+)NU{iUWc zJ2rQGA>7;4K(4aG6R(c_ooeoGlPnn)6`m&^o1ghT*JE77Sgh6A|9Gxk!`P}UgG1zb zg+ly$z0p{|QL_{u+&u$nQ`h6S!x=gYN!|nn4EVIV6cMtIJ$bJPglc*8!|W#1H3*sA z>(z&jai&RktIX?v;}K1HI&`E687XBre5LIgp6O;I4 zjrk@weX2yWq5hI{yI&FK9T@3Q8%W9}APo*aq4~uz8PO9Ba6vii{)^XA8O1$(KLLzL z6D@Id7dI{(ZzywGE`SNu%uT=QT%d!#+|r3t&K!3?Wcvi-_076CuXktQTIlFY_9LP5Q&;ZorCjK z15&n7b2>g^sb@;`{OMg-Y``4`JfQ=pY&As;0?crFao){DxVY+ip01e$I4KDbGxK za{iv(q;GHatS$IFpxv0EdHtje?YcwnzlQgXpZFRw*h(MQ@r4b@!eJKlMCvRV1M8+u z!`5D|7tI~3kl8gtKqSL`B1wv9vj1shbv%TA+O=+f^5LwWuQOvl-i9mh#%vg?$E;jB zbIv6(s}ngAu)@xqkU#_p9Pc6>TmylZcoF-l!%r ztI>7J%IulL7c15}ba^1A#0vhX9)7_sz@t-L!gxy4tyXqx>6sY461&ulp$BV3(P2!N&+9$8I_iX3U zUK>RmS>GKzA6;W*)hRol`ZOi-+phIJFPmDk*n`bcV*}|7=iNSR86O;grJV^T8g?>; zsowtNf6&P|eUXLJ7}$fFbGVwGn%wk8%vjnBs{gxMH5=O$D1>uW=pyg_Xv%`I&o~ zTW_C~W^cYxyvD>%XXO0~B*doZ9d{5=x&&o)v->$#zCB((45-(hg@kl)A4Ip(kgpLS zo%7FN@ge*Q2_dT;bdr>I*^W8=;^r4-E_{Ax471!=LA@4YdyD6R%+}cuc9+H4p7s{q z8?|5IE@2qX%rhc}yL{@ZS08dL7+rc$r}o}4!Q!lK>AMIagl^})8cCpEu|~9FTp>zB|cl;28a#&)KSS6WH})gSIKG}*AT?w-Ce&T9ece>@f-a5!Co!moj1`rZWr>~YQnVC$HmM{*8jX@+pzltCr zDWMd=m+Dx;Ajnl^OFfq?c5CS7!4j@R9;O4^eTxSLegn|7e&IHY25*sTmMFFFUwlRS zc68?s>>c}p=ac#&)0{Zui&g-x*KQvo1Mtf07iqsEd6Kyv1uD)ORZd@(a^4g7;r+bE z7gwt~`58=j0915AFyLB|1VF~2r}IcU%GVW(bIo_R>P`wqxvBOIv3$Nf@}7l9-%n4- z!IE6#vTXjCBuTr|^DhqVOoaVVczpXTZ~{nt{j*5|rfBY7^PCFnBe5m>gyJDFanC6I zI<^c2k9!BT#YJeJHyikRb1supk~A{Tml`XJ$Mf@-4~>lstd0~XW-Y4`D&WcpfZW6v z(80v8%-VX4p{vwkFn7sC*;^M5DsD0zi?ZL~4`bgo??6U@q17XrEF`pb#tqV_BaqOo zQA$9Em&2Nk>q_Ki)ibkfI(cR@E`O4i4SB*3K(-*j+WDs~4-Z{mf%gm&0!cB0)u<1axOp&q$_jfq~UWr z6U-MODbE18CV@xw<;( z@LUfGpkLJ6RD3pOr2X($ozkh_$NncA)^5lzavNYH4ivbkn5n9LpR_6%tF)Qz?y_UJ z+M%HIbCGK8a}uoAGo{VBCeS%1M7HZa*^YqdZ1R7fWk}S?DEVdpcW8_8S*%R!A*6Xe z)f0-R;jcMdBIaR`G;qITBmWLa91t5*ghZt`@+X5LB_&g>ha~7yr6CgR8K1xbuO?Gi^3AiDIz0pta#d?utJ+n{lZk1 zb`A^ZLIEQ-zdYE*vHp#=(p@va!v&_PKixSZ{;jP`A-Wmef8s~b-FkGaPS$B1^I1IW zpR=$J6(9J?mC={3G2!`iufvwjkR^w{v&H*Gj&^`EP`eI8PybG(OVF-MY8DCd#Pew` znT`Iak}AkvOlSDyytAZcVk;2=U%?PENADR8RUJP1oqqvHt+aB3xvqN0_F1+U%&mY6_D0zhiPP 
z8lGP;C=iLMuJ5m0ADPwnJb%~Y_LZSmIE&VfAna<%u>=c};P^Y$bM!9femN@z(FwqW zA*&%Jqzt^@3o;MZk(7$hTKu$LDwVY*-zBic>5<&kqP+84bu8lwk9T?-E$;-12%xIB z!#N;`f)!2lv3=iI?e zWL=_ld!{QgpZ8mEql5Xgs@ac~^J(x~k3Zq)vG?ZeJz05CYzKKVzocqks!!66 zRD%h^6z9#eF~=}6e-xf6ngW8~9K(Lyu5NLXlK*_CvM%ew{=}ZHtW_^lY42{N z*mLif8YXzqmNtPNXBfLgFvNgZtz8>aqxHML3+B^5eTMN6(sscz>h?N0(AI=J0`lpd z$Jg9}lMw6zVcPhein*CVxp=8e$MT1%rtnCZtxjWbz7b#d^11y1-WRtc&I$Tag~BtI zfMVDeb&G780zLLbkS_6R1d*m}HSSoZXGZ6ZL;g&cyQFHk_g%-ST=`x?e*Sy^Lb1(m%#_95lz#8O`p84X)PKSJff(-- zK7{`!s|eQ+`HiYQCMXdr#Aw5z%f5+gL-O5tfmCjY{YDS zxMz;`I9LRl6Mj+s5l41Whail3LwgD{99+P>mWE|{9UiIze+Osr9MJD2T3N-T37nK$ zlyRV9Zis}y>XV1cePA<7`_CrfoMrHvl|)IS4Le?cR+E%fx~=@`V-uQ){79D(Q$0E! z-og=it+s2k1eq-dQ1DRD$AY; zy@BfZo)m+igFkvl1{hG)zV?SmRuTqU!b60JwQ<40K0e>q($@y$7A`%e=W(>Bbv0?9 z>(ASenK8!~ZEg^~dvdO4bu4nI)3T>GV9ONl)#M;pHu@O4-#_{=KCF}r$?%!^-TYuhYDWstN+LLUu2`<5}TU8|HaY|>W8QNpa5}PA-rKVlr z5o>p)`V+P-A2YK}1269oAkuH^~u4HX!G)gh}zWnTG)Xk8wjMXy`(P_V@DG#evIq&4_ttd?)<1|B-q$o4|A=*z-j;f%PBQy z!LVbAU*mHA(*eS?Wg`MyouVjb(EZ1wc-<|NV=!4br+c<1L@08`Ko%1?MI;;9zWI$?;}aTo$Q@DI(|6Fp3db?s^oSUbT^I)8#E8 z)x67Y?OGHbbB8&@ai;s#?zbQKHB~P?Usr%4*=LVeNl)}sx;L?*MTrEd0_PU%vBkAYVO8tEKBpt_vv^?r(!*nE($BQ>TglU zWftpRBtJ?7-}{*~vx7yv@R4DfCmT;fe^{TueybUqnUbtJWVU$`BP^G9E_dN>jN?>z z*wEQh!0r=(+mXwUldQ-3Zc7rdr zGP>jljo0WKMjz?4*yD%j*N>jm27;0lp>mfSKUFJ_%r4Mv%MvvG=mhr8uXK0MNvzjv zs)P*KT_okL4a1O~lMp6Bg3Uqgg5;=h>fM~FbUoeyb@|^S?hOlhZcEC4emDNt z-B*dId5YXS0+CpLr&9P)(uS-7wSi(<6%kRbG(@G^_a)-JL`u~aNX zD21&k?Q#0ppZ6e5CSVQ7f3gPCiwuY-3`|QZp>WnE^cf*;AAX_9#3XOn$AW(2$csbL zeNGn2dSVt~p?BguR%Dt_*;U|xU=;!@5*u=*uA_w-Q#+}O&2;w(mkhn`R5z5xX2N!{ zPsm?wnD5O~FPf11fRnOEF!_z-Oen%lMym&4`@|^x{v-NveMr*X>Zr(I@a9oy*_@h3 zA5_Yd`|wFF02h5qSBg9Ze?|gxhP85s3wK0nB@`k!CQ_B=k@-k1 zy37g5fxk9SpCscQP#X7%mj!d4oI2Vn8#0)VJkzX%^Ca@LeJQbk8$eiW-Vv448lE6( zR~i&{IMwuy1*xm$f{UEy$TdHEdJu6SI(642!>}BA!eU%?NjBY24KLES3BRv;?tIyW zsR+KVp`3Di<~D4MkW|g3N6`WmCC^d!27Q)OM;}ZSR#!{~W<2=TRa2gzp}Dudc#bj# zrfT+EswKJNz^vAG-_-zuwloKewV)@5C1*-=#WL0T@FHICSp3W095@3f0DnBG?+s1JRy=hwP(pZc*b915PSDy@-<{33H}y&-0kHK!-GlsKg=TO*~UW_ghVr-*Bc$RcwLICCB&SKxpWq%|MDu6 zGxp1w{^E5G6>TXHq$pQG;dFRW##ZSN%)@DswEdPW%Fx>8uKG#yp`G&^%AmV5K$V05 zZe!HYP3aJ^?{o#y#e$tq7mWM&zHo1g#<8d=*yp}TSEn*$KfC~Tl?iX8-QrbBilAr^ z0F{~&Aj{2n%QCImJjEq{IJ?&Jj8HR(kTA1MV% zr-Z!NeztW~>&Zj0D+$ul&t*G>+AqlUiC)h4z)dkf_mUv8_E2O<3_(jEevb{IiriS+Zy(N{Q`!VwM7V;(l@Zkpi_sKD ze;_KJDL3vrp!#i}BE!8tN(!V5GM^t(nvcLx;C?q1OhUhSvY&s7+3gCJ8Nybj|l?KAwa!I8&Gpg?E>ijF0bE?RwT-Kh~ma;t5)VDeW=l z9sBz0cW~^p`0p6hN7Tr0xodJHZF(B|u<(gnE*;+rFuE?eV9X>6Uki{((-GWTf}G@q zfM&|`2w}QJ*b9`8ds56r(kJ)4ZtmNje5z104Xv>olQ$u%g7vcEY`H1<6{Z@>dBkIE zQ6yZr@cWEzk~&ujWF5j@lLR;_PM!ENf$QZyeP3ioRAQE!EW%zj>M5(`J-qmI&cxD8 zf&%aRo$3@~88lN52Fw@}nGW&hs43|9H#q%ht1zmAXOUTpzjcy%`jAFww`=Sp*JXw9 zT{CCn^B4z4?~X}5qVAGmCn$8{8GwXl^)(&Ak4tk;e69An>-1JA0LS0y*~sIU{d?z^NpuE4{OF3lXkq9rwuSZd4kILTysT+t}zkDt50*tlr-3 zN#;)YR_gF59aS76A2NolM8r|V$Wla(B%(<>r_x7DXK?S6-IpU)U;(2D_oADQkFVvN%10rS0foX4Y^w-0Kkhbrd&}R75~+d7pfLxDc+vQ8hgzfB2Msrc$0Tm z04K1|{7t&S?A3Z?eZZwda~ooK4BYfSp>$jkN&E;ubF_9p(%rxLwZ+GQR*SC; zByi)Gl=-&lVYlNXbC_$+Tu1s(9jjXVllQ`mh%2c(%xsS0{kQL67~+ zlMzAobGUnSRFi8tO~g8EZ5~!82F+NedD)*^&w*GPA~#%!?6SIFF^=PWX&0Q_)*z4h zovJ~u?hSV#zR1%i8Q7O6s1Q0b{Oin;%{ZABhmyf)a%S~467AwNPR0w7@YMP00ftfS z<&Lc8{5`J4HMO%hB#vd+5!ipRGAjtEB|wci;TdOURHufeuh3}0McXThcZ4IRJnj>J zD;C(cdyv-2s3p)W2sx=z=dlNl_8170PZrOc8bnm1!Z&0~q(kV&QmCsPSfO|JlF6l0 zagPnA4Q37ecS6B#hST8NCxvhX@%#?HDD=vR(w4}L&0E5RXeCCz z_tY7xdyMJ|c<`1m;jCc{_>##@(hKao{ykD0mh_}lCYPv_62q{SU9)_X&e%HtC6|@M z)VG4OA&=`7)6HYi{}q4W8F-4=qx_d=CWCd}vLyH|RddWZNBF)Lo*2yH$n$o{)jk(4 
znw%TJZ053*T&vJ+sfB&%4#N#?feD=W$=FqbVdoHw^*5_mM(W>L@}sM6@jzWK`)TV9 zTZaxSeAss16X`;;Bb?AIGh|7^LIx0h;I^$zz?Q)}XVUuZghpwngj+w=)h>e^h}3r)N6pOZIO;#uMCzvH zrs+Ye0yOWR*L1oh1H3qNMF!sW<%NNI6y^+Ar;N$h;Vys_SBK(~vhX$cA2F z{^F^=aG$1E`@DL@{foAtTlWJ5D&V%qhGZH7IPwRCgA|6T?dZgKzNzl1wUl1?CVYQD z$g?d)P2ewGctbu0<=r;9grJES`O&9N2JI^(eEMlj`@@ z@$kq^NV%zeS0$3iH1dW(J(A=O2?H@vg#4YCTC?y>RBm1aI9ElfTQ`Q)&GR~Z8Wz^J z?tX3Q!Z7Rcq*i*BGOn1NdY)g|Y)(G>^Y9RVdt!p|i!*)uegqw49>DiP)Tj@`C4}#Jw*5om2FU6i&gwly@H8_hd_TGGf85LofIl37Sbn9vh-CdLJBp*5?a!L_ed6 zM;|0EYP1skQGUuWgH!mVRjHp|$D z*C@=!L%9v)HCPV1xVsa0XSYStcZUq>GJP_~A4Rk$k7%qwTxQN0+k8+_KXq)s4&rQP zz@d0Z><>pL&582s=CW=ZzH~EozFkda4VG7ZqgBIdmA;t!K8hvpl7xvx*um6~q+1vu zTC?`TCqRlK;Gcx4n;7;l(R(8$S);F(A0DZXd>fD|{z&W5*at2paG0hVnZ1$}fQ3Y6 zA`{6xXL0}z_?hZDBq7aZi)JEAs_O?pD-liKf)&kX&-1mE_cuP(8DJ+;z=gm{>MH?e z{m=2230sBzcO<$}xGpTb5f%e2m`um(%bl}41NFTqjMeKsXXQY(9b;49BTz}!IJTh- zhKLiJ!HhmKY+A|{8e{unr(zvUe4lKlXss$fmJ=1l=vaG8b7>jVk#h(|fCHxt@cbb* z`#$&&wSArPH|ctI)Sn}%DLKNu;I6N>+G;5fZIhMk2RfiOotI>b+ z@_wiK6)OAlDa4XSnw-mi>FjUEH5s1|&dg+w|(+<>T z3o^XC@~7X5?&oOr2DGqmuo-QshrQYhlo%1sn+c#5Tm504Z=_qj%V;w2ogUN5Nf=5{ z)FUe_@u^Olje69^l*wGt!=~DwwH>Y(pL%%iGxmAlWaY4KHKR$t6ck!omRD7k@Fx3Z49lbhta%v+Q-aXa&Dm~iEOuGRFbR3O`r1#f8Z`I1vTMGrRKlKNv30kU2lCmJlhoziWi`;b zDf^mw>?9VsCAh>C4FaG#*mnpHg!@nwK{*f5SrrH9G>k|0YSrTS-0#V*d_F_jURCp= z`gSkKyc>`kkgk|w&X|{R zUpMeuBA*6qs%xz7)=>^(CT`y;?zcFrRJ=?i$6$a%s9IRb%()zk(5l?Jtm?|Bn{B+$ zdku~5fvW*#%n`+86f!!8>-f~`**N5uJ4C3TP0oPgG2ebi&HF9y3z@?lW_2=3AssvEo?&G)H+kAcA1Q}N|D7T)E&W{*@BoOgUM*9U%-rw%BPmi zAkF=C_WroULpzDXoD-x^0u*jeDD3H!?<^n6*?fj5_eZ^PQWQ+@0tOU46BPC&8^PfvvoENZAV?bbTTLKILCfh^TC&8^RJ5+0bUowW0l~#s7Uyap}0j3xP&(jP0~|;B+j7LX~)u#uH_BiP&>C^ za`~lI=S3DnRhicvqF?yfaGV^|Z}WE?5T+rQND=OM31ZyXrr!t-AuhZu8#BEpB1h?O`oLbDU!uMxp3{tT1)Vfk(Ic7 zB`=Tr{oGgy0bq9j56bcX!U3XJbBm)#B>qIq<=vT?raJr7ZdaH8oj3KzY5?At@~p*Z z!KlaHO0TTvm>(@cX@!9h;q~kr@3Vf$Nm`HwyJxZJ<`+*x?;&+L!qH@B9S}h^w&96+ za1kl}wE>$`^w#|Su(p`qm-z&tYCsYFLHw^$&wsC!v9p=b6IsKP-aMxG-E|f25g?#r z|Kdf8md3TRUa_kaofSYaUI#nC0dvL@u?R%P@BkzzT%sNQ<6I>`r$YyT*#K2V%>Y04 z4Es0#_?uziUdiYBXab-@jiFb&6#f|}hN@{1EkPInTlSnt4c>s~X zIGJMtl57BINDfKk9H&+hxbp&a$^;Z7b8I{M{#)ba-QDJ}O?3W; zc69%KP>pV)YZ*@~H@(4s*ADn^$D{;^NUAqQ{Acd|Gk5=;efFPq_y5hFt#QUBF}O}j z1QcydeEsds3=tFCDM)*UD$vvVmC=AS&H=b{-TAXc`nL)h0ahyKr8>aYdcdN==h4_a z5Nj*9|MtEr*ZQQmy)ROiYi1WoKWniE%iA+LKc?;Q#9%R--|XiC6u z0_dhKj!w)3ZO?BnU5*mp7nZdmJp-3TraA1z=4I2uCGRWVym<=lZO4AB7t#%o_9mg9 zpb{6^n}u=cnst78()ADi=DLl?j-?3K@imLnS{197tin78DdOF7eLm$T804qPEvbNG zbxO|y?P#-jkr~MnAFNXQJJ}WjpOKpP= zu^G=KiGAbLR~hDmDSox(LneEA_Eq1MzJHWU4Sk#6@LtSY#{JTVt{*lDHSX)>)tZA4 zl7z}823^@*c5G#9={|9!9D_o1DESujxu5^mSL~>8F=~t7?q=)W8?6~8u1h5d9t8H| zVpUPw5YIeW9ncG6bI;6)u|NAB&pxa;RHyq+OO@`_n2_aZjOm@l>ejiRs6hY+2!Jo; z&@Kknl=EQ0sf}dx=I>M`q~1|H`(9So)8Z0q>)s*G>HeGmRvR_J#{xOli8`YI7;#Gu zfCaCgAc)ga>a@L zfmb45noE(@xBF{#>ZaxRS3IeDcO|l`XJCf9N_#m%30yTgg9`i5UA=`~=$KI~v>TRu zHB}#CdgMs-qnvL#mVyfqog>JCLRLF5SBXVYdBm$RSHm9dkGIsm$+uWaI1?xHcEOsj zQ1&Bje_vQq2HFKplAMezMLZV!6>8PL**x|Wv{f*CwQka9%|)>(T&_9p!#QczzD1rF z^g^X&f;8V>ULjpSrX{-qv@bC7IZ_l;q~uof6|7;@q^f3tr(>W(kW;e$sb)q8|Xo#RhxC*|D2uYH1YWPG5u1$g1 z$<3i8T-=tg`gL`)iIV+U zo_*by1^x1OvQ7(lKnr3n6x^bQaFQmmMO(y2L^Mu?%*N9Ofc2nCOdRVu>{%VHmkCOP zHEtScWu{Nb>Y z^=BFB-`$HWUKi;a%Db{~UO}!}Ojnk^b9Pnyu-w3wregImVHxFCfpEKPRClLaT8inzb7K+_9-nYFh4ApfSXWQ66o1_nzKpog(z3+2qeK8~|Ds#AaY*qBUtq!Jaq> zr@xnB45#0IAkAuBQKu`^YbH>l>UXN1&burI>jGCLD||mwXC86x2c48txI;$yuam;C zh{*lGmp~1LX9*wceRN}_VpV;!PO>dD+ws{;9s9#Lfv3GX-Vh{&VB^}XhGUv;>ICst zpSX42A@+14SzRfY31jBQxdTB$3PRqu*c{UeOj(3R#Gl1Gynx(7v|kN5{cZ6S`Z44} 
z51up+H&{BG(Zvlg133k?+>hbxr+v9W6~oK#Cx-Rm*Hn-JG7^;>j(4TyUp25riJX4+S;C++w3iGR5(o1~tfo6$HDn^3-acvwJT-vjG?$c-iKZT>ooPya7} znEt(rtUoalhImot(OeWce-opxA)t^U^Whm%XfMFV+0C8#$!yQ3jGgvK%#4--c1{*B zmJKm0@9cq81Jo+y+awr~vC}LCblM#V!;8eALO(o`YI$V^C*IMa_ei|%&_-LrJm9)H zpDOYOX7me-7vRr*?X}8WgYl&1}c*2S1)P3X1+g3Lnw(NO%Hqu^OygwK;|l$GGcl+7ig5s6>tp@QqIv5( z_uF)BgzWRa$|OgLsq4u{!P@?aZdlw5vK{uKF{#_Y`L_GCqP2_kPkQ=*-N$i(^y!w$ zaQwNxF9&A{uMfSG_x$3)*ms)e2t~22q9iK~0qY);U(u|sl|FB$Q-$ZX7^?-Ai<@!- z-cWj#p*{Vt52?i&HY1nQ*VY->4?U1I08n}{GDLz@R_fo>;TH&8MaUrhC63>z?kDbE z>dtVKO2|@wdi%b{P|A)_WpKYlD4mGQPKXL+aR$@@<|ZX|9@)L%uPGu)d{+J0=QdGm zF1CsR3Xf;{{pPFhH#R-L@33jye~x{#;H%B&qdGjKjVTnwRTIo(2Y|k$Brua9`~gCv z4r5#PjmHuOpBm!tTsqx#F-P@-#toDmYA6XrnajCCSd3|gLa|9v2XY2LZJ1_hO^u}2#-$I0h?mc;JK>9x$)JR7y!pE}Bk zC4VRO^uT7+Zjf+T5Yu`qD`j-dcZc5Xz%PWqZY)>-&Ng)WNr$2EaV*ZJ6Sh^n<72B!Tuk~u9#gK@z?X~sQ^r^zhkdd|f5h)yIx-E4K8%CT29r958+ zZr=6lF8Kr&ZN9)p(0xwf@)tBEPM|)L%e^D= z^1WrTs0!lps_r)>b|NAS0Y#khf4^B{tJ7;Mb90Og+&JlK3g4@RH7?A@F3dKbib$W7 zOYi*jko>ek^A;XN5`_+?6}Td&DGZSYkD9^GcnxdAmWeH&{mMG@1wuQkjd)gE9=AfZ zTXadAH2tZYMbhp4Nh9ARuu_oySt4T{o)4@;;>6OUxNd#$zuDOx2cFHXptBDW`m!Sb zIpLVN!Vpcjpnv+lv;piakW^=-2#1U;Fn(2M$g>{6%CP&3!S7ZM`Bcp1>Pu1&hzi&@ z%Bb|pMs+&0;_A-qdjSgHVMY)-e>K3$6O{mpxn_e8B=Nm_1lj~8r4b)6v&PuHy&(nF z+2)CdbXOu*QkkziU3Z-Oc!_(}2Qr({?FZN^8H&bt5aaS_GgIMW+1tkO%uMwy7RX5? z9w#-z0{?J!gYKE}P1&|fUP0Gs<-xbtQJe(x?n}+y#Gr2Qh3c~F5k+BU9TEWT*{k&Z zeFe1}F?AG+j>pc>6Gv)ZR){G1AyE1d>F*N>z3zz!6-qQsK3%~o8qZ>DRq8LXUqURxfZ03!a7T} zxEAzX55B88%x~uM*rS?y$7)AO=rkrBf4?h^cXM`+={;z_eq4!ZhR2k|g#!T_!A%!; zJgdfc53!}@BD18D+#`cz4-eOuDs>glc%_;eIfbeo-YjsBlGhx`ZT4Quzzi|NMG1F^ zDv^rRc%GvAL?+>#E2CRD^FgQj`nOw&!rgSH&%RKJeGz6ZmS*pHq@TObb52JxJ(P_n4fQXdPATnZDy&pRx>ddEzznJGL2yOO``3> zGexwrHh&^yuf-{DogeMe4>;v&#b?Ijp7P|gbnWPwOtw_d$9H-~A9hxefT$1+Cn2g6 z#qC}To~p3(ckT$xb>*HY^aZQ|{_QK4n%11Pq59afFD2v8zm1$KXUjx6Uh&TR5g1GM z28wVN6Z(kMKsnAqV!?#sJX4#hQT6D%;h*C%&^>3ESK&2-yem1rFZ=dms9$@p&^+-# zy(0%5Kk=^u;D#}9&q7xKX@l}bGm+(wS>0!P7dv_A-Ak@{^e5l=e4$dOQZD(_efC>t z`_9l-22*E)-Yc4~p?koyds-yDB`HLwCjZy>lURapS<7~l!ncV=_!mA5#D&iJ%(BK@ z$tf0ge%M$0@n)_ zd0lFu?Ki)PomaU|uV?boTkyxd*uz^|0E&}EW-3FER8^4c&MyL^wQ>ueyK}{|+-g`j zfU8zMQ4bDn7vyznHK;i$Lb#TcAf^v2lR7(%Ce(fN-OM=ywF@?ltQXlTYcnO@6!|}| zyU({Q`f)|?oXT612eY(7A)kgJ$W+Q4{}uFbvxqZxk;>+qodi+3s)Q60%wcfzl4kn{ zm)u*bU!IySxjc{ml&mYNb>3pD+#Rn=W&=Q&(DHBlnNgAN>PMRerCRD@L~NZV)Z*OV z2qs#N3{KprvXiP$zE#icasAw-&{LZQzwEz}HvB%a=I4IJ@eMZ!jXFEdsAP8CFWoFK z;`6xFyH>;6F5V7258b(RYl{(68ChQuh6;!)ik`r^6+txVutFu&FoFD?b}R@Uam2HG zoqMF`^PQYiS8}%1f_xJvVQ&;=-`v-`iq%t#FjmeQbQ2#}^AAY#;m^5`X! 
z00kyHO+!Y1d%Z-9t?r2tD2!LJUyfG}l`YZ0sC$F)M&6$qE@;U)!kmhJE7*V7H`aedbGg`9HH=|N4; zchyG(93Bf&!BeIOB_`lT#|l6usY)c0a>^BO*rsYg=iA?(M42n+Y9E`06ZF_nPaL>( zU3AvT3#Yob6(D9tw~9qRc;&^9sSBxF67~_N$y)w3SBf_V9u+GZy?$K3m%Fo z-eVqmftbLZnKL(nhrin;EFnSz5dfKI@U8veahV*oVBMQg<{iNse15l<58u-XUMA1# z2RXL{1+z6wS%Q>EU~E+UlMq3FYdk86talbwvd#}0Q#Tg##!O|DX2WJF3aAZ5KpEQ4tZOBSa}8 zUFl6CBGRM_NR2chgn)FAKoq1m0RaU8rFW!+4iN|-ARt{j2^}d3B?JiZJiqsR?>FD9 z`Ocj8ch1aNGqdK8tn9^lLZ0lspS|yW-Pe6x(sU0+4Yy@epxd7-Lv*7wZ+omdwA5dW z`W$KijFRgMrHSyMK}eM;`A(o`$J7Lw4>X1TrA&vyrwms4bGuEJb1!bO??GI*P7uvE z>=fg*PI#g$2lcDK@(3LOjoAZ8M0i$gcblj>X1I%TE~%$}^cL#ZRNTjx?BXszg50); zmoEoT&qE))d}ZoLC?Lu<5q3Hi2cscZ56Xnq(Bo3Sy!s!DT09If4^0_GC)}@3)QFLM zTqUt3RlLrW012`(J*u~J>(Ps3K`HNX11i7k%1 zzVN)^L#PQCCdTH~JwGqj`PiL!pDEz+A@I9YPD3fQepw5?o7ij;5_i41Rd`H$V`{Ui z_K4$nhq~k|A4}A|8}c-4@o~ca&2A@j*1*(YB&#oi&iO?HWnDY0b83ao`qreNiKoqB z%f^hR)eRZ7nA;i?9(`9fO3mJTzB#5>;~tuA#rBrjVYiHkieu8^u{tMe;?c|Doy5kB zhLHkoBce9^EYrQ#_v4hH3C)lmzN8CAtIXcU7n!E!=5Iog`(2ME2TUF(?*QU46=hYuY0~=K}d?>=t159m*t&D z&I)+Sm*|&D3?3riXO2Bh&e#d_b{H#L9CNu)RA8yl$;U4yYOWCyqh-b>XaADTFsKhw zJzj94=;ww>)2%L7t+4V-vd*$fu?RRnViQ_^E137n%iP2}YP%lIOM+BO!TtMNObxRQ zudenUg5^;azudYf(xi}AyucB*-QP@t_@Xf><_5zl+8;99)b`9#KkFY5DNs+g-M0~1 z%acYhtBF5UHuAZocpN<=NgTuRB0a~i!qf_8P}ht>FdxLZSVdF7uoFqpLB z9bY~-9g}(cGzE<1#jkdO=m9|e2#l}2seu>-*6IthGNNF)J@pJq$&W6(-*vT%2c1j} zONzXE^El9^G_nHmXj}=o0AhfqS+)vPSJ#f2KK=Q8Y{|^!%QgPa^nRWQzI#F?!ljjo z=a~6yQGkMZcdOi3MY9DApVJM)#)7)1;H_EZQ+;`PGofW$p;=i@gg)1l-TRHXE=qMO zH4>q670pinm?FDK8nR$De*RmF_(ISZpNSMA2NMTVokpgSR)R&e&pR=Q&2xzh7VKR* zegM~d(ymkaN&^YvjjLepC_%TMaGr{db`)?Lc|*P3=$dc+p&I6+<5+G6l#USW#qLP{;%@BW=zYNETPV zFc_d&yU%o*8`>NHHb{ihZOu=rBgph&rgoxAbt4NlYB|bs-LfBgM>E(C)z{XHCTjf9 z*H~|;rj@(WS0B-C7@5qSg;zdDU(7GWt9Lpo(q1#PIUgm~7K zuQ_2;gu8S1Vn4do{WV(h1_bqv`N{M}&>Jy?J8p~=%`_)O7%0ehW>pAYTl+fQZCk|I z#?>?gp$NiJu7=$?8_E7q;)hj0zcz|3>UJOKa@L`dRq zm*uZ1%Mw^V=0IQ){^o(GgBg?E*`8#puemX0ysi#x`nu1q;?VB_SZBM4z>D1>Gk05_ z=1|y5bbRrt9p>Uu+gl?$me17P;f|iyc(ZGK9sRf1+zR*Nr>Ykp=bXGiwmndyIWZ*3 ztQU~nv1kymy_Vf`juq?k!ombm8$I2)ZR4%h`5fu{jzg-cm*nlk8{ z`pPvOKVqK#ctq*G16J^0bljmjAv<*|wekz>;WBM@kx?+y;`#n~4%sbsLILG67SD^j zpA`jadzBT9pm!^sh8~(f^3cwNYN(HsP6YgGlowLNe+7VpIVEHR; zD0t~TWdzzKWLe%d0Nh+@|G)Eo{Wea?t_WqN6l3R{RO@l?_E44lG z4Z2xgRlc`Eg*^hVsVeT1tO#z{)UNr9or^nfUu|VY)R^U_Fg1UDuS2Uz%j|un*`DF_ z6$2zXs}jJjE2a;L>8E+Ef?TRNy08rVZ!9tvV7q9a$(w4~I<3)@=yc-S4f~FJ`Vb(7 zn0H2qxOW3Fb9L6%^vRQg&2+r%LO~n56g?fO54RI7-699OlsjeqY<+y(Q>`VZx;9Gq z1prF;-qD8N3?5*+gNOSL>KKOX9scBtLUuk~zn05FR^-3s<mQSN; zQp|dr$5fz$qI2@$kTa0&I}XF)<_6}pn;KZ|WE*a8KLq%uiJA*k3TdQNqGT4abr{l# zz6y9@2l}cK90tPu8P^q7-t*KoG}pe9*+EUIh1Ml#73ovaxZl|70pvgKFX+X61=5Ou zHa66(Lg`hKYQ9sXcWLKo>SX}Xd1m?RRyB%R=DAE1K`>cj#Q)J(&H<&(+Yd_gu3xg{ zX?f4IT|QcltMM@*rEIKEN}*uGUG7&3N7@JE{K0TpTIFZ;W}3BQvlQ0qYEx*PgZMgc zyRsx6iJ?0SV)mmqsUF{N3T_wfyEMEq_b|q|wue(9qx7Pj(ls?N6xor|52*of4W8UW zwkZX%Qihe!6T}h#FR1Ye%*fg>$)=Q1JNkRM*fw7Z)YX!mIVOsbjp;%8RL*{{7}L8z zc0O%sRRn(8qaYCk9iA0KD}I#>Cc%rLtfrc`li}Wey7Jqd$@AB2;RQJvsY6?Pr{>9ZxhlcUfZ}Zb! zwP$PiOR7hn25lQadXS;ug;H?cDkd+L7(;S%3oKddD^K@Rx@3bDj_%X+Lb*4jn?4p2_a`-F5vGp1bK$ zQ0pPBVMAwA4+HI4We*Z^j!?XM15lkS?q<-{RO?sE zO-mJysv0K`rYYue1&MB*iv6bC{HY|EJwre*u z&G_=WQ6O?Z0K(wsyi2F9*An@nuCX@3ANX!v6Ztz+qW)5{EPAm^kpry-3DesyEfnoF866eD! 
zqijmcGhPyXUv#Y~zB!Nan`O$hkm+IN75Zf}V`>I@3&N{RXI+~kOOX2faf`0r&{ke& zy;pDvIOXTiL;Kw-%}KPPhh~@XVNmmp9kFLuyr1wn{K$a^tYvKkURC?3)NxLm{myQ~ zyzV$W=#3ha>HAl0e`8<$JA3edy!L;9Qr%^KxG9JS#LH@Aqk$@W;EbnTIJXfbhyT=i zWlU<_K4QH;|A(D8iu$3!Erv5mf|l2J_j$X#AS8a!<`8!9E%<6H?OJN%a_vX_1f~zy z$37l_teJDBz4b1zEVj~AH!@;a=2x+-d(tt@B9NxONb-f-1uT}COlw9PwuI}V0*a_% z(z9(ma_=+CNpjp>6?3GDj#s{sB1#}+0F!xs5i^FEQu-^=Y0Kx*NA;JP&aV42!ee3H z@o(+e#uK#wsiZoisFNhviofLds-s@RWVYHJUwGh3E`^wM1p0&oa)Ar)LDf5*dQr+e zqzpS8n3eR%Z$K>_lV%$3ZD*JNn^9bG+h+GyrGUt^nF`-0he)7i2uZm#hL^?F#Rmbh z&LiG*zHP@l9g2RXH34Y)xoF1|CV^<^lZ3ujiM-@SN#U=yDitjyV8gMRG7jp7aWUJP zI8D1yZbya^ZDHz1=OfN%L>O~v-n{CJuB)c^Jls7r(zcHEK&|Gd&FcWLzv=D{@{r*$ z%}uvxPhfcmmWLWWiYNM?#sew(euY#Fm+80Esf?K{M(OE_YtFWmXSvVHUHe#{Db?pp zv(5N~i_Mk7mOr2u;(;>~22$?D^2p_z@&3!qy)#6w-l}F}2o2iN!utVG3M`>-Btfnm| zZD-U3nG1`hm`rjY?G{>MmB3Y&?+`;)xfR8GW3vZMt()Y&Rv>BhG-!|`eA7Fdg7N&V z%)`y}6J=Mb;w2`EuHo<$soL^a?ptP>mArEC&9@t?e5beYNkk2j8y>c}iqV@AZ$E5& ze>{)rt`0P$E#iSai%qN&m>lC#w+vIPYg*dx4Aguotz)YFbAR$6)_Fx(QQKZZM`@ck zPRd$a^4^&TT$*ylC7%|Z+r2i7;X1VwzPEEJhivEsDUT;~g6(+ZF|Wrv>`Do9!)ulV={F3V$x_uV=nCfgT zMFH7%heA#EAk0S~VB%5D4p=`WTU#I{907W!=y6GJlxpCQB1KfWP@lBwv(baZSXf0~;%5FEQR9qhordTI&`_y_Q)~ z>pE?#tG*p(79j-(e!F#nEkVmK#!^z;bo8QIxhQQw4Sg#UD2Q(d{t$$S6S^QKNP$X1>e37*rrH*S&F`ImYuxyM$q^KV)|N_@r(bsC?ctvzPbE zUKC=eYssrQEbA}GNS!GSawLO+fj6cMDpQkS1ln(P=G~D>D#c{|N(G-59NhL)T)GJU z{oA9DxZP247Yt^>RTSl(M4+c!5;&0~0Sa5B!1MVHxU8!%5Z%5L0$lJrP3&{Zt53&X zm!GP+3$;JR4<>Ou4)Wo;Dty3Ey(0}5)@wvnp&OzuRpj2Y=;jTDW;|_pO1=C+sQQ}3 z%S&r?PW9muM&8W+p4KNv9;Y*n7BDYKglvjJP!D#o4+cNoc0;FI?07Ei{F*27kyK9T zNUGL&*|ziy!$3vInElwYj-gk*lYIgsSWHGE$4%7uf^2Hga%^A-KvauXGRE$l^Rv?@ zWJS9sc54rpEj2aOCn@rTF^g=wC%yihc>!jmJL=*cs7z<^B>6>cW9*A4wPmXt4$lIv zc?b`5+LqWl_qO=U>sr(`-B-HJ7{~JdCCRU_U6H>^%s`EK!0RvDoPU2YXL*-EO-yU2 z;LAVsXRK2!{4&~`^O&ls4Lv%aGP-{{lbIa8@P~?vq}ok1eH9<#h8w+E*-}4J;1`MU zHHouJlYA$yPR%{{$>6y{@Bgl3qTCk~B!(Y+%UhL-IsEynJja(VwDM!^QJpD`i-CBF zJXZY`)mmZpztZ1SP6n1c;bbdp7IR-=2exU=4Wii;K-+fR^{&<9;;rk)cTEk|zc3H! 
zpHXw%NUKf#@~gF=%(BxF&aS^ZfRi%B-08bVW&e(*NUn4Uw{xW~+Txq-@%7TO&uEzd zG(yzuhpknMPwIKkjXUSEv!m`%Wk;=&0aY$~{CP4l?aLQM74?1Sl?6q&W& zfRG1V=b22w*m!1=TUVOdI9V`&<%^R*=C3|lSBSzCz>yd%)`*E+Q~7)~IBFFk4}2-9 zRj~(gFOY6`{m}E7vb>fT{hw@&YlYcg5pw0O3H4T-??Nt0Uc097A9G$(!0na$@^Ob7|9>24~ zIA4Vt2{)!|^mzoekbhSwnt0x&SePFkvAhu^MP?<15-4-pASM8n)%JDIC#AN%s?B*N zt;aa!>@j1h>H_#F6ih!V;;%~;6y$^M3f1?9J;?w9QiM=L>uCh#20|6cL@OB-Lr5Ve zc-^AHIjfC5o`-H?aMcg($r^_D1nr+oaG5;p?Akar#SebV3bwusa4+d;{J@Wf3&sKT zbE*1k@P)=Wz2?OC53DSTOuh>wSt)D(w9IvCyx$ihr|Og|pp7{&~<1D=G+@Us7 zgak^_CrhgtR!;xA8~A%CgSqP*CU0`5ofNogOnhVv_*+_pg#)buPZ)pD%q&bX`&jYL$Uzwz9g>lLyDze|W>sV*3&tuE==FXX0@kK>%hi9)E zxRBoxCc?~Da+!p8K=eyGm!?%^W))ZXJa-mad3l&^NG+N~5p4L61l!$$AB?-J=MU@!h(n7%CR#vUkxCAg` z)!4#~=iN918}EVMGcF3BEHh+$K*XZOb%m|=by%#JNUqv)oVrfRKj2TCZSE#dFK>3u zrUzNocgd}5Pr;Xztp1~~JDvRI*4b6)P11!tLc!u-6j=a}VhWrqE1E;Xv~hjfebb^QT$fLbvQ*G8MSIzp z0+|1UJ|1;c-t((3OX)X}p%hjCwtM(YePQtGdYZ;?*-A~-30JOuVA>o`MWj-#@-Ze+ z?dnGTm2Zrf&Q_y#g6C-vM%a4`U`CRW`lyt2ec~qckmYWVvlzcm!ZL#d6Qg8PwH#-( zB%_B8*r~DNw}D)y^a+b!2o4kxbaqOcApSJm99`3ju8BGp2COU6rY_C$>=o3V706)n z_o#Z4@=&8k`Us78d`Lp!`auiSY4e@pY#Pc!&{yGPKDJQ}M{Pw2>VfF5BAIPKUavAi zBs!x%FSCL}xi=H<_?sLtC$=dT8FuMyS= zP>!n^&aGVP@mklWFu|RCR&ll1r~WEQ%g@t2nz;pqQs5zR-HX;}Lh54KY6yiLamxx~ zJZd+303_<6tc@`ox#r3_$kPxXyxF6P?Rs^;-N9;?#1Hl00KvR9%00po`uui~G^vV^ zJJRf1wl;n^uCUuDFv(ll?8maQ@-tV*FDA<|;hU-Vm5WEy;IAC3ZS!w41@NV<7YLxx z{q$3ofREyQU+b&Fv|ltNPpQA}>NWb|=;MQM6~QDW>PQ-pB=Nwyrc>8pWz^oN)*bHh z(RyAxv5TcD^j%`!Ey3j?>coo%!Fpw;D64A?>_jxZ=9Ym-72Ye)$M*i(`>>+708#zjq* zR4D9fU&)vNOn|41!Nsf&Tqz5Gf8 zc)O~9I49SXsB_YmVjS7kX_X8N*H+1*CPEZH|M;hiII0Oi$FBdbO;bx-la(RLnlgT` z(q2k$N@SVk;(-s}Vtjel!`*tT49JQr;`BJp$yx;ZP^o|&1~V6#U;^D|mZ?W6^C6yo0?Ah?q)dW||#9)0KmojdC3XoXZ64@|n=DK6jO@lFlyoMP~h zf2c3x=P?>m53YjR8Of@2ePJ-lJii(%DuCgp9YSMy8(QOzt*+~iS#^EkvATONC8jdw z2lk`foVI$}sUe|aa}^sMA3Bjm*LtNhkHn(bgUvKX4Q+{ulqmy*J|9}OYIro8tX0aUONK1M7wv^RTm zy69q)xK!K4P&xIP-hi~s+6g{SehKiXU%1hd!aEhhVrjzoCJeWl^|(_DXXpC7`8=BD zEP4GST_R#?38Arwb~CN#roVs#%O5JcczUvY3*l-7fg3J>872kOa)Q53t$l^2sg>nt z@g;TMFj2|3>hd=?@%7;+zPliQtcs&)hlFOah>Dgf-yyka6ZlwM?Sty$DBZtbpUlYbwYyM9vwYH5c|Ir|;eVPbJs4H{N-TY03!*tJBIO5&nMhlmoi-bZ zzs#F(%O^BD@mhUMX*!kM9Vp_`$t?s25RbIY(qL&YqPM%duQ_?lSVd>J_R^*V^i5{s za1M_jRvsmO?{?m|6w_cD)0hIRxAm7DSA07}RiBZ5Lr=sI+JN^JTw{J_Lz#bMxfwN@ z$&`L_U)=D5gxYdRk5GHz?)eK2d<Cc>y zHf#JKqN=G#Fj)1j()Em4fsRb>pSN4;f0l}F=8l%rEl;a3cDahN8(*nnK8FZpcv@}0 zr7!tf$U$nXR4N|BiHy#w36G-iZ9o{jwqC62whki#Z-)~t1{o%&ptMltoynAToqMB_ zebV*4+D-lbYzlWRFCWAM31nt+K;Vh>90I81_dO1Oi;ygaDTKv)ym+Tmx}o2}RFU1| zfgI2OG}60FTj~AOge|mWfB5$Ty6yK4cQ?WCusuy_P9WN~-)hqawmcNqAsu*040<&HKx; zqT#6fMAR?EjlqfgENQufY&GhQWbx~o+qIad9rV7DVw(DpCPiB~;K@0sao3zxz4Xhf zi5$)6h%W%0Fb%-&1bmeurc*jIr;TeUPc^L^RZ@54&p6!XPgtlPktt0XvCt?%>F-?3 z)ohZl__am5FwqX!h7auFWW$<;bD@-*qq3(ZMzvL8!adC$4TnDpx$3Ub+JAZ{Ih>yT zSICG(b66oX@H@boPJeBlRaPORCFY*Bmc6{ko)C>~_YR}~PTKwKi3iJo`X~U+p*tZ{ zyj_`hNl=2G!9@9O0GVlB!HGIg7!a6qYfYnUQZ#w0hueYsGH)z0R&C}!MsEF%Y^tkqpv0dYD#mQh2Ud(Ptg6ih?#C%kP8KG69~h<1W$cFI9MXbUG(Ms78S{ zWXK#RA#VZ`e-((Uz7psNo}NY1zdD$g@>NsCB|iIz1Q7%>k4l$MvnbT!G5n;rt(Sj> zwJ5E7-_ASDa3j`c%3iHv_i+=uae*#l`nCM`f-PerD{t-Us(P66EmB`Q1SP_e1(32g zrdm^RU^a-;vCc|#sl}YrujxC{Pa20OO|+q+?Y5eWrf&B3wG?z$bJ1f$>^t3->mmzDHLoszCy4(wS`q|@tav2|RtID}A+?2e7!}*J$=u)y_t;GS*|;Sj9MZPr`Lwtk zIDc5iH|)BRnW7)XV~~~kb>1abR?vC7`r+**nN-n>p!JtIk?a@gIl@H;d)bYT3#C`M z1xexn0c3Tn$YG&Hpv(H^u-7RVTUIOX)vuC5Sx?nZw)J=YrIdK4@9W?WCuh|en*BwK zpZk#OPKV0e1pZGPp`n{_x=z#&n26-##Vc_!m#Y%msIRF#_w&ULcI;f#fxBaMX!S<; zs;hKlX1Cxuq1jcj(WMuSRh}hB+OQp|$agvO8uoU|&CZZNlelnyLBlmWF|ADNnKFdT 
zhJ5oG#QpSVql-h^Q{5*IiGY%?4PO~ZxS4T)cRFcz)%EF7D&W}%d~jnWcSK2?=vJYXG8NRwj?@4V-z%v6BR%3=nP%!a&!>U7JdUk0scB}_?>oNmbxJ3 zpI%!M7MvzuYXh{OeQ|c>6^&Byz#j`g}n>LGyT^x2IfY0T7WM*5&SZr?2HX} zF}K1ypg*umKav+RX5ti#PmIgo?kcaY47LLpJGV6ca7)@o7URb3Jk?(Sa8ydxkBy1( zrPGi#S@*8$8WT@wo(W7|)bMTvpUNMqS6NDV*3BnGjiF{ZJ|1hjxxUyB?mQmPp^3e1 z#i>8ET|k!~ZRT~-6hVC#PJ8{^`1)Sa>}?Bw`Ne}&ZIUqFQ}44d#8B z`VnvA+1J-f)gM9?qU#O{kMw%g|yRo4f<+6EU&?IWSI6N zzjldVX=S?4$e8cZ>+sXbJ=zOrix48C-e4x#U}YNTR*_KzJ9f+0gcO40A4v5P5v$m7 z@h)QFd|D4uI3EYS?O;f|$@PZ$xGj&&NmBbl!mZ+5qp@(F5lm`&^Qj5v9lMrPLN8=c`0;b`_!3OYaaLsZ7`g_KQ4AjLHf&1cPks9CiIze_rG4U4)msb| zynI(o*SY-XGVmc-nRuw3O|BnA0~PY5St0h*%SlxZi#cW?#~2~1#AH7I=&&|k>9yqd zQ?6^drKSI@`u^`#8q)tgNsj-=QG5PwO}ov%h(i9WXyxC#?$*sKnZcFQm>`g$am7{} zS4;^-YS`_$j8m~sT3lA)Sr>u0z!pR;U3_o3A%M~Th0_B1S^ue<-r)Q>lyCy# zrq>hqk-wXhe}kuek+c8sJ%rv-tTOt!ai<(|;<|HE^bc#ve{1A@@Ae9U2aSQQfnDg% zEgH{eg&_1Zsqx+!F^tf<*e|i>1C*_=_Hjg;MhgcTA*EaaxBi3ZMeeK1BRDP-BEzY3d0?APO2sAd1^bH&n#1a%H4TPLaV^5z{f_Ml+;xHeg_b#8$5A4_ zwgUEMV;6qjI2ZD!dl=;~wEs~|D*cU1<&vrV7ME$7!swPw5`zq1@Fyd@VtliPG?(65(Idz{-crS8U z5a&pP1iLOg`7$Bw<3a{&n}j;l&dxVq{nb><_lJYTNPET45nrdut_{q+)VST++(>d? zupsRqYrINekrm7Sd6^nx`E#oRutlN!R7Gw|m$1Kk$_4BbZ@0wqH0|cvvkHYc?LCll zl>;07ew-1q668fzIn4tD*fTvskD%V3Znl47+9`WpkFWXe($zg za`^iySFTIcXUmSy5sW&m#Yy}p(tu86Ghg3QPbl!a7E0k`?VqE-NYuCW1LPE zV$LQ${}0s;J;dBlb6TasXm?MGbaY~8M(GN=ugCCve3)#AZ_Tk4Jx~m=@QvKudGR`a zHiDe;Jl|>HdK~@SN^9Y1W3w9p-8Lm2@|+}y6I7Bo&a6#oOx{%RGS4k>8md&hnQY;C zq3z5|-Fs<=0fS+47aJ|=zl=Pui6ZhvIhFlX+{sf+i=gRU{Msy|lSZzWcA z@$FPMk@4+eO8!9_5zV2akUUsS0VtwS~H!c$kC)%fYNb+Qw zrPU`v)Q?u3x98t%WkToStP3oA%4bPmk#3+c3L`!)mFU2lVbC54B(z%vnC^J^g15o1p=Q9ub_m(=Z(At0c?B^te4viXEqKUKf zherSK0RCr#mo}#`y8*e?gx)Urhr#o|HR2w#svG<_zUD@ZSX_40GN8+~{fDaYzq)q+ zZ%uhND=szWzCToOz`DcxAJ)wO-txHHs1PNbN||n=Z2ZG-{GW{_8!vArQ}NR}-_y~5 zxTOEH@xA^3|3&{V7$ZH7++C9GhOn*bSL4N>UupLHgZTa;=ehjFoVw^|s22g?SO_FI z#LJ3PyU`t*>*snOX)x-Ylv1l5@-k7?t)Tb|V?BfN${#Ar*HXcvm8UkO0a|SD{ym5NAM@G&x3oMVM(f?f(62s4DQYvn_{215@}Al*jhbwBUylIf zUK9>8zA!D#+b2(-#OD-NET$kya*o7a=xiB6bO95SgfbsK`f&AI0^*D^38WQH;bA+i zL9RW+n^Mj|kh_6EDY%2_X6xSGASvpIW^WMCp?4&zs3C#$A>ms07NBtXusaxyzkKqC zDrpTk%!GmApko##pvzvH&XcqBPK3}7hv8&3M99%9@VDxdo7w2tKUAa$JL1qEsxNkJ z5YqWTpv(7>7*$Yh{3$IrFaY0#cPM%}|DXNn!7)8heKrG7N_xqA zT{zZl#8HtD!`Tn++OKX|Aek-5m-okSMz|Ns2xI z){XsjD5Nm?2w0rX0}PAzaD>`1u=pH%)s*4UUE#&#LE0$fl6Kr5stRN9ei-timw{Sf zB>6d$fmKl1f;g*FCc`kozi_f@?RZ)@#n-uy&yBwto+5Kcx5r4ROLppn9mL7SR+61_ zNmG_Nz?qyfMV^wsEIi+=iBEq-g5!Jd!*IqJAj@tYM;i-z{IE z`kB-IW%~ADKXB_`AT9=!0PF9yv)<|o-L?2rU1V#&;~ ztI?m$p7~{Aeb97evD@gE}v{sAO{@8CmkL-t1tQ|*2 zv1Xb>af}6uY4a&Vld^sMf}Z9?=b^;koN&^FT2L+JCj_q>e-R;sPwk2Yz}DKFQc@ql zzl!pzcYIUp&#KHdjx&G>JxVx=^-5|i&4ct^_S4s>_)-HR@lCW(0s=e}9U>Glf?8I4 z%XJk%V4s_lWn@DkKJoxyGSY?gTnkgwc4$lLr{`ISsg%}Z*A$*V9w86K$f-&so2scm z;Kv106B#X^cHEYSk0E%8s8cpzos0~^9Gh;2;M^{5%+YboaNi(wrA!c0toftlyrVBI zxN~))zQ4Wr@%{VfYgD%g!Bj8Et7Yq>F}MP*c-Fh$OxkP(LNUU+isZ48qx=u1>#Jmj z0*avye)@OGAFB28ZS>vW`+Jm&V}Gaw`AP5t;C+9>*9-@cYSfRo<7tuKe*?)q2o$*1 zi9!J8Eot#Y4yaoCTZ=46oH7Ke_WDD`egJ3$06`ZwePfn#HvNF318BA<=AN3MJe%#8+2zYPoQI-Jb#-IO{yLZ*vzXU470vbI0%%&sjg}V zmb6sUuhzPSsS@jlu|k0vIrh{w^ml_CN_iY6SB!1@H~*yOOL zt9g0(D7wj!p`5yg_#bJkam&26dC>}Tv<+8}8}>ALMV3izA}AJ4N^R~d8{%0v`vMo{W9GO?oyC6nzbzYwlsE<6uAI2g z5~F+g8^hMi=bk#~V=(F|Jl*{nGtW;g)R;RV*A1%;3%7@Bd!T4NqbB;$f`?*K$w|Jm zEnZfE06O2i`>9t9wZM7>n~~Xf}^BjB@@l9_#!*zt|`_Z6mgVfs_Ox zgrNg~I#uib!VUA7L3eNOmn)htQdAoBw!`r*j>5}TN-3%1TdqR7~B>eu^KAJ)s_Pzc^|{sL(S zTmEH%eyRjJnki+5FfQ_Iq#ltj+J|YnqBNBzpEtf?geWt6bh6d%Ke>66QE#b*dGJ6h zV48poYvkVd%eos?^&SqB?xEEpl2XN+_sx5E%D3hZI@+`F#&Dx{G+rOXOqn(y@d%g6 
zcFDl(3P~?ur{9Ovuh4Rf=`?UA!km_=zxnmLXN=+I&m-L~kwweM`Y6&7Hk2?Qt}H&Q zUih1McdjRIMwBc|@5$VW!Z>fSt0(NHYc*n@ZBc^;qXbV)lFM8pz1J)0BPCh+<&DuT z@*CL`(rVCI^L|Y$4^&@Cm8s(}sHU#VEgZzwtevNa&-#WZT~{5d*{_WHX{}jkWhwW< zH*;l0O_i_Q;s#HsKGlM0|Gt5Fmypo+U3a#~i6SACsP80St0rG+Ho!N_!auG!(iHD- z`%7j!zp0VjQ86+|;??*=6`M$wJ>CFzU6-cPN=RE)4msW~6uspA2H150OmM#j>GvLx za&bTXgakzT|FgA{%*sY`dk&udx09Bs_Dug@rLVx0|6ilP*F28{I6GPA$?ha*DOrFN z=~Oo*)8INjpkcKNZvzYl+e``r*xV8fEYIsO^#M4tM;8J z0OHq^I0$f>0|0^VsM9M$>VMs@9J0`#=JZTA0dS{00uKKX{zym2F{hIDA1dS)@`xp# z6rM`-2F`QU81x|psN7G09kXZ0S4>b|0y28L^vCDo7b%W={r^dyfC%*Bo4{A7Iz}RQ zPJv$InE=i)AV>unOZl<{+NS|^lLuGR|BoGVB1senLIXWA)oHBqElq+^2P;Pysyu>w zZfeyiMV_uo>n_E-^M)3Cc<-~|X-%rvx=16Z!c2*p36Z=nBX1WrQ)$iRqS&4lbwAyz z-YEfs*M|5!Fu-n}Y^4Pbq%r^oF#pe)nL=g-FaS7!0igdY24MKV9s~FfocaF-#sK;= G`CkD0^r&0_ diff --git a/docs/reference/ml/images/ml-health-check-config.jpg b/docs/reference/ml/images/ml-health-check-config.jpg new file mode 100644 index 0000000000000000000000000000000000000000..c235d79984525528844bba88ef2df5d7ffa8f04b GIT binary patch literal 72237 zcmeFZcUY58wdF0Rd^!dkH;sP(Y+hs0jkn z6Oa%fz|HTs_u1#$`+4>`-#O2{|J>(W$(syh&AhY9teIJ})_eQ&b^~zlrHZ-=00##E zP{Y0ew=jUWGRVmu0MOI~JO%&&gaAAoMgTsxghc_!<1qb4Sp|m|fcy9V?*ITXP5``r z$9awY{tK|J{axqZzVCd#^FJ`|aDK-9A7$Kof1%zU0-kC+x_i5OIl8}lAS(C-@btO5 zCf;8yV&U&H_us{Pb1aEVDS#%t-Szgh?tK4J_957Lmu8okHy3#B%ex2E4@%wQB_la zsrN?Tz|hFp#KzXn-oeqy*~{C<*AL_$5D^&_9TOWDpPum{Gb{UJPHu5YY1x9zMJ9T`EN#g0~*j93o+aG|$tD8vBVj zMRgIh)}GVEbX;OEZscF2{Y}|FM_Bm(6=nZH*#D$!8K4BX^Ecq$!Ac1i7b__|EZ`I1 z{{;ku1b+kJzXS2#K=K!m{RiA)8^OW0fvtQO`z9kIAo`Er{^`Q)DpoByZ=nEETpX-2 z;Zgz=0N0q}c-}jS(|bU6iSv70L2oVY`Z9ZEQB}xZ5hr|G2;JFRz6IbE-U3n<@@dT` z3`AvX9d1HuAqa+B0Q#EMp!pV{V6cuRy#-)GT7H-0L?uq~8Q%g5tKBhoLvIvRGiz=E zQ}f^(T!mWzTdg$j_FEg#|J3aCmW#sa8SobHe(Dx5y8k*}>Z8%bNo(Q7W!o*F&*2uZ z`N!X63Uh55x*>WCKwi!kC)?Bgs~7*`#lQ68|LZYgohZEL@<5XQo@~&T?p+mGk7$tw z<}1b?WtzP9fq33hf^e5m<_xt3_m&USiSTc5)6gfYt}E8orH3bZ44lcDcDH~*MT$$n zE#TMAEx@Us3Ym>by4<;rth)vL@iLS&cm4ouOvq5U{qeUI0$?rCKXKK2_&GUkT~|56 zPD?3Ox1XAyNGh8Z7;V0n62q~M=ly&q{!zL*vv5mO!;5FeImxtdJk@#L13nW>>nO_7 zYma&XIW$PCSh$iRIQcw@)B5qeIm)ng{;K!?-bL{qgC(9bq~jLwmJf2xJU_XpzA1FU zbT)NGWPS_yyV(Gy4|Bwm(Fz0wn#VFW$PZDzDKu2Sqy---5?^byV`=NKc6;dG`B#BpI(31% z*klGS{;Y-ybxHpC9VuOcLeM(O6}@>Sax=?D()-jgU^G!)Bq4976-kDkJ@O&h*SJRo z)zACtlFf^19h@zJ_LL=oVU06jIy9GULbLBQ) zomW+)Yw%BVmx#CMREM=QBuMGwZjT+xQ{SxL0?06d2>6fW!a zB;`3$_S|}pL8pOE-I@+blXztCt&5HSSo@oZ?+xe8dcZ9pqE`kqwILK0LbcZ`y3j4| z!i?fk?{VVl@ePN-YBQQe4V8j7*rsv*;bud|S9nc_Y)8^>IlVFToqfgZ0e6*AcqM@mL)~Y&Qa7 zf$PL&waH*FJL%nXkt)YQTSK>kH%%dXucAEHIYYfSns8HZ0WsxM)j^W1@(73iLw94% z2lW_kA=DsiUWtE`>-*qVAlK=uRh8ex%W0n$=U7FroA_Jm!*2-D^@yv!&RamzaZpHw zSS`I)T!a!2$>Y@(RpzgR$$DGivE2Bbkkm6@^ z(D=*wn+MK}@}L69K`;PGq;-D;uSgdjH9vdZ-dA z3{ieCix}948zwx+y-*%9kdUDJ#ShMIj<$`JxlM)5vK1J5N`GRh8x|5 zLPb*DnuTPt&oZ#MS@3#-ut2y zC|rNdofVS1XuIHQ#onNfR^Jv7>2m!(ssF^Hs$13XIfP5b%}0u+K&4%m-*5*RBMtk; zWVJQ~LSev_0*>e#!tE}90UvXTlR#mn(r`X|{wOujn!*R^PZ%`Y=UV{XEdW@nI><)n zBNSeU-^;kSPSdRgyRz$Aq#D^gcxO~ZyoGx~ox{xc`Kfl-$^-t53N-a*Oss-qJ=9@n z9k;bQCG3VDF&`6t24aWgnv1|y11s&DTypT9uX2Td=LX~3zN;8&_-^wQjGRLGFNQGR z%-u+$V>Y#-Lxj*N8}5vv>$~Ujq@7)+T^i%jhK@)04J)*^eh{42b!ka)IwaZRmz>6{cIKAW9VI%F&LxjMIu zT?wFU*pVV;b=9hhK0XJXCh`hc>;t$mlm|o;@J7xMn z8W9{6g>Z+-fs;Z+K!D%3facL8j0{gq{h<$wjVaQ%wX)ykixrh8DO&@rE2o@ z&{v;sh3wc>;V_E?>5yLmM&6crbHPC%YR<<9&33u9bv9w~sS54XN zgm6`5>%q*CEspMzO^gCh)x}(;55Rl3SoZ@z}WDTtjphi?JY-AX0S1~>O-56e1x zqoK39R9O8W5dsnjM6l0?h`3f=A zPnRa^$aQEtu7mShhdd&NVTj$nK`TG7S>E zE>y6=P896Z-2#;@y$?02Dab&t`8F#owYYm$(k4uhpHn=K?RZ?J$R4YtZI)LD(OkFe zAlHW*5p3|C!D|gzO0r-rC_WVZ+;4vh4VBH_SRA?q 
z(1v_dXyzF?jtbx^cr+(}e{Fqz)$v{W?CJ|24-Y0~RwfVyuW09~Jzo<5g$F3w+OFK>{|0b!~IZ9bj9lNLZ z!b%yTtrgA6e4XoU3FXt8srTVV;xEk{{LzFHW$PwNTIBI>xyS@pt)pi z2t8165Q<|XQ?RP#>$l)V>q4l+nGQqTDV=*+OuxeB{*>iDN!4^gS=mVvG2*B5o>Oi& zJRSsfoa{rh|jya*VX7M(DAK^m)29u>hw)~^QCOJ@)n1$H;rd30MYeHVd>&i(S82w=%({TqNq^`u zH0QAMvGI zY-n(QD1Hd-z5>-@FIyK++?j?7v5A1H24u-wf;kPubbH)u16P$Xmbw z1WFIP1sqgv)bhZ}!@4y#CVAcUPfgj2&!ifHp1=vnpI_r4F%o)mBDq z8CLG99v~5c!vTheONc8tL5w^tXmfy-5U8M&R^P2fHa|Zovl|cvDXcJi0y?zMGOuW8 zezd%m3h6fcRMlVEkD)|QAw&Bh4LidM)SxHModp#^)@`2qL?Po6QI5dRUcRIW>^^JN z*!u7j8rcoxv>O` z?URy25(;#f&|3iBue=Q;+8ll;IdnrjCzd37AhK(yC11kqcUPsoHTF5nG<}>zG-2RI zgnrpj037+w)JUqOt8=cZd9~AJG zI7D(L)piTsw>q_KZ2eT*$wl(7{zfESCLTGDE~#~)X2W%a8nq%Xy8-pTVJmOV zoWlFOAh-zWDw#gd1jl&SUDgVg?I(0Ot|&hJ6&6-lF4ppof3(hlYIuv|0ceU5%M;1{ zLKIckEns@mY}LC!zN(Kd_~3S`(SELekZLwtUjUD46F|Ww!c#m!)&i_ncQ` zLS?nk-g>yLdVPDvb7T0%-TyXy&<@x;QT77sabPo(u<_9lhQP>3XNxPzqj8X z1D2gQjAS|YL2h)ljccdNK=v1x*iE`A5AauGjVC7)dD&L3ZY#o{;&*~Hk$eLoG?2!I zdH2JTE6wwtZlx}yf#ztjUC^C3QjF& zA50&q8Hp1Kv1=F`{Q_jr6Am59-U(oPX9jCF z)^3I1xlzy{XWG6h-vPa#<m%p?LAo6S{9wpUELJ~+nFDUpn{@c^UW6zVv1C2QC9iF~X%ij++nHOb%NZh{c zP9=2s>`=ffsF@duUysI5lO_A7L-yKNxStXqNlCG`aGfhXk-vv|fMDxe$2AU;L3>%b ziY4%*o5|$Z=Q`zir%!SQO>we{1n?3A^Z@}t&)v;X{NDF#k2OPI0cpt>=9GUu{2}4H zF$ozb@E8&Ogw$CBo%caQvVpbh=!BPTv-SD~&?D2Ix^j@Z_(bOcy4u?OnV~_2}V}*D0=)p(B4ugd2 zKqV^ku4li46o_(9SOoW&a;O;w@{^xuL?jJ45X>YS{uE46FK)~m3Sl*Ms%LuBOov*0 zKH{V~6v){n*ht6%erqQp@@4SU?ST!KuFD!1;y9{iqy!Gs9anF0pyWSaq+EVs-t;^s zEz|EF(TLYDpYRH_xcgWWU9vG=PgF zMFN@E!OrtjLxuPzpk@t(*=23jUchkPx^a0S-I5Jyd{{_OR-V5wfc#9A^C&l{zM(F8 zFMg+GiXNnAGUxUun8{@J`1lVdq07{z;!Ey2fzv6c3jaqp@(roYT2Clsgd!TM72~5o zjh%p$$}NqZ@C|GiD< zA=U>|2zFWV5W$}tr(0yy4r(^=8jVWsCJrT1z){ZB)0Fkk~+KJJJCIKl4-H%G0+d9c>Yj#m4nJD>j z2M-?_L+T?L)oi9Ge&eL1xEaIDf9X_o{D#jhAohlSI_q*_R%f=p)dinS@S`}BX%u~YG2C>V)$HLlniz|efo2!!02kf5KeM@e?7rhkOZT!)374lcov{Q~20*Lx}vH`S%@$8w6eZL@n z(ww2Z2J?nO)?~{kW``fs{(s2>IthspCv(tho395*p#GA_99AB-ulkp{?mZ!Y=h*!7 zukJmT_1VZ;$R2colkx8z9aH@z84H;+e`-hRzTWZ9Q#dVR*FYipmRf z_O#F|X5eAE9CW^uD;~!45B%Qct-CV?*NQS!ofX%jat?5dysUni^<&l9pmw`Omxe=1 zSBbeiEI)GN6EiDch$S7`U>&PFv-{FP0TbiBa#BadreIy7Zv&u~^8&5#R6MoXtd<4&shS9S6cwl+@`2!!2Rj8mbzo1RW zn~*M}kqMm9r*hRKBWvP$10s$Kz9!1wTZ8K?bMB^tbEPY<1Ccm9c z?&~Pi0I(Mhf-Mp9%Y9MdfZOjDP?Foxxhwl$590!U_7~pH{@dZ(_AwKN2dOnwPY+5y z)t=}2;r(TX`q}KlQ{Yk3eD{W#`75PkNA^YdO{Ln8~{A_;s7w_cG^RD@{O_8G5=EuFy-pTh?(V2Y7uasLcpr>4sOWa=g zrP80MS5^ISc{;rFQvjt&RnrSk&aGetm_w|4yt5(n^_R&&_#vBKcYEO*Y~A9Rid ztf(QL&sK`vHzrG!@P61#Px0)_g_h%lfUG95ojmmgF-_??i%~8`f%4g{ZeMjy2V=D~ zH~P0P-li(q)CL3_hVL!>{?<0D`Y2q9rs%$K@-rOXp9*{c#r_?X#pzdGsgRtH`t!5zv^9-oehFk;BE$&^T+6qa42 zR_!=^JSlL9aAx@NGmhCK*F!>?u?#B>SG*M^<_)W~Gi9${Mx;Yb_gg3VB3Sh>q!rvA zXjmZb!H^~EMmSMqt5et=^#LX4RMpf9so%#$WLoTucY+6wS)k4+=vux*2>$`rl2g6R z!meu_yG+f^i$LXJ?h6{)rK1nLxw-BrCEkH`9>=T`yZte@86+^Ap+NcO!}3kOzv*pn z>j@03Rk?hGNw-d>abIPi7aro&XTqfnU)Ru4s)06i&>n$E<>`>a*$0Q^=;vz#YoSyo zj#(1!b|8Txn{|q;rV-g0)c1YHh7^BccapBHN&o0<#OSDW{&urh^)6#%R&HUOEMEw{ zNug$7PObM+=**hY!OP`}7r2xa+^GOb=87vS)XC!O=0fe>w6|@oaof2nSC!0G&B7=E zwb3c4dC=hU#pMAssq^Qu@xwB)uG6ihv6@{`-ivUNYTBne1T18fmE27Pv~vhen?8Bo z24vJNAm00$%tuaSYWRms>aNbXJ408zCY{V9hbTLrL@lzf4kUc4?Uhzu4OwVQ7$kOa zxRWhT97xBY;50rA!IB|RXvwd5unvtAb>&;7=d1K7Up4n%| zSwxWP^e52Jg(8iO$B~Vs`|W3tUw_->>m&zYXsJ`dLvVYtedKZtG(4+KJzV&dBmG&~ zz!8Al!|ZX;6LevfF*7Oxumx2-guxo<_O^3meH6yp}59+dn^pm-( zPzFinI}>$Lp22c_o@J9O&OAe%?gNFfCh`d|lJ$eGWy6{5p{zw|gBXvpZd5|m-d*_J z$h}A0)kD*~s{QKEGY@-QrZG|*Y*Ziv%R8fXUsm%1g(&tq1(vStT8Bq+9u|q$AZ5O` z%|BG@&eB>LO^bq-dReMZYc9CHBx1V_&wY!l3>7oNLj)0uFf&R z7!5cDWj{P{oq|mI+v$jjZ{x#E*|zTp)1SMu%XyWNy0R+9_Kr7dYbpbkvPYw{=Y|Gt 
z+TxSHru&I)ywe+AFva3>rQTHs1kTG@8qfS8p_@BP_|Vd7%pN<=vq~YGKx1=*&smk? zrT!-Po+oEGf;`D<11)PV(aId)6A8V(DR-UZvZigydYWJM6xdkl!TAJO%A77W8FYcT z8eHRSMx?}atGmF2;mev1eW-v}?-*)}TnvqQKE~nCj>u1XXM91W1l}A3MFK~cW|hb$ zL`+vynuZ^LNSDOPoR21MkiDmh$J(m*YR-GrV8Yx?7|wS}c3mw8jxU z2=+3v;6h5qVAx8u;A}L`_nZRk>iWg!$7A6s6CR9iHhn7xfOB!|%E3!J(rqp4_(Zhw z>uW(TUk!tGPyW;LyWgv@8p+N~ZpiziKZeN1q0Ol9XL>hH4>P3Eg94p~U-$yZyd zOHp~OS8npSOp1qoUZ%bqy{HRb6lY@veV-Za)2K9aE})!lLAAOha5I`^6ZztaG>_fu z(^_{VDO?q#`DG(xj274o3#@X?j;nT&Fj;MjqwJoo7wa-o+C}{^RdXvqL0!fs+^IF0 zOsjW7iR+9CTXmspQolNrxs1OMH>uGOyvuH4FweD`>P*YklQE7WXwbiHn<6@Z zFJDD>3Q;Ns0{zMq-!f}FewFkn3#zI5{r!Vjj-aLo#9YceY?eCe`i?=d7b&Xfw{RvF zg8pfx^0MU0a&)x`1kw=)J1uI4XzI|J!+L|^EM37g|3>%dc~xRM2tdC5xv8gl zu9h7#TOPj$h2f=XlEiVQF{OU{re>x3oSYYx-g>~Ng6cRe@4p4yLC2^epCTz@#a1l_ zOf`w+9waw!G4+KN&|{Q0$mt zhP|JwZXewU&I7+HD0xpQ))Ul>BoFh>?({7po&tKX`hI0fjAj6w9^q79Z}rp*<_7qO zXdQ9p7D_g6FM=%!`KE`8*b*bDj*nH&+hR^w>Ge2D=#j!xp)A|Hq#m zHn$?X8oBLC+FYmEJ<6dIGf2MUVppyFbjRk!&m%&kM`Xj=2+J_pVA$^oL{1K|E+PKUXq&HNLy3%xFwDy(++PzIn~WRMulMU4vm>TWW{VWSC&mYzDyS#W%iG8Xa zWe7mkSMrwbHFfA9LK8w5F_v?(G`;jG%3WD-OG3w7ZwIYxwKxmZcTzLeN^`TU&d|pe zZ|^B&d7X!G$IRc5{y5X*8IqUiont@wJb%_+&%Yc1&bza_piMWBZg}1(4SSEBh8`3u z7JsN}t05+mDWqG&NFY)9FikoEp}|G>`u>tVX^$+DT8Thx7ndj6I-_cje*bEDr$3P@ z^HR!;Ty)6-q%^7>qzaz<6HEq=KYcTEbY@&&N>WP&ss(Pwo5sRFVtx3(6ocAdNGY`% zrwF7}@RlE1zV+n%g*{zIi^Ao4uyHLIVV|O&0_<&7&HP(iyV`8C_{L3t*4jnWrcQp% z_lwbg{Ikz)W&Asc2SyNoC{W`Kp>l-g&pKML&i(mh=4@iuo9*jXbnhc@GQYA%*ev+u z9N&ZkmreMz5G#knkDi=6LeT+!Lu|zPFEt7&EVOJ5)fyX9bh9s z&dH2yPc9};-^;%>y#+7?2~?t!Es0i)e=kC}>N2Y}y#j$>)*lx7Y{_L#Jf5Cdlqj_} zrhaFv$d@)YCcmnz)e^KaxHU66wIEN@1zGGS2$7WxE_b?N%QN!dSP&|n6MW$E@%b=& z#-X9+Lli$fm9XL*@34O+Hk4{T&OllqInJ(lg-I-8DC=9No)gEB0@ztk!-ceYok+Ht zLBN7b*jZ~aSkw9`Et;{~f#Rhp}pC6#@}iS{Z*JHthNY*|^JQ%e=l zpMD{JE0byGEs(0xx9R)H97B(eAPsGZavvJ5vEnwJ?8Y99h8h$XtnF)Ei=IKp!p;~UIIvAv_K=+H38s&vY zeffUprm(l6a0Y97K-_}QR4ra6B38p0|qM@wmK^wB6cY~m9@$Q6!DJ~U9 zOwFMT*vfEbP7hVz;K@b}9t@35cRcs=M0*~;@kMUu!?_#g-ToZ6nk_9Gt1{O=0^7D2 zo4kPD1BfObX`7)?tap&fWpJZz{p)ln9%D>=**xR;cd%0pa3n^)K+u*_Z93;myPdt- z(ipx5^AYDBBQI*gEcSvAMleW0iw?9vOe49It4TnHnw^bTZ@{^hDDP-}wQtL+POd`S z$-1)Zf3lO(-V1C?iJ#Tdy!r0-`k-P2ich1X{iHaa7k5K}A#`lLF7)Ti<6D5LxR=ii zX#7Uy+UsV(F!WNYIxKvgmF3F2X6d$W~InL5KiV+Rhr&NPSJrqYO+BR#QPND zJDKdnVSPAdj50Q8i6e}K(~n?_%j$Yo?HMPY z!~UWAo7NF{P$psvqYgi&LmPq04Htr{70BAknD6Yl2nS3%yR+nDJ%k@IBk4amrJq8# zdVM(K_ip(2>F9bTXS|P@(BW5utkfB5Jq0pdGE3fDg>Te3Ntm;rWCFhuIm8S6Oluyb z1zgkreLsl&oOHq20duF(g@|Zim9vG=0{xKm*^miAgxGk`6Ad4^RR5z|AwP31*MgT9 z2b>1%Y0tqjN;u&7dAFh&>fzo88HRHLvWlSy$ZN>#FW~E4R)2U~@70Sm_B@-Gr%7mC zYkLvF1z-ECFB!N}M~|x3p8t~9DYTrmgwr(=Bn27Q{LY=PtNo$`yB7Hdw@cn0QLFvd z;_F$UrC192^l zq`p}?d;>{1q%%z$SwS6aKr34bDHe)__ftq9Kv;{7~ z_EWI$aiKQ9WZ1I;1|*`dJ1K?(}EN;Rm>Nl|Jk#U2&pXSv4;E7U@*;<%!wjaH1uSlMY;3IHgjy0$cs03$;Ygbi0`HYheIum=`wu&2L17DAqX6{su zNtS(ZS=U;DJp>aBtB>%jSk)2}W!nE(_eqUweM?N6h*9>!dAvjao$w)q%15jLt#!8u zCil`FEXjBWYN_bQHKPF(EON~2oiQ){ph11_`}dJ&qaIRHDCeCZRTz+O*kHj|x;^u- z<``H%Bs?}f%HNi>{h}#~GsxwE;m;7=Zyk7~w1+}2DOt;Gf`wG(p#%DbXth=?^lJ!A z?1~U^4lTPL2d)jzQNyWh@Monc?+d{RM)4M9w|YNZA|N7k>x zRI`Yr(&VR;?w;Yp-QM90`t*kEyhA2GqUco&u)gb^=qImPF(AWg*e4q~q!zT=WZ;*) z4BCCOT9a)gH{PX}vwfi?e%xqI$Q?d+WFHPCF`kc|6RZ9hHft9rl18XTj%%lN|4_l1 zMMH}X*Ihh1Uw-hq1a_4nd{v-@D$6^*1n%rwssVK}!lo<(Om_P=4cO>4Xvag;d0e0< zzt+R@m1}a@!Mn((FWdF`yoN_mJ$1E#=9b46<%80T@d~gf=rlM*ECgV4Go7J^Ag_1# zsWLHbHd)(C-hWsKWc@|r;X%zQ|K@2wq1t!8DPXq`JP^oclD!cce|CJo9YK?Q*=0E; zQ49@fdKjexKh5mUYm2K>Z#A#JYVdu>_op}&AR3l<;_{mpDaYuceF+h>vHg(ir%`&n z9(dKZ1LAy{#kx%MrB4E_+WfN8xuMIiWr6D*RnfQ!5fGBJJKk$ucs%I}kr7kJ7{U%s zmJJey*Eo8lwT9;qwshyvPm2=AC0gOrT1@FYsSI((wyPXb&j^X}ZG*Uwg+o1-qn)<7 
z$MJLmA*T6m3Y3o2&YejUq#J7e>64c^k9r903HUXrwAf0~qc3LU zNrHqBYuWXC65%H#u!mOZL5z zx&Xhk%lXjSA0IBNbY{+*GFz{=+ESME;w?AK56Z7!e|kIqy0@)$#l8IKxVv{YGVd0^ zbm(^hMt}`~4#NNRmX1?{dl z{$y+GR(Do*8W-YKv=m30Qb`FZt(_DeWj{fLg@J->^5Zw_O*%SG)!qj2PL7?`m>c(n z@e_u(HagOEubs+6>Q0<`EY(fFuXAtq3L`>ezCkr=ciSqE_kWG^PcGaS{_gOU6s49Y*oW)-@b+Y6ypT=}gsjGN`z^2X+R z=b2VYiBSG8>*ySu7(S>l7qIc+?m_u}xf6%X(OqT_k(mxaQF*ckh6Vi7-QuuW+LGnf zqIp>b);3e!lJMhujWJ5CjIziJa{l%+O5!(ZT_nr;%)5X0!KNNsdG0(LK(V@estAgO_-2#Se zldgtjN^CSrOdPN%7fN7U7qw(4%DzgYSM&0>ozGX!=@5>f(CH95%=3koDcY5(>y#3- z{H%qGskzMe_FVX>9#Ebp+QTb{yFiawonUFs&1%Gz>hqRWFpqMOB7AE3t_GRjptO5T zH#_!G49*(x1;k=3MmW%c+GXKTFhiomSs>C7V)HQQ`8eNeq`&ew?So8g7H;%B`YkV_ zTRJ2OF0*`0JsiqAyV(kkKN;H1wjMcp1TWa*jODqY1VJnT_` zdtD(*fs`3Bv5b@@D0S)V4#LZ)Fj->!71J#U>jK=s#hHJ%OG%BzmVRAwPlG2BnpD0QdYxgDz%aznA zSLP;O=VLcu|7!z+#SezxDzDxKJ#I%Vw8F@7`a@O&2xN=rM7ZYuX?quHt6Gaql+8jNJ~nG9 znL4G_>8`nJN73w=j#Zbc;%j#koG5I7V&eK@}VyLPyzFWDh1{DPkP zD_+=-!iT#L^4yrlN7T;uG>z{3AFf^gZ}9)w_0GSg_2c&W6fTYTGw!4ug>ag$J6Bek zgCd?IIft1W5|+yJP$ zY02&8VLg*OHJ0bQ5Oiltp@%t}#GXQv!~1&c?m%CMxCW*!Kgi2jtkSeQTi@%HLVJ5*hGN`duyBWN?|qx{32Wl1YtM zi^p8Ooh@DI8io}23siXbDtYm?Rk(65_k*#|MLA2jxt6>I(5qbC0xtIHuP@zi1oQvu ziSX`vBi6m1#581L80#HuYThJ4m&;PNRig)u^|3^&ZX`p$o0dy_@dv|3Co+saWhBitH(;Eof_`&vPG zULN2;LDL;_3n*v|UB{*+!+)=1GdSe1L5vX1Tfp$N!-`be$^7v_D2xOfHS4uWLC;1# zT%SVq74Fm3Y;F+zoR4ny^C@A|)stuvUuYT^Vfp>wz6RG@0AsfU}ZO9`M7blgQMd26cQ%eVgLoA=%VZUV5`U?JFD zR6_AoyE&^VQfQ+s4#}}Ic-iS4$-}XGi(L^<7xaFj#p{y=(QGld=j7wB)QJ29uKZ5xl z{in;de-K8}`7PjdLFnqwI>ubka78dBLY_Tjyg(kQF!H-w33d*i!EkzQ3J{D0gqeHp ze9PTYJt3`us{V{;>cE8E`(P34`xYuBtQ_ccorPfn!q`aAHqRC@N=0jEN$q(b7AEtC z_BM0f)w4>6tP>8q)^w;^>6Mb;;``L9NS5*TYqI~ufs%hznb%l$uyKN7P^@U3T1UDy zhS}CaYII1ytXL3VP_97{l|*HE&Q{l5WI6cbSKf{SV{qN%#6bikNxjtLyuTS zxglZ(En_?DhneIoO&3~M^|XMzkdJq{JJbF3pJ{J6C_G>A z{qNXq`D9xqi8lYgXMrlu$BVQQWrzQT`hMMJ9-A(124knKDI>`7Y$($EKVknmCAW2) zHeUV*OCb3VxSswoB4Um;hyS1jO#gr?B|DhIC}!~AQNUdAb|Q(9Pv#%R_KzbyS?iym z(2G+g{`>!e^(7e-QvYJY|38?Z!WWmNAHC_co?rJqELAj|NUnn!RXs0YN$lT^h% z&P(DJP@h+S&BpD#nt4sQCwV2Zc@r*ac|O60aZWiHPq`#q4PAc{imXEOjLOR*M+fWa zf@}+wk;=aoOH2{t!%l%HQM$}x=X*=aUX{<2Tz!Y#UZq~tafK0IWN;<3{6FlycT|(z z)-Q?zA_CHT?q(Z_XPMo zfaH_@V#2qk-$Xgl(-((izlqqY5|l`*@M!7?5X1DtNdYWVg>rv*@iUltIpP`39{B%Ce{q+mMFYLc^;MqqZ03l2=l(>B*!M3|Fzf&>D(+}{KhW= znklbC>kTb!lounX+oyc25!?pFN}nk{#ScKy4k0H>cQFI{KVJ1E<=rnEDt!>?B`hR9 z8e(cuG#jPJvAXw!DO)VqHXt5uZvh`w-zJ;@pqyy^f0Dz>20PTp)*0Ht40+S}zJ7?4 z*|70UG6n8s*mGI=o7N2nM`xR`)U+hQWZ@9un+FInS-+~nfN`^_jJ!>=#dfq#*pcr( z6|8^pJAr@%PUSi!)q(|NB)5<+%W(aK%oP`b&vc@(6jvK5Z(0mncxfyy&8Bc4(E@X@ z<2k_9X82E{nQW{lV7XUZ!X+x|uljkbWV^_RoeQGY>k*G8^6xXXqRuFWN_yIx;mXd! 
zVBW8{^XNDQ5KfX)+{8-O<**dgs=SrNWL^gV0E2pHCGWL&Mpd-48NYsaU@z5=U(5aFL%rHaWXuvy=85idKF)xV4Q6C@7f& z)0movc`9ZqnCfMaMvdiHh?>7|yG=fpNW+>;W1JBM3{~L&P^J%ARQLk43pXTPD5DA% zCG?#iE zu=t^#@yw^+*;}{6E0+86k`L(=Gs=9+6v5U+0x7psJKrbSKc#>AxTi+B>_1)N=L^`X z-TNyPy3F0ktaoU|A-*+c8R6U+Xma(P2j|Ghw;U5+KV&MqlRxP+Q;AB ze}yzJkr1!qiF^WMayH&Vdc`nW)~Yor`&+Ke71fSC^%X74fIhMi#@Tw6t+)SJpPD6p zV9MmU;O!H!6A{K{iDl_65&wGq%hEq@D#ZdZ=zjDhdx-=cS{>M*qikqf1F!J`VF`|n z3U>lg#}B|FsI$cy)N%2dL|KDNcczl{&k6VH1Vqxwub?HgL(_uDo_z~=s2orP5FN64 zU--)t!jgl9eQ(Q@o8?B_xcUe5_J8FOO~ndFolBxv#;s|$Zkzfvi$tfyecm#nk&ac2 zy=|!+0EflHJeQ&s-dl;^&S2=sLh99hpW5Wi=S?!TaKCzwlZeb?`T1T2=Ez8Wzec)5 z#pojr9zx)SN%xJ{cZSw$Ru(>a)~nhlF*f-=&M1uY3k&0{$;OqAQ@xOL>->dt{PTjvkb8Hso;tjZ5d3ti+qm^(LoN;0xE9hxLOZElRae;wEa4#?l@3Jr9|2a`5 z;a@<}-W5@-e}_O5|H~npwcf=wKRa~m&@RqO2mz*Ry@OiNr_>lOk+8b-Yh8~Z6zM}cv$T8TFneeF!*8N?;27l0JA1rBcsFhR z-~J`@&z~adihcu5`#E1%ZI#31_=60W{)1k7PCk7^6=rq1rs44LvJU0<( z*|*Lg%u4maX02=3i|t9{n9Ptev)0%~ef6h6QFBNBTO<)Ih60Xx-5+{8a17^-D@7 zITwElnYVl6;%n#rx9oFWa`PO^D8g9kh5~d%*(WIbJW>OfxB)0P3xouY)R_Nt6t3@7 zxVbkPkDCHX-?}a`P=?qq;z;@Eepb^*`HVcvfw$D$075SAo{`IXZl25VOW-W7^j#7Y zIo{!um2{(O$C)ahADV{-T#%d z3*Z?4b6u&mg?dS!Y5Q%QDQxgZ?fub_aX_*2@KJ?r!s&nOSI7Tgb}1rG$t>YZPvBT& zpq7MWrMOmo{DSx@*tEd$y9av?5r5;a+V2WR+5g!Th^-uAFoOU4w*~g35jQ)L24@NI zr8E#eUF9F-_^((0-_!;c9tI~l;^wl3ztV6&wa*=7GR?csr_J!3`GJWT(P!RJPLF=& zpg$KC_dhIQC)ACuMZQD>V~6)JejI$%??!1)g_FrhnZ?{K7-KF?cg)@E1|G?FN{9@i z!>AQ5AWP-nuZQsO9^SEejGVMDBdY%G9w*CWcRhiiA`ag!0oW{D%#6}le8X2Nb3Q)-K^CfR+<2$6O?q;wYEB$em}guFqIJ%m*=q^c`K-|O z*7oPuscX(Cgc7^Qs%ywnhxuvb!1S~-tgtE{p#|WET{<0a@0&f>WDdmiL%et{A1LIm z9(*HvLxjxDX=!PwX-LN53>zC8)ss!_p2rQaQPxUxkQxySWDym={sZ4rH>Yc((HcnT zCS2WvI#=YI=46p4D!FU$(N)IeOVz})(A@7MvdsNIAF%$50qVbJmLA%e)?uwf0ix3p z&TWr5Sp10klyjR1bgddAI}rA&p0vKsFcV&9%&c=O%1beXT}#ZzY*pzV+=+IjDjd2z zwu0DEbwGFGO+c^CjK|7OygHrEo~IBj8z~Swp!$IAeZbY3EPGlYF!rFe(DVLQ@1@g8 zZkJC0&2?DkKKe)DDKB^GHmZ{WpFp5_vD^xZF!7R8QK@S_JojcC(zD$~?P}lH2;O5@ zQhn7ZrCy_@!b9I6{(T|>MI8=zf_is(r3bt#@Kc{S_9sK>cBGW+`nvc~Hon%%z74s{!Dqv&o@i$Dmzv(KO*|>?2+0SczbW7luQ_UdSZoM=UAr z#s$wM>niL27^gAZ8TAQ1l0Xm#L_lS~HwzswADPWVi$Ldr4gyovu;4;6W z#A%Qt&grZATQvO+V*D{jL!l%m(X2!;qslaFk7bN9bJuh>W4RxvKbrf}oW%kp-suTs z)IwKj6HOMPHGs^ke)dSn!R5#(mY(?wRY1i>M-L$YxJ`6M{)fuyAT!i4onTQt`k!!2 znUZ4u0>0~&SumASTG$nuw1xz2N${7PNFB8=P|E(jlu zQfBWn4&cQ`FRUSYIGuhIsV8qdZ3f8nu(od&OE}|~beiTYzQp<5>;0ymJEM8K)aD4V zp17{z!B}Z6T?#Z)bRIrIFT$=VX3j8>y?7<=b8W{YQ(4rxU}PRv!F@GFe`(L{zu97IHygMH0D2bh6t&;4shB|SUI2Cxq{?v3SQ zR0_(F`1ej^md!tQJ#|ly6kdD8xhdDq5($QI%qiNvq%2}gLnGMzv^ua>5d_HubU-9( zi4I`pfe57m0SfT!b^`v5_!T6?sG%V&5>YF#HGUP%iul#~;0p`&G3g3FF0?dd1x41Auj~vjZzl?zUU&n@kmn%MhfBu0|d_{fJ?<0PhL z)X+>lm5Nu~i?`aE9Eb;3c?d?#Gueay`|~`&S90^1QTvgx$VE61zh8G5 z1QGZU)u8k9;S&wOH)6B}R1&e(KLfb2M!qd2!q3&XU&2pLG7bQ-!pbJ~zjc4f!vPM2 z?6gi1fCDH(1<0>GJd|JK24dDG#Xy;oIE@`Wz<`nt&%o0`#XKdH>J`;~Su|i77+CKM11nXCpU{89Z*){8wlH zYh?eNJ^$}c<56V3-nz65BFfp%RT-!tbz+n*lE2Uota6^ec;x&&{ijRG6W|mc0jSB_ zj0t1_rr~IT+9Ld=cU`L#@54-0^>1g_hz334XE?WKUyJ)b#Ht{`Xlkl0CS+v_oNff- z>ma4)qNF>v5=U-qutgPM1`7g~5UHQ!neVwt6MoYNbq2k%^10=a4tLl?y88gL+t0PU zm>WGll~3ra8B^cFs)#TkM3oZhne>tw%>Jx*krOlBOC7})UG1PT3}Y)AV{ux!&Q@eWSd%40UG z-B;VZ6%x+L#8A(G;voIT_pw;bdj>qF^w$_nS_8(p>0=_&t$VUV;B-nh{?s7>Twl>w zJvGm5AbCg&R?hQ`uby;V+t1Q0-ZOoA%Y7|E)rDu3BD)O4(4llLg1_yV`^-;$+T>9i zRAswBEN&C)ITva~B^2__nR&u*|#cWW6I1EuDc?e*7=M zMKJ3wnk{mdqOIiVsntjEra+*wFfFlUKJQ!D7Xk4)9)sE75u_yDxeeX}1qv=&d0b!i z9^+VDKbc|xM5qCj)uWf>Od@$;O^=&4L}Hu!`MFboJt1`)ICO;gBOJSYzI zr!@G~!rwjU>hxjKJ~oK6RskGY-0UC+P&kM~g%j^Gz{Q6hoglDMvu-wW@n^d1mCI7s z--pV4s_YBrix!H1L6>un7BI0A^IV(HSoY%bwD?JlNYhf6i4@{}FFotvzykS~FY;B< 
z)Z|W63X3VO7OK8Q76aZ;-sp%sS}*O@k%NZInEPaL)=}`QekN#f2Q=ti02@qkMKU6Q zZ!?=AfZNH0AWK#5U$fnF8eZdLY<`!4Rc4cESxl7tWa7@N_@+};0=b<{o_WBxhDKwB zi>W-g@&$TJ!Ek!XF;p{Nv830n2Rbz|5*41QciIoV*le059T|`48f`N`As8rDP?=Ue zCWLRT@!h|61_?stsuiWc*B^KIGysiI1FW8E23}8_fxyY|iSgB$% z+eW&?>>rWT*G;}`nq*>?SdI|a)^z?9H$Q&|R@mtle~IESn8BTNWD(gleGbTX?LNc< z4G5?aa=N9+ZYoQWTJhAkxyB|?E-6u|s%}?pNbWirBmQoopCi8OBOZi{!MswDnr=f- z!(23e$TZtypJBN^7A)#`KseCi_LC3EW3?AwKJZds64Ij7`xpQe0J?fiMTT?60}5w7 z*TyEddzZnc?@fAUsv7)=K+aOV>}4$qeh}QCqw1_OxYPUDE@fTiMbQ^#kjoZ(II2ES zr!0h=L1bK!E230NZ&?&myRy`~6fuI$=;yjT+QH}4A{V|4qWBpPdwkLcammqmQ}e7( z5?3%cL{elatvT?`&`yl}i8n`dHk+RXwlSI@3_fMq#8kNd4L{2R@{g8$e{c13O%e3JG>Uh#(9z8|hyp{=qx-TKV3fc-3=C z{=_bRRU!Fcm*j_bcRAHOk1>ts^wxw&1D`?r+wfDb_No>qv)WJHK*6g2I}bGDNj)|J zcN*Da#nXye>TVaBHa;V%qC^(ve23=8U1w2c&}CO3j%#b8nxPF*Sy$~z3vn|Y{j8c+ zVLmHq{F?|hLIy%xbPDE_tux*=7!#ih&t-z`G`LL>1>ScaF@U_G6X}=NT($aebK{W5ldq$S7G>}AAhFDNHR@&M zTzZY>35R^--Rpeij!!lc^LOHdT3=l}&utpMP90`FTMe+oYQgjB2_PSwBI`KEDbX+I zo@+GSyTL+2E0m)I|^QDFKE-Z_D}#}0o$>I&1d4N<|Q(~OGwts+!EQPa2!*I z<7Hbm9Td1tE>9vJlhN?O|K_V-VtbCcwa&+xH@gV~I&Ccx?CrYwI(IDJ`XCvkG-*6v z7E^G%CYE_S4*aM-`sR>+&A7x~Zp5#rhXG%SKhf!}iulM~FM9bQ`@WgsA8dYq_4&)J ze}7M$8dg%p*ew5wtl!6L`j0F>zo|pRf@Ai}PxkY#tWtjF=ytXwW4X zdp{dV03$$R_qqb27S?YwQBNV_45jJ4_VJmWdbIZC2g|MsCZ=l4#6c4T#`9a_uiFh& z$J=;wX2C5pMw2_cM?;Pk`+*XD%(P-JgsufSE?y_&6yLn2TXd%-QcQNkfa$ z6w?hKLVlCb6;L2+Kwqc3^dGO^h(TpXBs zEnwW{&^?EebC$ELEjN~fs{~VpifKki!r8a2Xh*Bg)gX;uepJ4m16QGEqcfbjq($c5 zHBt>YMfSqU0~qlGCWIxdcaR^bVhe8X3uXA>KbfV%)-)- zCF_<9<`V0y{^4h^JVL$A-10Kl)WUkxt5IrB0PgnEhu>b!8g=a+XW5uY?y|?C5C##f z1TNqt>sOG-tlSLIT$3})EH_E+*No9`D@mGg=Q3R3IVwbu!PNY;Kv5|VC)7JV9WF9e zO2zrkUN@KrU1aXbom5<3qgqQAjlC`kTC<^xpH+<6$vR5O z2DH4zjQm~%U*5gEe%Sk@qN7>~X;(W`++EHgI3Z*E{bSNIatg{~T>;cs=1}UO z`U`5^$vS!lSGe5*QcXD}iHQWfs!-=^K@hq~2wRR&V8a?vm zc2%GndW|jdqfs6d+EF&E#0M*})GsV67%l~)swjZUD(T{#A?Mur&m7Hk3lCjh6wYZl zK$z2a`FBDX4K2Ta8-6R*oA6b(_22IZmwL`06MpZanc8fwr=rI#dl; zMP+{yvjWPIEQPBuZcVl5&8_(qjvte{K{;aF`&nO_veJCW48CgBODS^U-AxevY!}f% z&k1ta6`MSh_2)wTur&A0j=`68-wkp6X>*GPm-T(K`>e>pvvk+N!M{Aff5fivT&yw4 z898K(g?86G`1F-=HztWVkfd+4O?pAI=r)ll?^BNTr@4}V$9g%lbCw&E3Cq|Gu%07y zk#%c`FJnO0mh-T_?(V#b^IS}_IU zMvvr83IwB8UOSV<4mN;pQ9ltq;u0LGdDJ7)`}|`JqP{q}rQr31PDNfp2eh?e?tCNU z4qSPt_JfLC;pKqAj5nh4_USGKYZd#2UquDtFP@j{dUF|Nfu=56<_3?yV>`KVLvU33 zz~}}(_#%V*Tp*cEVQJ-8woOMSYNS}wF$&IWE|=~joMy^9CilRJEbY3 z-c25;)T0+g@ZrAb&QOynGv>Yp>fT=mfa#5cLojAkD%*OEom0Ee^+Ptrv+-_Q_Z-gB zdv?4%G%6dp!J?v!Hp_af_=pwMRMJW|i3KEm9w}QC@tR#nbV0Q@g^B9L^&%U_-V)zR z?GzI)oY1owR%WS?MD@y#Axge_*yxbP7t5?lzAF@EdQ_W>Dp0T?b zHZGPQev;na=o?~1__tZq?USrUO_NaZo!>;GtOcSEqedL{Il3_%i+H#Ol^n<3@h2KEHRtn zBXsSzhPoTfz!OLQTsX)LA>Upft8E(HOUb%JYXj}LqU0IYyGltZeHQ^EJ&TM$nZFQ+ zj!EZz@p`b`y5n`p6T10qY%*^=z`y8c zbRq1;CYtN@@i!hc5x?2%iKVc83$eSi>3-K*bx4U9y&sk(j1Nbp)l0Q-7W@jXtg1TF z$+$6Pm_ zG%SXn^wppSgqZQUL;54z=e%9AKc|+ipL5%Id|a4$#;)ae*BO`Y)|ln^(CYxYME~AZ zNL{4d_{oav!msAiMKQe1@(8;X#cv`Sq@KX~k8(SR?_35*Wo)K8(9K2crDG$VmsazA z;h4vdVTZ!d%ijk^68x;EWXcvG))T0B)3*DdVCatcY6O}fDmPiqg;r<6^$e$6i$2h( zYh4lWCZbSkIK$;e0Fws%&^Y6wON`D#>@TsnTahQ zZjtd%7vK$+$?LW=Z|aJ$`{sX&{+#8yeUTcds2+1)IE}tLhZHtz-7Iiu{2CFmNHw}z zovl}su*>GZ)^{#XZ)suic-rJihL2scsjYV!lD}4p2{>uqg^pbB8shJ(hlg>=VbrU! 
z@|KhP;>Qpwa+&;36XWBrZ@2|i=L<*;-=+;bNi*gf=T6hM0ecqpErrF+Eo?R-4Wa%X z#XzQ2T*-ptX$<1!fz#tnp5#T$NjbEweW^8tj!bQkW4gh~8qj>%)|;$~5-`CJ`6Fzi zo&;QQAW<=So~ub5mNt>A)%fj+E6cUPM!eecRhhC2&bpJz7n8%y&3`y7w$`FQ4tLC@ z75c0kP~U##IpbqNrz-(7H{_`K=?=eE)n)>U>EPL1lxS^iZJpRVYDy}9ABWuWm5npI zV;gV$zUGre>&S3CIf+T4l~6T$i5}*n9%HU(DPhflPm>T^i`ZL}5SWObD37Oq)BNR8 zDW!Tms(aU3>_Q#|oa`o_*H6yxLd2xD+y$m<^*oj5r|EKdl;SP4_?fy^H?l5Db*wMp zpAO>Yo-|a?x;YeEJH>)BZ_N6XkTFX=ZzlUj2O7L@G%|SQ3bkV;l2CWu1R@=de+NgO zxrA8}eZmk*NRyI3`ucOO(#>hz`k?9{*7P!=_bRgJ`M{76#~x9h&wgN=&B_AfPNnP( zF&8D}Qv96x7%i(kR84-#>g4Iku4GEJK$JS)K~vOaZoI~5I&dh z-FOFRej@8<3!=*g2=UaNJ(zdTe$=82HaqM!aIvahg;Q@QSPq<3a6bi@4Rh&a=!W1o zFXERqZLXp&L%rE`h&&eYd3{_;E!c{Iz;hCrNT1gWT;gWy{+2>X4v;ka2p0ZlwqZHn zuG^-bymQaty;~Wig*RV@(py^7g42C_yg6z|uCbSe);Fea^MQHl!EB4nv9~G5`tt+* zAKL**h9DLnwp8ubvout~H&=ItmnA^L zdI!Jh=AtrI=tKHlfa$YXBBcm9?Ba%t4F6!JaldapY6xV?Qd>JY`ND2I=B{WQ|Ab!=n{zJU^!n|o6u3U#j z_)Mw#WOOxQ?Q64-lDh}vQg`R&#qBtJp#R;zK^^Gb2*t9n3W4-5H|M?z=4v(^DDg7z zE7=wsZLG1}MM0mKX(VmJ=?MTGf3I1h89xk8pgZxC#61IiUO2g&DOwAkLVLuU<_B0G zYpD>|q1ZfCb$sTEVzUf4{bElcyjvEn>ukw}{4TQ27X67@$#q1aUzH;`oqqixpDK$> zo7aau7-%^q_=tT|cEOCg!Kuh>((}EWn;Xt3eQ(`|HxXHvb-yWs^wepZPz`_P1LxL*soI`?VcB~f{cQE{kfT|flSYugw@n$F zH;~+NJ!plR4Y_Fm?%)|a)>Rj^lww6U1?oyIRK9hH2OF!Ui!8m3&~J$96lw-I;WE%- z-d@uL$qenLh87HWc>S~TqaIHeS~t?NR0qBlzjv1|2}S4HG~?d$u$?+hp!S2xD`dxUIroiXC8|kijG8pXkOQ0uNSXdKP2Z;$Y9%Uq3L zydgxFKbHGq@5+}!x^rHf&Mk#t&8*h_C3-kZ4KexMF^%VcqTcLSFemG^U!LG(g^X$dfPsPsVPHh#F=5^n$xJjzrbd8Wcwi#c9(FNmt08Fs}qJg8rqT?M~P^byb;+XJ=T zyfknhjR-mgYW$)^>VwQfI6=K{fxkSmaHk*Zju#UsgH2Lol*s| z&(mW&!u1J;HPMwyx`nW)fx=D53otXz^{s!p2$RFLs@02&-soB07%k@7dA-zm#0poq zLut>)g;}QrRnBuZ!S6LM-kKH`N%Ycc)woEjK(5Y?3kBOMdSl!my7;fCVZC0abQxo_ zDC6|#-TurMWjY)Wy0;D{HEqMMCVrD%)2hxA)DZEiUQwz!Cz;qi2i(9AEg5)?)i$wt znsc2fq+QoROd$SF)tb#(pFoiJX>y#Dm%;~{EJi$OA>O3-)0QzJPbCB5>*;$f^V-Z- z{(KsXno#by9h>m{L566-&qfl4oL@`5xPd8yE#3wy9(yGLQzyD139Yl;7x}ckQLUk~ zwxOmuQR|hlah$Z6Tg^%JN7#2m+IYo%#&gv=%=a>;U`W1GIxZiFfD2Y8F+A0x%7@kt zd&qpM`ZjB*W*_%O(QtX10@yJYdB;5AZD_jqQv*t{#9fuXu)3qb}qQ`*6%i zfkT+<_P&oNrQ7U67lcFCN4zmbtOgpLPs#YhBJT=O45R|%-Nz}}ciQt)SZDGGa`>_( zsZAl#jkmeho_xJ1LFO~m{>i4$S7z72C~S}?#(v=GSliv4tv8WF;9Qv1Sr2^}injfm z_o&(Di5n8#Li#STO#$XrLxbm1cy$%-Eh#5yo!lYendEMv@j7;>kj6b!x=y%$LA)uq z)2gTjGwZ>x=OGuF84oP_B1gg-kWBSxQPHJ_tpu~JDK_Km!dawx?N>K%JI;|LhUhz{ z_jM2I;!i|bD6c;VQ6e;AD{@?}kK}K%MYfNGNxT*}(eQ8tC*E{-EGPLRiR=M)y$NBlsDVOy=ZGJ5pJh)eL1L7MDPSQwY zPwI(ovmc)B8lf2!JI;VJILi#aQ=@hA)$`_0S^a2Wzt@@h;S80?A*yq~smOcB?z#S~=`)%c*$p=8 zesir?N{Ti(PU~K_kdGh1r$6R##;)pPW=nM8GRaF|2`#&qV{<2KM9S4!hg|_wO_+`} zpvdDE;5W`{^jFC%wk+pcl+Uu&d>h00{plTajLZ|u7-qq6*;VAoXNm=r9-u<;0PZ-a zVf3h#N>(C&oc5T)yQY6TQ)SaZOI$EjgyeRb3DLU`d#MMOh4SCt?8o?d+AQnn%ApVX z&z>e2Px6oNir3-CwX)V7SL3b-#LHfL_|RGWt)?(V{M|5scV(K131OGFHLcLTcCnb= zBH(sUa;2%7y^O@M`nO~I^c^(2WeM#TH zC+)%>&ZPO}`5k_q;sbO2)+3f|met8NH58{}&ne^akCd?dNuE}phctRNDp>@>F)faS zJhDdD{^yJ}IX>#DeO>|5_#m6*xdmY@1y6ue9HQ%nQ&hY}mcH%uZXG&Jf1i7h<%ReD zOV?7L0pX%^!M1Th%woZdO0pj^{(Ab8o)j68_cn{=D*N_{q6H9N1hVI2zw=+aME%`c zMF@ZSZ~!MaMY8}dA+d5ypEDj$Fl>nZ^3mAnil$(MqmRg@2={HuqQHs+OE2bj8;NOj zOrG1yToqOJbtj#|`=~R^SC%Y!%oH0Zt7YasOO9V}`{=N|AKJJ=`Tzi>5)>Vo%mNyH zDji$CC%AnyMQ%wyo9}a;b~jtpei6N*=`8T1tBZ=gMzei7PG1Y-e!(Au9O`W(XIP}S zzGgivCZzKRuwWs<$8rPPov?ax!5 ziRCEM`AEdwQA^z{SSh9J}s;+hjcBhr!NU;O=-i+!!x zMKbzokS5*kTQx0tO$KSgfpSh9&%-g5J1{+y=5S|mPpX@P2i+S60Q}%*o(^PNQH@%x z+QhAG)=Xc&Oer>XJ;AXT z(Jny2Gq|R%=35V|3Q++v7an1aBhh;FutMdRAnHwT1>MZHH?DQ!oC9%bc5oHl3EIn{ zH%lwt1zUOS8qu$Hw%l1N)>mUB{oiTL{WVMOZ*T>o{}=ES|JvqX#8>?Pl(CSQ72wg` zn40U+Dm9g>j5`@FiVPb@_H`}3SXd|R;#lDr*Fx)E@2TiRmDKA9ObJH~Gi6QFMmFB0l 
z&hP0tBfc7l;e%BKG5h`53_3YFC<^796dt+#)8bC?a|`_eV>g1|f*V;s2Z>tj?&dbGj@c7mk~QNFPbMhzTQY?Ht&0(_?RK!;0AHv`wmq1 z`;WyIgb;|tlKGhPv0???6k!Ug-lbnz!Jo#4CQZDeD)2nz-twBxrMNFuU#ugu>zQ{o zO8j2pGYifI{MAPu;ca}uHqZi7e87PIr%vy$@w#TGTkGtRnrp~Ktww`px3R=pU*?^@ z@&y>({;7Qw%<+rDP4lL!U{?OGc4GJL zvRGU=~Y!5MY_)v z?#s@#DkW2ox+J1cH0G2aIH-m%sZMtzB@ehIIM!D zxhee)#yct4O#LzsBNa!KM8L?Hbu@wFq5|#+Vmp@rc%WJ>-}F4h79o^u%JC-p*q}|! zyc&s_YF{{i-ScFC4J*~+W^U58e(ARbHBDBc7i9o|qMf?yP4>GTC6l5>*id&i%j+s) zZU&8lIl;V-!FQ>oK-pW__Vg#WrSf`4?*UWWJXb=_u;&Y#^3 zL+)is$Wmw+y`zjvdvV*ek0!A!QL@o(rD*iHDk9(pP*TGa-`Q`?a%@lrOX92x__<1$`bJw+d@%D9bnM|TYUY+yB|=o z7i}Rr;XA}9aBz(5%Mk!9oAQmQ(p|dGGYKz`LPQYcF;}ve$55h?qBrWYXOlN9Yg!d7 z-~ak#t?K4ooxBI}kxN)UjofH_a!}Fss;l63327j49c@-_3){I7#$lHnYrgV;FnF3=T@0Se;=uwC@fhI# zVSUD|*OIOn$&wp_+?(5_x!VwD{OHRk8J!y>cV>gF-vGLMu^AxNh}T6|=$T_>(mS%{ zBRtt5(7U_5@6G1tI!|>!r-5MWKU?V8{i=7^Rj?ns2T>kJ-4W+*H5a*^;9bS8Yz~5g z6>MOffpRn2X%k zmJS9@_yIWF7bTiaya3CT?WD!Onb5G5!YvA=UVS+sAryb8XG;c2I?TUgbj_n_^@YcA zMEbYlg85*hKPC@xiBKoA3R}R;Zz3)k$#A*b_#T5FN_MDD8a%%xR_%jXQOK^Jl3;>X zO>;;1Q8fv|LpFc&a(|Uu;8~=|zEvv*gx1?-M`tR2hSQz#yxk+^r&=(eo9RT1UY=wq8>g zN2l*yHtfNtAl0zZh z_cP!Kb0G^c2S^ZYB~^+T60TAWS=Ng8N-0+2y^=LRLZedV#~I7i=dp-TZrlw=>r+)K zVRq4|;}!rQ`zIfuC>jw3Y5@4@_Cw6pMuO`&Ra_XhlBHx!0v--Q!IWh^>JFm34hB7> z4=yc#2qY^@v`yU^Z~XKb!1bQKK6d`MTv^u5<~*@5=%c^s~t@x&Xb zwk!CXs1!i-Cv_4~6(dx?iMsC9cQQk~vN$pOJ>-&}6tg6*JzR}L+y;c8w?wN6pBayS z_{pFZTs?2T*kvqpem$G zZV8c_vqdM|-sB~HTAl|GjD0;L!(kVcUeANCkB+jomDGqUKa13;_J_>y!={P7<(Atk zun?mJ=Qj@EJKt|oNRFwgPdto?Q!H(J|NiMcv3p-X@4p)+()#1uc!9@TRYG1jc$nt? z_;xBk=TC);{7*msRdT4m!7GTrcp}~|k=Fw|CcdoC?EBl4H`&p3FHje6z!J`0i{R2$ z5|w9(8eBvuv|)nM-iYj#gfo?M?$LTU83ZUiZqv|?xsoOqjhZ&kh;J~Kr%f%0>bJXb z`?UabnytGQbS?%m>saJPHk?Iq5eu^456w~r(JQepLpbGo`Lu*0P=ZbI>|j9>Me{i= zQS-TKGS&#$&&$tE?p6kLXV-*=ookiC8cpzUuuw_41x0H~Zd-e!fvdpbqqqU8ZVd|Q zsLtfF)>wD)k*Po690T92Y2dbq**SD}lpdnZdwIxG$0CkD2EDB3;hX}wu2k(wNjE^% zzfgvddcm#*X>(Dc44K z<5Z#V`-@n3Vnp*(mc6^n5X@U0imjM)d4=!IIS9%vsd*h4%={)%$(i(=7QcCtqPqFe zq6TT0uu7F0^K-u*l;q3=&Iu#i(GEo+Nbfsk35@T{=-_;ZW>*!Qa|)crKgOFb?B>PZ z9*O;;4&!U}=g0Zt?|*daxH45&LHwm154H0sf>~KNSa!~cyYSw6lwtO=c0W8~?}KJf zG0&mZ$RDk&`J4$5Mwo#OVF{(XAC%LC>DlstP=LjM%*U%0TR42n5=~B>f4Cd3qQ&q$ zE{emT9B$h=yYUEP6^;4z7~|RA5R?OHAP81Bglz4}v$dDjsCkMF-Aea1bZ<&q(EQAQ zt?5)ELgF*N6W4t4q0Qy&aZzyF<(81}0{r!%r=dnu&O$LhvOO~FE3~svZik03K~H;+ zRyPvL%j{bfkgi2GELo5XfqSb#R5J8fTnfsz{*@DD{3(mx8B_Lf@F(RXLk2s$t*c7Y zx_|Tr$U>^|-XUzdu)|D5At-r_4F8$>Z=yBKQ8a&j(YfDYa?(h;$E^|S&jY4;=U4Fh zIgl(GtWd(o6E?)#Baa+dfJ^-XXr^uoF?eg&Cr$9=&XS1teyh@SmA={xw=&EIXN0F% z-p8wgQ0$ma;E+ODTRO>SaR3Y_!AG4hvW;hP4Hs)YK8wzMXei3z}klduSI#6V~{MTQ?D=xsjigvU`?@0~lcD z_p#O?GH~j9GT9tX@-0#7{QW}5sM;bgT-5eZ{y!-33Ps5s@~t3!_4(d z!+v#Hk=0CiHa~@pqwJfL#HZG@b#RV$NvH;isKG}f??o9&Ztz@)t1-W@`CI|A+kfY! 
zLXIiwDYK^B!Tbu8&BtC3a=@Lf;B+6~wM%n2)a^>P(2@zYt;C+s%)KQVocc{>WK zYfY1n&nw6C4X*SWSZeqyEqZpF>v_`->dB@LirXb}#E;O5X^F>cRpl20t20%FC!pWD zRn|fkU$+eOCZc8T;S;0vsm26?#v{ z0yywxreof4$7hb|(ZhvH?HfHV`~wL8&nAa?sCJOf^s(2cY1#XQvg3CG58PTVdHLLT zJ(-WY`vpt{eILT_C;)#%r@=8+Cw;R;A<6DVlARLGeE@QQAiW~I!@A$F7D%|SNoXn|QZ1^Eq)?U6_I3l?;Y0E*Qa~OhA1G?d+*YF2azr!g7i>DN(cdw7NjXi?;s!@ z=^cU4dzUU9LhsTOngIfy{mjfW^P4lzoNG>b&-I=^2v_#Cv-ip_*=v2*ecvCyyv6Sq z2Ps@Uec;9jN$|?uN z#ISr1!?Zw3nr@QWPDMwC{MBoYvm=3k5$gv#gj0h6pns z`H!&t-{9QsQqy@AaIi*QGb%d|700WnUx!FNP#4Rz!8P1w!lA?tDX4&EWnN}(ZDobHb?ZhKs+C6If>X7_tSogoxFUk?A*xF$?*AxNSQDF;pw1 zm-jsm%%wCvvL=z_qQFV4^zyj`e!a{X6>I50_urb|?8#W{j=#94!`i;{6J@s^x$8zt5FWR6Q zf=reh{hNV`%z}E_?Rz&M>{CqjnmpMKWNp4AT~Qeg^mmPxi!`!SSv@q^>J27V#~t(P zp%g}@h6~&Xhspph8|K!M2`tAdjGazsSz`=wC4ta|BPQ!=!q}?RRc65Nitj#O4|?!X z6{is60>DI2Zi_(N(r0hU++Rsj@lGwNFDw10H4;Z$xRt_8HsVi6`Z-!SL}C7Yrpl!H zR)2G1|5yJ0=d5$v0QB6xaXCcBbj^zb&G&6JlCa~vmSe@EA>J%({!AU3C+|W?p88{%vvlltvWrC zYl<=9@q4~37?^E2G`JgSQ(cVl&`p_hI4l=Qexp^RK*cVa%n<}1RBfnLBb7R26q6|3 zkt6&iHf_w-7A#7q^|bLPQ+-`+TV;xPLmYQ6u?e76g@nPkPV%(5$)9y9G^lM$b17=X zOg7R62i)+$qx;N6QFCKFWh7l6W`g-d)%BtA=U?-Jwm4ZCS@FxQV#A(<-foP$`ckOB zjHyQ3mvw<2~>DYU=G0uFyDhw%1P%lRMOU)RR4L zHcIz}NUp?Y2H&RM+LHL5zK4S(&AY6)dnV()^5&Q*0;$0+YO<(;QnNh+WA0q3#@dVu zC&>9ILe)L-@NHoB-c{bajwA?6#t@bm_5FL?IL+{LDz$Orr9;UaaJV|c&^z4oo1FgQ zrV#6+^2j)`kIvvmzpii%@FI&Y;MoBiw2d8=*We!V7eo%lEzXOUa4ChdM6pHotF0PN zigU2mf?W^38>^<9VU5uYfuhkN21|U^RX3d|gg(wJcrjCE7+%O6aS@~R$;M-_sXrtN z^r*q|wc2$y@0p<}4X4OF-nISGo+hHIZrtr{YgJRco~>FivRm+Lgbjx%`f#qmKEFOgY@h~*$NF0^|a zP_(4k0jz(fa;D7NFN3%_3n*SGvDFd_D~l(QNNL3U0SVpCO<*K467u#y)XX3%9xd(g z(hV;)723^gI*POTBeh5$n@9?lZ#F_pOtTL9(x+-qmH|@hL8gC&Te`NWrmDfD=*74C z*2YOQc8@q*Y%Y>1j8qx-b|BtI5a{7HRsp5h;mZo-l;rq2j<5HU*Cy6gkQTOT&7NPp zpWl4cuO7wRQ+NB#OPog+MpYGNhZXn zoS20L9&%y%RfJ5{x+PmG&qX zWAOg~xA555s=FMKt}Q&CV6< z!KH8|P0=BNrQPZ-t|03$-dM=*r2C`#kwcZy9D(}JXrPYL?$x5^fj10O2Qsg%U7SBu zVj2!IPIE&Wg`xPbIKCLsP#NmhUT?%{(Sm@Ke7J```hK_!lZe1G*B0vYqgO2{>T>VR z2G+(Uba)HgPu?4S{SIQ6WYxW@xICsWV93TAPrEtohl-jL0zFDYcvbJ#kwO!Asv#L$ zNHD-VDW`+W8n-~D%RPWIUaC-95hzb811{R3$3dWLBhQ`JB6;O{ih0+Dd8ejY3Nuvd zKPpoO9}wln^;K@HSo+6@?VS<^AlU=ltohH&%LHHnqtcd2<{)sg&`lX)J#}}XV>*ro$c@f)kqD5`Y;_b?IW*63v0Bie0 z5!E0c&LLiB8^XFEl&z(f+Ba>6=vVxnd+h99;>^?Bdw1rS)b{Y*;|T@T*)n7tFGfau zs{C;5>OPEAmo7cvqcNS}Jxv|-#66})E|yb*K=^pqMO}2=k8wa78KmB1<6Effqrfx2 z$9tFgNiGGELU>*!g|$ZQrR70_v_zq7&%$ffyUR&oBX~F@1+Z6uEQ%w?_viFwMr!%@ z35SXrS@!Mch?)SEDdL;)OCNicAf!l>KqqNwSf|dbK<#?+9RqKz^~}&|1A6k$7&jg_ z=}e-UfH~#D+*;$nCP>|D?(iHCFr-%|V7GXLZ)^8X#Up+tBT~kA5Bg!Ny2Na6FaM&h zdhqV+dG%a%*4jAZa4)zHd*sIA@8@wi3WJV|6RlK8<3$KEv8Zg1diXrGsJ1wX;zX$@ z5326)+1KPWje zEbjoe;Eh52@u4|84^OpU;^cJqzb$z)jj_caAOTtRPD+aXOk+27U}?++O0y7D-Ce>X zr1Vqb4i~E<{Jtb;!mZ--_dR1;YW!ACH0ir=ja2SFac2`ztT?PyOV-gMMd0FxYAB1{ zm7T%2Cgg@Kqs2DNmGL9S(#HbL8PyxktI7f6)}#yz z%w-iA%|SeVU-IbVIV2`YdC&7Pk*9iBCX*}0s+pF@<&;DO;nYLT{Zq&@E$vfmDvIX- zw@lzxVdD_LBR}=_D;h@+raG~FNzf~+P4ZtRLbfLt{t-zkT6rsBBsPBTM@38(q^eUkZ%Zv(#q|h?HY>1jVF{x=q4c zucEWpja$$}uB6z7qziY@OVIpBv1YDSJab&2>!Mh&v^WYaG&iw-0DWg=cVOL=u$64d zP2Jj;sNx_k_h98y5Ahe{s%Pp;`zJuc+QH2?ue{~atZ+YkXk%`pN_sI~`)@)ikx<$= z*8-YJO%V(A6uT1B&VLs^ApYq`8FBr>m$l`ZGF0WNK_p@{dhVqR6>uXb*%&H z7cwOI@K|pyPakG`oVBH~0i=m6!a&@Dk|}ZXjTm~6)3TCY*($b=MaC&ijtXkLz(8VE zlQ?YNn_}IP5E`52`Cic;W%Hgz+Q4fAw&WZifi?+_XeGfaqQ%2(o4uOAO?S#5A0pFc zq-wCwM#cv;Agr~TVw}S{KZZ%ZC-+-5-R0`y8xVLa&N8uq8Ak1Lxv8bD+g#C_w2JlnAd4*G zwHj;CTh-9@IF*PJd~8jx7OJiLr=5fA+&+zd6*~PWtTZQigS@)WS!P2~y3~3sf+V&< zyHoAft&ioIs}+IGM%nge+%w*m*J(+mX2y)!|mR9wttRzZgf8p?m&F+{iQ*drejaD z$b_cd^6U-IjObou>9$*K!FA3?u??VEI&eO^|4?AorhPFwA1jWY`FvC=qq;hgySytl 
zj){6u#?Qy$zgT)!_F}nvM@WI2wGyVle4z~Hx#L}|u~{${xZ#1!>{$*@(M(J&G_mir z6NT!x) zYxz$J}B(3jZe6-lplC+bKaVt{+v2txFYbNS=Pt#vbQJ?T$)(H zo%`#Y$#_k8`#U^Wcjb0zUP?-&+?u26&l?)_7he`MUt9LPDvj~ZgM$8hlR8e|FKr)( zC@m~Ng8mPFNS1%tTTklZmVM^K=cD6?055432x!WkD?#^cxYqCPc8WyvwYQ@U|At z;6ce+5+;8;!YIpnVQ&Jk;268Njw?=DmOQvdpiG%?*hU^PI(r@nbuC>-jwkeg=K}brVBmdY)9qRcmB`M@BRQ>>_sIH2bc2Zt=!B?6z zZ1iR}TWvtZDWgGIv@H)yJLTFWj;N7xYj#>`$|MZQirdW5m@mq(u@=Yid_j|j9cGcg zk_D9;`*a@5lDx#!pnmHjruYxs+%#bHbNuD`^+8a(n|^jw5|PMQRt5!U2W$8TK?W{K zs!};2Zz-SEtYvLA19u{C^osQErQp2>2o#+qX8?DDX9 zFjLsb$eLHUTlJV5f8EfLa0iJ@)uA)>g@P^!&wJ?EX7Ib_Xtn6IXn%9fsWYPr>zV)s zMA(%!xJe@wl2w;ZwdR6f$r{*zMM8wI&h^4um_rL*>loLJ!xwj>1DLJn>=mD=@vo=HA7RC38Lzb5B1~H%Lw;J^| zlX=E|Y*SKIfYAj({8&|=RcI;|lk(5A*&69jTo%TpwGE$3z4WTC@?Qsh`sg`w%cN4D z+F@WRM%)?jA)yiPWnyW58-b1vs-`#^nuCPcjv=3+=T) zpeWrVuQJ(kJ}jt}RKsGcuRE8?Mqy1Aom$i-O067CVllEGbH}^m)-xbrV@|;zebS# zdyMk`{RH9vMaA2{Bnp3-^!8kVG3Zj$SFG`}_vuJG+q(GNHDHh!xY=2dACht&c6&as zONVQ18KcS=U><+NhykO**D4N<(B{uE!+D1GAJ~#Uxu(X<9M?30#V@5qN`#tWB==aI z@Nw^#d3tneMYghI+^!^`^i(MR2uGMhbPr6mQ|?w<^b0@YU5`d)+$EqAqobr#zxXvn z|G3qPvSU6Rh@_f-!i~DWw%p4f0gRQ~tVY9;2GKQF-`r5!y^X%~ZoTs}>QfBR{zpJB081_IURDLf=jW5^vq0cR6Sbkj6ifAwzR$Ph!7EsjRFK?`Vz|>7``g zls_`MgU_epHPca>!HsFizZqkwuIM%oTN0_l1iN{y91&w02e=93@E(*A?me_b7GTUW-MrrZa-oL*rSB3=*N2oiRWcBFKflBs=zo} zf_VMHuve;-{r6e$G8=Ra)(N|gkVoeqop3OKWF<;;!A_1EQ{cA3Pvd#%7Fn_gnb8KP zLgy=~rI7s)mLII-<2(Q*)Pt>c={i!51*?h~TyY`>>~^*JGsgTO&6-7-xJIdVu%3(n zsIVd~yF^Z$s;z-OM5FdG?J-1Rmd^*A3;iG-!ej9Ir_+HOVAj{GuTJI&OnyXpKcXs* z@O}mXWai}em8iEW<^pAK4##Mr8J?l_Gj8rqV!d0*8&JvP>ku3Nn_UP;qt|bnFPdLm zBOfz!&h|Mk5LSA-U03>wU=;g+?5yw`I!y7NAu2NvYeg9B=sGLI{}2wx?bis`h0rt<2A@6XIfH{zI}^`2J7%+6NOxQqhSI)4EP zcb_*wEll&YOWCJ$PD zERb@x*|-Sa(KaJ#Tp#pXYmngy*&x7+U3b zuRIZ!4XvU7Q)HNZTU8L;VOh3wvYYf6EK+v~D=dV#|;e(lQ;P4;UC2C=nWqZ8W65O^D{6~z&>{sK94ue*WmJTl8x z%t;<%9vX+)UU?LphsoxGVXaYpuL1mxWfXFFcqHdhIPZ{f<*|lh_}95w73~K)d1`mH ze@JH*iWsbk_|eNdXze>9{>Qu^0NfOOzKrof4VpBdY6A*}*V1)*Rl<&bB*V&>65w`2n{Rn` z;TDBYWk_P4p1qHVr%Ta4r+n!6e)(la*&KhWOxn6^yS1)w(RTK-?HCxeE{Ha)wqZEQ>(&k&C%RhcFqUkG-@F*3D6OV1takl=z5~(5!Ed*w zR0O4|zI>GTLN>bwHj<+5;d^m>+N`x&T&=sa299@Ee_c55l>J+4Jd8Wy=b1U}@=oO< z)c9Uhs{1;xLu2Z~z4Ikr68V_2dAipJB&pm#sK*Zs{Jv}mHAdm7tOvdNaLN+#Db}1& z%2CMKM_S|$h~;oqRHBHlRnr%LIy*Z9&YKN6d~2AkkmzAioHH5UUR?VjKe;f8+Gl}6 zMaJpzKoxBC#Q_IZkAn3jXCcxqcAUe_ASNbE?Oq6#1RY8;>%rfa#wmp^>TQhlu-=~c z1eQuoe)+E^H2rP8TPX2JqKRJW6}KlZb6nr4NI(n&Fafb9BLu$DW2O(=Vl$cko>aW0 z6>47Dx4($?>#g!u!bGoZOxIlK;BGX=ziJDlVJ|n*!}g11rR#X$ zlcSF?;y?xhAtL(zeAns`m5313oxsh5$3x30n(KshAKjRMPtC5_>@;!+&6sR^<2RRN zST_c%(ZVSku%osZEB(DI_0g)JH1f&(NaqGk$kakrXx+gIJ_vM~AI8_&mubiFu$2K0 zUF|lvZN<8r`GCHP`Fi0~T&cTG2zXIRW4*mciG3g_LOAKw{Yv#j6oj=*c6GmJN_Ar))CIGi}w$KY9^u9Ptb&{ZFR0e)DO z!8qX{kKm7vE$SWF6Hjx*9}S9)>T{3SPFS%@M)U8hUkSk5%%++()(G#aI{TEH(Wzka zzxg?lELDfnuUZCeGDq23o-{tG_E#jP`gB&d<>@YD|dy2j0( zA%i+TN4~h}-7R`zaJWb^%+`5SNNKKrF>R(8`y7Z8A&ihC%j}RRMu2@hlCt<-lF2t1It(yUc>X|MreCW+1RJ8wo*0{GeZrwCfW1QD^^CyJ| zq;ZStB2AA;DiuKmTOe`KQ(02e!02>2+M9mA>bA~s`NxKEh7Nat#E?CrDXmXIVqz{j z9FbKrPu`ShV@OFWVy6eb!x)3xgYwwdyAr_4PnXFkulxbs>B!%8S1(j081kqaW{lPi zEIZT=Ul2R9GmX7-Nc8sWd7P$?1`u#e_%Qw)g5luk;w4;J$GFH5qBc)IL?svaH2(fg zp{>cfd*he6-O83Q5B)LV;x7PtgsHm{#S&w7yb5;xNM!^cRruuf{zNY3_4FhFGFG;4 z5SVq~68oTcl##DZJ57|&2%sms*zR4firzm%lXUvhqg2CX$gEs);My-fh&B_-rW&ct zd*r^KS0`W(~7 z^FPq*%kRjrzu+#a;oX-F6OJ{U4t$QW!-$#S$HT6khMQkvm`@%(jct88yQHV?cJuXZ zlw^*bjh?F;AlZE~0A~#7xNERs+p9Gz+{ppr$r(;IWsWK)rY+bbe43cV}SV}Cl9*O~0})cQ2@L?E`y z(VGXWIXmJRK(<5)JZz?x{4wu0$E+7eTNZxp6gy$)E_3+BP?I#tH_K#6pqC*2dQ5ly~{yuPegMVtlpcdfEYg$A_@plr5(u z{nXY}urk2bwdN)t=7wP9d)eNcm}n>*7<|Y 
zKy7WP*1(kt(4KK(%PNd->7+9(sZ*_nz<6ogsfHEl^Oeae?K7p-iQqlTJLaZ))MBwf z4{%#)W*=6=D|IYPHk`M?CEGlfQNsEjNA>kD1+v~>nYg|r7;Euyn|p>G0hwbs|a56`_?ArMGd_Px?lrfIwqTZY=!oRQJy3oJ3j z{O-P?pn0V`|@|ZOR(rmG$U>(ud6cdOlI680w^6t>0`QI$p_z8DD$)M*QmJ?Y7VJv z;xRL89||j0qk01Rbx$W{pIlBYevKn^Bqs~*f9Z;TnX|H25hCWE^u0Tlzyu=Z85*V- z(j6Ea&yh%mAJU_zdZz+&2YqL4Gz4pqR}k)4?nXe-O-}{0_08k^L>s%vlelWG6|CNv zxLRsz?Pm3^e5|S$KksYUWrY?nbU}P|9rWQ>nb=<&(_Wj}R7fg>@DZaYs?I z-=d|?SsewzNjhdc>TZZoTk+W@yPB=%FsWWVBR;U?P#H0oOPtn_7Hh_7+Egtbog1mP zQN;fY^x)6GPy@QXRo6x-cFnkEMC9>zk*dKWqj+HCVBzkEvL@39e2$X?u}?0}cSzOA zCs-qfy6Yd}Jm9OwG{dm6-_(mB1x9F`5`xJx>(z#XBPK43HOX!D%?wT@>144I^)1JD zUwLS#eF5cj*&-(FcX#XK>6LMtsRK*I6mJCrU1V$$?$b%7WMgN-_;k7p9DHbD_{Okt!IARa#+KIK~`Ds*k9}BZT%n&n4F^Ts2 zHfCo|F&ZWVfMObPVqM&_p&w`)tL$03s6zTgoFD%>Dg9(+BlzR4b@yi&{qwpk{$MRtGgbk`)R+2@tOoW4ivntDap*kh>|^ndiTe4qDl&*5!0$wRly`#E!NXpuQPbu2nTsNc^jK~D#gI%)k-x_nOaB<-%Z8M7$34J*uK^qsAtAwgxRMnzQ>|>6fg__a8Ohbr+96SXGAK2y z=TzTa4GrFb$-&m6yjsc-DtIs2$wSrkh#x2Hn%zc;)vWrMcLN!=;=TN%0u{|Vl z+?piZ{4Fwiq2piCpdeT!aa%F zx5&*pR|5tsfs&(C9C&OS4Hxxw#zS9DB~UApzUQ%DccxuWp70fI$!}OQ1@coguOLYA z5c#U83g}%wOGK33$7g(0`NY*u@!NLm*2>nrA6*R2+CEb!7V*{P8$eJP=n1K2sn)*CXl&xuVEBan4JpFzUR;!e~q7bq?x3XX@ zlh#Exn)z8>=?N=4yTr(1ZoF>G*Ov7$to3@6(|r>NIme4hQLn}KYhbW6OwtlUt!C*t z@0?<&ZSY30-vQs{lZi?+8gNO{zT?eLDuu-Ey4M{QEG>hdF~x1aRDpiI{b2CoJJ~zM zCpll2tzv}D0?RQ5S55$Y7JVPtoQY9&EHC%Y6k9dA?Db<2}RjvsGYjQT5x7 z_ReK%gO^(lL@NDkDm@pMG5j@U6a;qj)B578u+)5Lw_=IK+tUx?J#@mRXu9)2%e-9} ztYFDxXJWTiSEYw_rWdqu$JnPg-J`^Cmtst%_V={@ zbJj$|&+q$%)>yXgB}y#D#}KHlR)jSMhsm)R!cFW_qC#z*v&`)GQ%Z^YMCNS~QH$$# z`CsmF^L$!|RdHLrub|rWmb?;&P|N1^hWp4Rh$SNRwqi*WKaiDW6r&dVHZGFHFJd4yPtBnRut4_gYyc6EGkaY%fc($w~n2y?q9G7XLwYP~qap|>mrku$@FJUH# z`z;Kw4qx83?30U|u8sHtpl5E8f~DzgOt{ST^$S(05%0nCMsEry_ID*)}ubMpWgf!M_`&@bvjX_ge%pkO=JC0YC(B21XjDmDN~qb#CA(HA#IyOW@9sA zFm49y-!h`8kC{H%Go$;CI*j*ibgx)r8|$rbx$9@qXc$o<)vj;FX)%@_fqqi2|HKe~ z#w{PKjXE!atftOdHb(K2E||_6Q|XHyrwnjy#pZW^ev(k?rq&`inji)SmJEvd0tL3d*YwdQ6i1SjCH*9n8MMx};3V!*V2 z7?gZ`2}0o!s}y4|popmMsxnBBQ+ZS(q%suzj*R{rTF!@IjlOa<60DxFtT=W`SyK{e zf?$_eXJ6@O;Y=HNbDJ)VoQ30S>GP2DhV2^~EI|#%t7L9TSuR$#HVMXClRE!Pzc%Gd zI6IlyA%-H(K&?Z^%Fw&BC)4Bku8baP^oBT^Xn8l@{x+{T7=_KCpK^$x*+^VSMoKbf z4$;2|nIT9Y`$1`SOG}hL?$Uw&EPROs6K}niu4@#{c3OEhHi;LVg}l(3s^gLYm- z4^I}U?Njs*9p$A4Gw9gfpbYPdJQ3uExehHJ;jKX51au@S@(gi#l*3d>M@fcxaz?)P z%jDyBSaW{l>LloGd716r)~M6=Dm(0nih`RP%o@gfuwUflFmBk`~p zM_uNYA^DOB%5DDx>-2~7WEo#}9VCmgCti7e2~k?A5M}sBrq@h@hZnb;8in=bn2i^f zz2(ue315UvOib*MpS6{Vlj79r7pL@eZySA$0o`Gp*_ip*`T*;RV$HlQ!=1B}Qrb%G z_h|RS0?9ypkLRlS<6SwjM*rj9ov_2&Fw@?f0*bP+&MSSNi@s~-AbO8 zSl1cZe^y)^5vLB2D^jpwtuuahe?XKNuNRf$rM~RHJjm(qp~NB!7c`s-k)!M_PHT9E zXW=2Ci`yFKT7r4?j2GV>0Tw;9Va%HXUDrj=7X)|H3Xy8fbVA?n&Y%U1JG(bGBU%so zb4HzEG9{drLg8Bv2sQ#!-Df|41|cp$*Zkw6J{;ZDruhw7OY^(NZdfj7s3+tJ0Jsq% z=x5bYF`}cS4boAd<5p=SZKd3iM{EbTlBE-_$|iMditByHSe#$BWQKmL51L*EM|tTX zq7}nXPuMzVue0ve-BLa5ZfmR1MU4jeO2JdrQA?lrn$W}mO2?e2blmfN&)BR0nv+hB zTFOllKNid7$=Fwt8H^029S#A9%+hAd>qEaZqGtjen$4|O@LKG9`H6SMD!ib%h+q8H zHPP=NiL^(@9r`qt5k(-B^Kr$2Bw-}4Xxn|`dHJZ+cGOPFfVuc5<6}*un%t-bh|Okc zwd+r!?US3_;kwZaXOn{cZPebHa$Vnp8z!tlfQi=Q1*{0_i*e-SjJW*V`0hoM#;h3k zb?8P2t0P+Fp)yZ2^F5(zAq|a)tLoP?BBo^fIn!=yvtX?61$U^N)3_GF?0b{SAfew|8eS zGf1f;0+O{zmfMCFs;`W?ShuBkG7x_oO~|W2uW%iH^#Y#SuI}UKhUQnFJRJ=-6Q1TR zTdAMSYDnsKUt&x+ErEW_Yrcs5rcFv^#RIxWKuP*O;@UsEjUDTpdo$ijLk$GxxY7Vf zcyqWRbcw8WlK(AC=S1gW;`Nx4mGOZ>>j4oKbs>Sx};`Gk)Iq z3!-k@IXNm!3979(|I{2t`a>=l|BV>yE~S-+9>t*h!=%99JsqTZ6T{zGgQyX~5?Ox@fAB zY-V+>BG3l;=;eU@L&Sl&?lj_xKv6G>i+ zGND`L{uUh06}QGb_&W$Q$fxR@7KS1ufkXSo{5n0XvVXlfl(jXP5Df?CoImJJ9P6?o z0B|GlEz#!i{ZUpGJ8bEJbe)BPCd#18m*c1WDo-Ptt?aCD>MWtP=4L!)1lnMAMc1s! 
z+Gg2}yN~|ZGxJ7cYrrZBfLgkx!$xYDhIk@izW#92jh zDfZ5~9bYruYQGG786ZHM3FnyR!z!PrY9@(IsJ~l!!=J+UqUynyf%1>H(;P3<0#>&> z{V0E>Yz08p@Jx-ZRtM8xx=tD&J=YY|s$u;_&lM%Cz+#;m-tFmKO#xkX27f@b23`6SigMw&-_gF657k?; zreT&)z$DA}R9L9%z0iB(+q2suQqGG4IzCz@ze>{|Ug=CC3i;i9Bu`Tt#4i%xYtSB% zPcvGt*(f$VbtI(f*cL4_r2g-3lK_&PQ3j>+FMrFqhRNj>?uP1(lTnd97$VEhcLW?<`$txXfO z{RQb(CX;R=&8-sZ5&0zxCqRCu7w+G-;m_B`Ev1$!x+v|1+i7zpY#3 z2jtO+%b)fC0iBYlYy7)+#=N1in^M6&kz!qILBM13ibc;W^3_Y6LKwD8N;0cf-0b-v zlYa7$BFBJ%E-Q}3f*#>rZ~ZB6k5WzYgjE6++1qp)x)XkN)=iaH(sBvTvN=B(!2xbn z>IZWqzl}DCLH!iowyWt*`j{ltNjJ4mU8?0blihK95+~(j{oS6`kOLe1t6WC!PnDCv zFFw`kav*ZJu16Nna7>P77DK(RHhm^N?9`p&HdR?SeL5df9?RRQ%z@nU+)Sj}02jtu z!24CK>crwIIp$meK@!wg5ZGo1X>sZhgZVJ>%DnPT`JxW<(m>BWhBdg-Vyfwf;dt>k@_o%&bysvOWw z`#a!O>*ZQCowE2};H!THyhcg~e%<8)=J5ZHi~TF5l@zkT)2FTZ7vw9$Usim%Y9kfq z^l#MGzoKEOJ@5o4U;F;08GpgMa!%A6Mg8XuR!Q$1awA>-Pq4BghvZg;`Tv-Mt6lK! z|A${SRvznZ{0B6n0sxTrUY01@$I5>l8kZAnK)0WQK=(KG?cW~iC@7Qa{?n|)TJmaQfscrCIyhR^}$H z`|zkpMd;o7w|Ea>+Yg1|MFsx7ts&5@0g^fZCVIn}rFc zJ~pDaiafpYo76eOI1hPYS=#TEloJA;b$>RXWQlu!zbT&QZbMEa(4(uJijUKoD@&ML z+16s9H(E(W8mM!)XD@hYd03yw8vBK{;g=rTRJNiWgP|S{RiZrzWJ0kf;g>sVtmfj} z-x*S6W?UQr}_H+0IdDxNSO#NS~-=Ec8#?X1%yT64ae=jC3l z;d2BVC9ZYLR8a53%{ZSrb*{Nu6tA6b?VG1(LH9*Gz*0IW05_@$iT49JI!rI6;jlJQkV6a*x^E0Pr*>^VX$vS*2vCn$Tn=&Di z+7a~LkIds=CQz+GjkVS^G-dPBkG$kmhYMg)5x!RG&&up93pvPq_0MMJ7LBJHw@u2A z%C{%9;V11UyR}Gsdgk%m$qLx?A##}C5*k{zDASWD5pV>ble1`lY!Kr-Y%q4if|W%R zEQnFx4?TrF-`c46r_sQ0#JwiwiLTV{hs8EFSL24b+!!l8R5*Nq@`!kLmqJRHeE6|fQ|cfTm}shh6Pf85r`G>c zg=EVojIv_bjORKT>*TgZ@MO7jrqRTgb_^I|(||=2A-sId;Y!vgN=5fdu8Y+L%p}<| ziQ;q_N6@>}gk7T~7`#s8-SIB^3b)l|Qv1t=>BL7y`PEa@*YYk~rJfm-M|Tp|-x(jtfB&z$ZxHSO(blxTR-@hdk)ZB0w`0JdTSX6DNI?(#8pt~kZX%z~01 zBLb6l!(I}{t(6DaGp*&9ax-(eUPfY1YV4hm8p*tQHD@1Ojk!(BL~F~$fUXd{uL^km zFM*o=ww~Xg|3k9-;x-76xH5hm*RMguozsQ(%!=pWapklPS|XAdzdQO*K^A(h^<=(d z{Majvi@ragp0V>#zip)Xi2My-nYQwb#gpr7^NaJOlW0IWoO3`7z}gXQX?|E!<6@`6 z9kl&p2(jt2q#1!>7lIPCY)^|iyKy68ArB!SD6PExalX=B|1LJ{M-L<*zQ~05NdU!} zM!A36*;FxSf zzs&&D6Z8wS_rCuxH9R}Kyj|}vTT=aN<7$*COh19-EUX?bzA?Jz9>_a#X)64j<+q;O5&UmIIBIHdbVp&2alNOE-QfDW1QO#bdjI_ z^JVpaCcpT0*>9|;iQfzd05{V#F0NZfZvrRJB=ZkQLDaq^i-G-SLaHuWhT@yOne8*EAmz*z|}jn)N+z3gmS8q&3&3%jgvQo*UHi$OZKU?J|UeG$1 zH84V52j%2}?LPFT=9(xIl$Ycqanvm8PnZ&Q=V_YFteay7tlk9vz^k3iv84AZyP2Tp zoG26JwbejzPKiX~^mYA4rmgFNq1laC=SH@&rui>%Y8u!z^=zGbrf|A!r-zu|CP1Dr z8<6VQlfp_(4(N>WFc~yodHJJqV65Uv^wjnKhTC?$UpE9*^+TUm(Gus9?0x9#h<>mq zcEeti2p)uc>oyuMsyd>X+Z&&1-)_g({Qmf(0BmUgby2yc0_+iD?4AX4&8k^w@U#ZAK64Wgj`y5sI!dhQ8m!1N8dw;)>dYPXdY6%N6Lh2j1KQUq=tfCBDf`v=qqX+!16J@^9}lS9?rJaA~k3^V-q3qF{m zLjQoY{EiiefDevd0#(^~ppO=~iZZwf{sY1TAdp+5KtL?;jKu3&skkP*p#UN?hk;v$ zE+5!?1O~X5l&(6LF>F;pBSifE!4i&ZS%5)&=Y~RlXXfUm^bJLDhxsJ~mj4eZAXWZG zS{2Jr=-+lKk1_uPx-aI3bq8`k9ty8yUXvW*`K=@l{sBb*JO7VMa5GPT{{gWQVod&k z3d}Dgjxc5n_xyhOq5fNY-x<|px2+3;q5`7y4$`G6(pylZOO@WD6e%Gf9i#+7K)Qg4 zbfxzidhfmW&;x?>gkD0t@AvI}_PzU@G0xfho^yZQ{Uak8Su5*Znd4paS?`+9d}a&w zo;_fdJtkD6%w1rm1LzLJ(9Z#08sUiH8VP_MYc%o!YPQA#*r$sB@e3B%#4_eHXw?Oq z3R_?MpR+X6Ukxq$FFXHf_f z%;Vk|p_;J`DqK#;-h*|>&2?}u4f&ccEJD3~O8+R&FzoP`J;z_DlK)+3_@DYc-e03R z|5M}rcgMl|>-i|&`avjDH3-0=VDN)dETx^1MwoCUOs%S>u`cBK*!rX{bDd^yXXhN= z22LyvLD(f-o7v+DjLWLDXtxL@!1=CEVX}!@epLfet&L+dw*hZN*8cL=U(@JWX82`> zd)%Y*#Jp)AEdkaJn}Wh9@3Uq3tvY4rVEGlvik@D0NeXt4R|l;&jW5$?kPq)o z#8pexb%Wz&=^rA0`iBSEy;L9{xgU~pH=I(eQ?vu9Z@v2Lh3`??+Ctz%jrjS&Zk(FDg5~&OC<`(9SRqKhRUehp=FSg(Z6X5NF&N&V~6`x20 z6p_$OfagIgl2B-Lovc0Ta6*H53KJ^t<-iVvzhd;c0pcSntPN}uGa>GX4A@*U!$t3S z?zwHmb<}T$z{;u`n#hTrM~&#vwl&v+Blob-toar<0lG3zWCQBiI{hi8@M3vN$W!Xs zkAd%|s#5p2!BxdwDY1zDQ4@u@jP4=wJh>j0;Qsp0IxB%OU_Wd(mTsb%%5R(r&e`Tj 
zAl%gw*eLC^+R((KmWDKrhsSBcV{I`tiD??C-z+Oh-Aj~N+1}Bd2?R)aE(tfG`I0?7 zOh#d=^wXT<`1M`itD?2;2Z10J(7jEg=7U)Er%?fw%};#HP_WkJkou6$1A5`!G1u;D z<)IWapSk)(=4d+ylgLA&qt=ZUl14oQVvC8IxNS1TAGDe!09uui&`;e2_A{WYjxRFtRdZ?$?7vk_VfHThc=E!_R#F|vpvJo!+XA;)Z zDj=dw(9z6-673`j^y4_!^~wiDHb25VoH-%Z_8zzjpPoF)gC~~orea6;sjON*v6M_{ z`s1F3{sdX5N=~0opmwxI-DZm$ZA$YwBjHyNKHrn@-v07<;WHc(JdUn)xJax5mcZrxdNx+NVphd5kr1H;dPT$?~c==~-bl30asPyK1Jt(ib& z@XM@NNOcYSE_+qvy`MN~q6}^2YS(FwD=5)D(N!o&&9Iqy=PU`EOMII7VA{2m+NkqF znaKoR&-aF};^9KUoi(S0;xO?Wo5vKjmL&?ZeGDj@g*xV#n7&I|tOJ(rd`egV!MY0S zWFmP+5+e1i81oDYGOMI|-*xthHQ;i~+}ro<{?R}si;mutXGF4l3qt{SOR3N;>Zo-l z5eVlD>tEm3h`jhTVicfe0zBsnAeASq} zqt3i#8DwMV?QM{~&CgLRhtYWglB4ieOL1->T%MNT)9Y#YhG-%Eb6X!tkKFQ4rhQ`O zJC98x#&1x&0)PErh@t}{I9Xbi9>Pd5bs{Zuq|(?l=6V$#POKm&yjN5)pD86uDp)8r zR?fZKpH;@@@5#{h_Kz6Kf5d`BwwMk&wWr)Hj{s_wu2vsg8qSRtRvAIRB|lMsL%S4* z`ZT{KA)i%BZWK|*J~Xv75&f9AEh9>#TUUj$`wnwz=9U;F=qb@5JpZ-!nFn-^XU})3 zDb{+>2}ETzM@&84BkBbuFIlXq9#G%QJVAtE7{X8cuX8rz-vo9Qvqe2G;E2zC`a~M% z#thp`t`Fn+$(5mxIl8Je5Ducb?bB*iSLZ9B&Vzp^awxju&}+rx@&;rFokse$R)wq+ z$ul0oQnD;`3_iz2*9uTOr$6(w6-im!IdpI;`!^Y}3NF~-gqV>ue^3_8~ z+tNaOZMtqxqsxdtrsx-f__#|z_^dXn>p7)9#K6c)u=R~>q%wX?TEJ%p3W||!8LqM8 zVs9nX)Ou0PsSGg4|pFKq2=3Qu+VJ|Ivr!Z?U$* z1yjV__t<@#6}{IVvR?aFy5hu&O9vTzzww#7#NA)e*=b>XO!g*AK}L`)`8^zZTs1=r z51XmUdJE@(J0^#Ml5f*xZ^{e#xeCfPWiDlV{OI6U5X>!SC@|u}>z29Wa~q}DszI`C zL=|W`>1Z`wMdRkHuRYC=IMZxtPSv$c^7_WV0sWrBbu-l7DpY2$px}oN4o)kcKTb|j zxn!fw_6VbB9&`K%D{Hm<+eJt{Lu6fJ6HwKDh3xv<0Pn9)oQpLB3!TJQ!6GLk)VYOt znp`x@xN&j&wl;YV{Cs=NvD@1^!PH+GW0l&pD7@Iet>6%7+u{l{>*AN;P;UD=$udVo zJxFz>yYy<~i^iC3LxzCzrIqv1@TBJ#gK8gb#?$Yb-z<>XvAE{*ar zyduSjpld$uR|$}wr*|hTF*Q#G*SM|HRMo_F9GL|#*M0XyykUH=P|3}O!-azjwir{B zw8vA)XZ|eMB(`;&&0X~D^}UIhwC;@Bx*B*;3I9(e^Op`@)GQGxL~;It>fCmJ9-IBK zjs5eb^LSx?L8#D?Tj`Ap{RiqNiZ~3#QIP@nNW@rj{&oZ0<3Z{r=0m{ud~`Ey8d~52 z$7%|Q3CE?QjhYzNo@OM^#VBgc^Zl~ENb}>GS`op_`2qI2c^BJXfnQ6K#qM*KO3-2}d@o8fyQ~jC<*qIgb^STma2#!AfWwIZ zPYA7AdbE#nwwXln-t|Xf8@l}a4Hw@)DB;VE2H3g#%(V^}lL0$(bMSP;-nlA1S2TvB zsY3iX`>gT|0W~$7V|ATNrsLiFiujDTOPi34e5)xvk4pWco-XGlG_JnVOR_r+p3$o? 
z;@JYy24z-n-Mk&u00TnG%5ve1YAg3s{d zOrTS?GI0{bz4+bfV7*z2q2B$&mmOZAn&Mo78Vko-G;EE6r*-t?ucMY{Pj~iw6z$;i?l$Gy>mMKYdn(ka=0nrB3>!ce_lL*UoG2X*J>u zY-aWic@D^Biz-bX8&Cp*6u}Rh0u0iXs+y|ib9i?+Me-(o{s?;>EY{ysrLsqg9ay?O z<*Gs3R_GghrexD7GX4H%DTc)(>1I>+cdHNa`}+sNgwcC+v;70r$lL4siq1#Gy}1Wr zkGvHjHv_`Bin{-{s{UV;9sLck6=WXGb<;}qYWPD z@GYn51VdCpkdJ(>To&X~Bt73?p5pPmDWXg2$rn1E42gIX0X)19Jt}iLrds!SFD6>w zhlESXqnX++mqUdUnd=)+zUyZq?%H!27Vl6?&mM52dHVrjL>?~}(xMxMHy4V_(KX#m z9Ai~?;3K!UlOnU1lD{wPJngV@%=Ja^pKc89k1qu9|n7**&z*^o$JWD16b(Sn4|UpqQo3 zrp7{&D6H!lGg;IME%*o-MsKRrRfAw@CvHn{(KA7FBCB~mYtfF+$}uOojeg7C z0cu{SjB66~suDuJNwAdw*pIEEZo+cR^%UL|M$P|Pq?|e@Z1H(#EXQI=Odc+I067uS zT~pC^Gm5eA71iTH5JE2aOsuNVAA0ZAvAT~~Ur;D?yO|g1-~7OQ0@O{IC*axl=|BqH z_e^Lt>ggIGvqf}m{BX&AmKp9X8DC{%er2wZZi8U`h@4ZU7z~T}evuqKNW&D}%(?Y$ zTS6v3Zve^wWUnIT?dp?lj=;8K+p&E>nZbLhyr1nKQtgiL+7XHqyf60E00m>DUGkNS z7Pj1-mxpYoNf)IXp^^yk0oieig)y?e=kA+T)&}QfZM0@_Zr^R28OQfr2asx0Uqm!} z9L1%5dgE_^G>`;gX5%7Lz?{7}+uFRSqOt6TUOpE>#Nyk*hv$9EoWGI4MWA~|=|oGcb7U2Cql zam^DtO*7z~mL8s=O$mqMYrzY^>t0OE{G-%qei}4(Shni~M;}OjxR;wahR4J0kta*i z9nL6%0U<%2&i=uMFbYfY?%SnMG{HvB`)a4AFO#pmo3xCam2nH4Dyl+CTj zs0R)D;JOHF6E(Xrv0#&Ay3S-7!}~Dw212~ETp>t>^IK5DL+85QjW1SoY$9p7!Jf%I zX^$(w)Sh2Ez|k7uTicW(BFzj}(v|EOXE~_ALE@K@=Dl2-mBTb))68JWAZN5^V5b>v z^LaSZm#(vrN2dZ_Csj68M@O~+A{IB#7nK!z z+P_+HW^b08$f60tgiRsJC=*7h0O|zQ$%CKhoAJeo-WT>ILABxbg~Ij8ZsK)^lMapz zKTBlK79r0@G0M5k@kl4%XI!fZ%*SHLic-v-mB?2r@X;y(5?g&E z=!aY5!w@dZnwlQG{>aZ6PCfku^g0T*58)$;fF2h;MjZ|3@wPw;?6Oy*q$GCuisRws zi(uc3L*ck<7IA9h!OuKjwCQY3i&~yQ3BhE`BsANg`&Gn#2O2ewA+!#Sr_ZVR`OKI2 z=UT%D46~FBylM?~=aM0ygev`Ql3g{bp;*T5@YpZ-X`J$@xZ*IVh73h)UYUTX*R z#GGAzOla~m(JjjGOuOpeIP=x@FDnV^njeTC@;1&pk%w2sa4fStNI4c>&JAmRC|>~f z{0hF`nB$QYiJ#;2CXyLEEI54sJj#5pFO34IbBX*s~{%ifZsj?1=4q%`d!-&w<^Dal18? zgZ!&sZ+Fkdu0MXefQx^3;JkQkZ1~S)-hLVVS^>IY_?abPlst$YbG}$ z!j&+ei`UbrRO{VTOy%5r%Zk)yhr{sM-Y$9;Y4 zhqi9Z1m~(mDmmz7xs^eIERd;MLdOpR#pZPe-`3di!`hTv zhaY*%&x#u!&3yEM0=nBv^2Hh@@;?wpo==r#tSW7oRh1nb{pRZdp=m~gJ*5w({_onH zf=xvBlUHWKFsgamRLxB2w0v)m!mgazZIYXlBEqt86wbuy^l)&IPHdN4$u9C z+zK4#% z)myki#X|Oh;kYm_Ic|d(&O1qb00tOlCs`-#r?(s!ch$=j*r^12)*Bm6_m3EvS*?eW z>5s&aC5E#Gwqt#i=}_VV0E9_-xynP5JOq_lZ{kpSm=E@RHCJ9A|1zPcL|X0uZ14L2 zkyVxAPFnSe&~; z$(k%>yr%JbGiz^utnSfnZz3r}u9MkwLo8+O$Ndre7f4YSrPQy01s^Kt;iZ3jOr16DN9hUf zya%gl9+Oi0p}Kk+K6p10!qfH8ry!V&=XtilW4sBm?uF5+B$jdUYW$yNS_Mz)ti4-!~nOCOUFMKm>F|%x8&sU)H zWz5>Lnm5)e#vAc6Z5fFhP&#C$s!$liN<)2;?3v4epJ@rF2M@gefK|jjh`|3l4p&?d z{N~Uk8i;7ih(AyoTJe6j*3pNI2k*s1?=Fa}Tb#ZO|1LXEed}plqJnsIHwZrbVKr3& zNvP`0lk6)o+<$p&y6t4J)aX$jI>ozZNWY-;xuW5gXB!(!>|9SlP5U)&AfA)g5%8G% zz~!h1prHF;e&YZ&?q*!hnh&mo;{@9F`1s|!BTIIYxRQp4yRU_IzGYmw>fL4y%5ou8 zrfH^0E&Z52*mg+=O{vDMCy+f08hiFSlNsm9KHaN|nqR|`w1uFPVxI_epWb&O?} zxhlm!c^}irY({+nkV>TPm!DuNvLe03TxsU!}T|C{HJ~Vqfd6?ugcD%j1{jl zSSF!vHc^RhybBDKv*oOAMHc4#u=SEDmo)C6LSVjq6^YW7u|@P39zAl1HwRb_Q)j^w zo$k#~0mX@8#b>qSMQd6GJada`g)~IaQ=ou$e-Nqk@NSSlkw|Q&dsGd|5@rSQ$kdz! 
zL>n)e&Axc?G5Gi*KK(k$zBCNu}N*qu)toXS3x zR&_7ZMbNHYjq-uiEzkXQEme0OT)ek0af)&+EDrRWJ&lbLI?Wi%rZ)p5ts6fnSp;uL z_693&h$AK073d3ztdx2Qn4J|@hvvqyy*^+hsOGwCDOG8q)r!~r@TCHtgFPm)@d9i; z-?Dl28%Nj-72UBY*`}AImzrTm>t8WmyuV6Wp%ToKD8ND1C)HnGVnyn}Q)$lSj_c&l zV)At~+b`D_C|xE0&!z%Dy@Lf>vf;AOowAKW2QG!9S1=O&Ei*D$ZLS zgL|hWxPCr!H~ITsirj=1n}0TsozD4E9SW<5iEElB{AG~n>VaP^F$Jz#{QMyG zR)gg+k*_kMbf~OG{%p?NtGEy^v1vUci!Mi`W z6^?>>S;dQOMhlHhzGSRlv3s0T&va^MGQJTGSS7z*D=s2FiGwTx znN8)|4rrNj{j*6-|44t#BsyVF5lsb1bFR5D&6#)KPgnsat?sY1_j7!(LO1Pk3 zM&I|$#RlsFOgT3VtkRjlxvfAX==ce2Jrba{QDsah8aR)ZGW2rF zJ-0TUI8ja3sy8ThAl26;pexJz>FgBK?LA}qis;@w6?86T(}S_FHHXI?I;RFHek`PW z+*Pk;`g;G%DcclahXGc`NZ$1TCJJ``NeT!Y%W(t5IEh?#=9MHNaFU&QCU8jd?+5V9@*8QPtYQkICBB4|ch~2{uqg|DE}2Xc@1S0G zl()~kg%>6GB!%QV&uYtT2iBV8Dj2pm<=Y2NwBWDdyqw#G2c$ z(dQROQ?aqm-pxj_VUELnrYG-%$hFMgTq*;iWq(;NJjhM7eg`bXOz1q@8vT`e{H#+y z-{tKLWu)k7b%gzD7@e^*#m7kUa2D!AnYY7|(Qw<_1o#naF?(id#?iqv!@?{Jma*<+dA zF0|~e14ijNcZJPgf%#v={_@By~f&O z^|p_&cebH1Sd}uJzJ}H}YX)~O(92oziA5bXl*as)LDKa-?yO*DOIPV`TgWe4Co68J zfG8&22Ir;&F69Htlb^QRSk6sYN?|*Now|cTBx9e#G~Aj3p3pIvH0JZjy{0o7>Fp)M zyHnjPica_>1R_NM;+qp4^%NiD=!~1#V_C@vh2c!ZP35z;Vw`Lf~8i3vL|-$5S* z-JUmF>1)76M(qBh&szUXKM?V+jIe9}-#8E3YaJi#)E@-9;pWrPRGCs5=&=%iQBWj_w><$KCndLO)zH7>S$$&d)rH zaUr2BHnS3)1RT%n?h=T1Ich1tQjyA=&UvGq!yy)WY}aQKYs`*VlsNIQ?vkNLr+$rg zx)~hgMiRjbcLZvE8=D!}4nRt5KF_x|@NL*j-TaBT|0tB?Tb<>b8IpFyayaINltzx{ zt4=B^Qu);ZY3EM4Vw3H4JIWfO4UXDaARDUS(`K;nVeqa+*;OZwplr97%rOr6Ya$Z6 zBUq(%_GV6#RB_Wh2K>`;botXaom{ThaVd3fU*hy5F7}Z>s3n_tGZ~8)>qOD_%GbbD zo`D?}Na^^Kspt?N4J0){aq{(bdUG=;&dQNZX-MkH5POD+X(0{!3nD*xUfkYGf|dCF za@a~vvfdWrY#ci{eMJh#}__&fJQjO}JR z5`!%~{f*;LxUYg)1_Alk`G=SoY@{a%;28M{ut?a}_6K=#{V2ai7{I#wH>m#S(|i9B zWc>Gi{y@szIy9BWj6+2dS<(d99B$S`ZIdrH_jjkX;5p7 z4d4O5QRg?#h1NfIb;ZlEFs#A#2&Uuz?X@#c?a#;6L92{C*DZzsMf~zwHzWEalLAwI zm3|R|QKce;jmpC?cY$32w+!q$-H&t|M0iYhHe(rCrDmp-X0r#0G&@5Rf1UDhLQDK|qp{bCVHK zk(?xpD6v~oL#Nq#lIPqv_ultr-h97r=4`0mYgboTwW?OtzgDf?#0la&z;sUEP#+*6 z0f4jMA3$6H-1OkE>i}SE3>*UhfEpkrVFAcMiUj-vVqgII4;lc(!8-tur9=Z%;GG2w z7oU>;_AROOQ?lP^vdr%ZPhHYAGz9OL9DSXgz5HCg{fP@SM_v8>ebnXUz5L`HoV>3& z%Q<>`%EKLevORFZ$aMe}$ab2^;3$Y5)zs|&qoW}s{N zbH0YUy%AWZ-Be`@A0nAhQF5|;(FQgw={t;iwnQee(slk=O@Hf_xE_;0JGou zclI(k{agC$zWTqn-O~?@{k2|KnBnhv{ar7Eo10(S;^^=P|IS{g{wUM$%I~~6IiJz{ zExogs`S0!Z@jv~0T|OQ^^yG(poIMPFkM|3_^m|@^aB%)sj*tJv-(~3LaPIrK{E-(p z1Ly(g!9O9u98d(50R`~W7y$g8;o$fM09|jNU|*PvtH01GP~)71480uXWP}utD=GlM zcm4gI1^~7ve&{6$2lXFuK4AGL&w%=q_eb0{9ssBV#}PNpA913E0Kj+!0BUbJ2KWa4 zQXcvD8wEfMFam4$s@fG6M!1OcJIZ6F$m z2krxpfD9l9$ODRiGN2Nu1Db($paZ*aNU6BqUTM3?!^1+$4e| zVkFWeiXG~c}7x9@`|K^q@ARXWRPT% zWP#)>$v2WiQZiC{QZ`b4QgKpw(i5boNR3D@liHEGklr8-A-zMINcxyGm$a0$mb9I; zpLCRTo^*|Lmkc1IBjX@DN+wHof=rjpgv^@kI+-t7I9VK7Dp@XBIawoFFWE5JJlQ(g z0XZc(E4dK4EV%~xS#oo7NAerw;p7SA8RSLewd7so!{iI(C~^V?0|g(2G=(~aA;lF6 z7m6T?Xo^&d=M*&*-4vq~%M^Q*l$0El5|nC`XDO{H-6(HTCQxQkR#3K44pJ^r?ov@v zaZ^cCX;GO_IZ*jiMN_3yl~N(8hNzaQFw}I^g4BxC`qb9cUepoPsnjp1Td9YrSE#Wx zOf+IN>NF-aPBg(ZNi_L1jWh!^%QRS8W?BhaE!xYpZnU>)AJdl8cGG^O-KL|X6Q(;! 
zXG#a7yG8ey?j_x8x&^uedKUU)^t$x6^a1pV^o8_o^po`43=9n73_1+f4E_v>48;tc z46_WFBdkZ{ju;+sI)XTod8GEp(2-3>8b&e3Q;hbEA&jYvRg43S>r6CE;!L_s4ou-p znM@5#<4n8Etjr3`Cd{793Cu5;`10`AC1Vv~)n#>LjbzPd z?PgtOqhym{Gh}mPi)Slid(VbqXJJ=nw_t~}XR^1j&vTG*h;tZncyJ_fRC0`Q9CGq= zp5k=ojO8rn9OB&P;^orea^{NTs^A*sI^-7O*5h{PPUf!Tp5Y&P3&Tgf}cN6L4M&y4RTUmo9EzFmF+egl3V{&fCM{&fKk0UZH1frkRE0$&AL z1+@iXf)53ef~!L8LZ^g0gdPiZ389Yi9X)$A;OMiX?~h`IC4|j|BZMo2XGCa4PKaC= zc_`8$f)W)JH5LsOEfJj(qY*nH<|>vZ)+@FzE+K9y9wS~S{zZaI!cZbaqD11OB!eVG z@`hxtccC_^E0QpQUrS7uz6UiOr1 zplpfkf*gn3dASI=I=M}GQF&|mWcgls!f}=39>?>KPb;t}oKuKUXjJ&7cudhzF;j6w zi9tzU387T4gjSYTc2>?-o={;`F;vKsChFEqYrN@zN3=4&o#32QlMJ=2=k7Sg_|{ZxA% zaunhK$%QQHi0C-!Jl9z{C3VW}ROzYB)5lNyo~}8)udA*brrUOg?2N&gxHIqdSoF;G zGW2Hkh4o$ZOY~6&Dh4+V+Rjp(H9DJocHEHP(9y8i5Oq%NT-doTBYLAtMj1wn=Z~HD zJ>O_dYJAT4f$_%+Vi&wF)R_P#h9(b8W-dxxym7I~l+yH~X{PCCGX=9yv))Urm!Owk zT-v*Q`f}3cX>&>Q0P_wDMhjbuVvBvtGnOfq^H=1rgk9;k;<18R)mu|pTUbB0-nG%Q zd1SL}t702%J7y z$2BJ%r-x2oowb}(oL8=ET~E3G#YM~Ify-A{h-<3rI!qUq0Ykf;b<1(vcQO2`bhgk`^?`^zwzh>%Gb#ExgUw2onO5_8wix%21o`( z1uO(=2WAFhf-Hin;LLC@_}k!P!Lh+BA^IT&H_2~0-s}iH8hSf)A?#Gxvv88|YvCOT z5yTzD@-2f~MYm~hyWf5naXca=Vkgovvhfc8om+R7q70%+qK`!TMvup6#N^y1zYDwj zK2|9B_w6( zf$@Xdhe8kI9_~DXKI%(VPR&W9O$$hye|-LNeY$A+{d8Q0YsP5i>CB2Jd{1JZ>}NS; z4Q13#OEA7g*}~kX7H>wS3Ea0k1{VX?{mIIes95vg5u{q&*Put3%v^Gi_D6; ziq(pXU+}$1Dj_WiDEV4yTl%5wOj&)oZ27Yaj*9q~z{`M_tFNxU8mly_?5H|XRbDMt z{iKGqCa#vGHnWbh> z@fXD}&0qDuept0xU0(B9!>&hfFmF8Bl-#V|(%E{8x`J9p`=W{861KUw3wM-vI(9Ga z&hB~cVfW)O+?b++69>JAmWM0YARHC$F_!MivZ z7002KuBx37`MEr9Q>r4Sa z+ukp@{(EYsQUG`wqNrza{_XMYUkdu^^B2qyM*P;szuv)?D=8^|&+(@_u?=9RCd(w1 zCL<9BNSR55_RN)jQtEmH(@C`P_+sV(UMNYqJXEz%}i7H+5ji90B;N;@w5fhh?lscxY zqN;Y{q`K}IJ$-|-hUYF_Hn*?@p`4?W^K}Ewz;gQj?@rlW)g~g>$%b!=id|gF< z+uqsT+s7Ones32EK=w|8&%p)W3c8 z@85|t;7N%HaSEU(BLN2!88ZL@2%npIQCsbt0=#vpYeu!Hx=0yxMDLN#GJhg~Dhn4R z0v`m133z#oc?IbcjP(Szj|e=g38=CCwka*w%S;4l#}|kIo@4{EHADoW1pk!B(p!)3 z+a`xyq6mS#qLT<5l1(Az3cx&&1#29H2t3s)#2-RY<+E`omp{rh@fqNxp)o{YhpL(g zAdLuYh>jWzf(Yo`G{Np57wlTt{QB#y0#77iERc*u;0q&+2!yBMB*A*!QAB`nYcFU2 zE25(*L~Zv}v9u=gXsPi-5=fjx9C6}_|`3+2bsBihdq0mlK1Br>W-kR`l7T#~2s zP)XR$+IhqTR%9I8MfF| zP&u>{Z&l$fC#d02SEnnQ=tI--BE?6kJ2+^4A8Q!OfG-K>?MC0+3qp%k#5JpEew#Kx zxFFxao}Ne!53qQX-F%U%ZcvX6F#R5&wD55PtprvbPPMm=%E7T#Kp&V1GB>*vv{n?# z1U8l4tG+RFE@+@>*|XLcUA>)TjV;f*C7Bzi!)B@n{C5WS0|l%({N|!}H)FzKBcZ$J zbAMNc>SK@4;G)ToWe-E5lFd{3g}Jj&G6(qaR!!U8*yH+yU+DTmwWg&YoTz!=#(CqT zk3!1L;R|i>y7jMRII%GTy{idI`Xn@{5IksVUxi11S9+w`;G|Y**8tiYjZxq2)zFF@eq*4 zd>{gm@Map+{*nofjdKr)nMAMlj9X)J3NTOGsrH_@EAL&jvxmk%pnXjP&5&HY_B7b_ zMI`M6WHJn)39TK+F{uwPd2c|1vDd@9pyDSoc4XVYNo1Jqc z(_NO<-VwWW{Y{vF=_c6|7ZBAFH=k}$9^{?Tx6*u>&|fDJw4;j!5bewaw(!o3pd_4o zDSX%*a}RB~sGk{7usW+W_964OcOXf!V{Ae4%Wp6Gj;6Q-N#^;GwkQ#3%7_4YJ}PIO z9m|Bt*%L(X$39rJk6N6@n}zAal(!2?YctBKEvjs|Qa{hn=ex*>l5ft_JB=&86zr~x zgm^(QQdC`#$UP!Zi@4P)7wR$IDsShu5#YUm*U)KfK#A6x&alDRxd`sT+Enje@EZ}Bjf{@h-H(({U)Bpv zlVp|nEF(m1>+PD}Q1Kk%o`Vsliq)10A4VQQx+5ngmh4MV@jXO<5vM^FmD`ft9;&l# zWz#J~cho(0$x%aBIad0d6N{7DoB1j=F`p6KG5qatEy5_Y)?~d8dz}D+Lbg^Z4Bj6> z`ekS>oPSK^Kw!E%a4gmAqoG*VT2*zMy)N4g+m3IIx#`T|7Ao%$?F!){q1Iqe2NQvI z>Tn70Q(v1KGj26f<32gpIVB%0{|UCkFa9k|pU$yJ*UL~Z$D?54(bW2jvimonA9B_~ zJ%=(ii5wH8N4U*JBAB#KhMJ?V7vFFX1BWUDJhF4YPJq2} zcvEDX+e7eOs_Mmj{S?s~TOY*|o~!0;+$7NUFU@hnSqU`B=$u9Q&M<52sWA|x)7s5U zxN)!=x&(VlUNm1yQLNNodPU9*eUw8-1m@=krZ_t}?HMtSXu-V(^z$av&9*w2UjfFf zV`ir$>6kXhOvSb4KxT?JfXIRA-2}Zcq(8)CYqAC5UNLFVn^Q%*CWuN*;&t8rDq@C( zOcDB5=!3IO(mB(UHV@)+OwSqH(GyN1u0sH@BXlYt^GV@KVCUqATH*^rVK3}HhH8$N zRx>in;y-XmQ}mxss|b_}YFU6=brooKThJi^img`7@KI!SVlok+tiqkZKUL0&)fTHk 
zZ$2w-r=Jy#xzcECkHBQw>%Sh4m&_spfw!sU!p(b6K?j4@*TBB~h%)IA#E3gr;8-Dx z`b`q?{(|eWmvmZ3uhtUYr@{kHx+G_xXzovvTG}S)wh){dHlR_gAhR#*B}cW%>uF z5`I?7+7I8%-_y=)*w(UYbN$OXr=QFu6QRshml;_ zD8|Cwq3j6PHAEbQJ5;{pt6Cb}ygWVb#E@Je3MbFP5y7Chv7$#Y?&QmvXh4yn+hCH4 zC<~(_P*3CIL3Jm`aAO0AKu6CuWWfl-Gu)1tKumHGhOn~tmeZ!fhxe*Jd138|fOKCP zuj-aYrM5Mi2;g|v&}tnB7R|8I@OC0lopZO91|z>1^)^jF2=%V)c595%r@lQ{)J&5> z%3HRjkUY+sIx?1N_uI0!vISL7hEjd(9O4VMKaOaU2*rBwbITJt+Fx^ zH9Q%ucltTp;-rn+o!NKQ6m1nl)&~Q7#`q`7CX@1e;i#=PSvuQMoUuS8olHh;lmC83 z)Tf~)L($6DpR%nIM+A=v(Vie}O(@Mir2jv$Ab(%%5yTsCH^Ua3 zX!&-Q9seBZEYAn$JcUVHkmuGS#4g$eXet%jb_Rrm&R83`j!iz$Sn_?17(145b%mV8 z3IRne6z}5M-y%As@_jYe-m4RVLfaceAX!p?ZOFv10)kRss>4_34sPMY7K(=lZQD@U zDR5SMFo{W>AkH_9z$`0G2$=<*Gj8zbN4>ncLu}`z=eZ3zm^0NC z2WvNp!0qn^CjI#_-T=9~A6;my=LbQ>!l2t@`$T}u3xZh|{PQD65@KzO;X+2-XT$=_ z5OSq#i$M7ifqnz|)03l|3o4#i0b;;r4#!YW1eP8m{`_dIMFc9ERn*<2 z(}}EL*C8>yV6%Hc6UAR5szK(V@c*z=O#j+Trr$ddHS)En+~qH#%lpgo{{y0w ze$%P|-(P^VnNTA-g3;Poi|V1ST#nQcH|t1gc9(twSMlVF#}wo)OK$4x{nEuIH*;HE zF|6qLdFY1pf(07cUS?xHvXi{*(U29O!S?9-#V_cvG6%&xDRRx54-d!d1!-YI8%p*r z1Y|ilYRt#?(C$T{bL|EZ=FM1cHsRMyppi*R9BXMw)B z5HB1nx0x4uoa(cM&zs;+Ow^h@m6}rd+2rFn82%q4@wb7uV;P6j0og}J<>`T=bH%)= zQd@ZR=%D{7*8EfSB4NR9DeAP6!()A1Eob7{#e9s_A~af%CzRxQs0jvK*xf?JE4Wjp z7Q8m^XC;`9CeK*$^IP$AQCgVglT!aW4%h#KDd+Fe{qXOPj`;@_L{>mjSO~~-mFrnzg3hPNK$)( z+F&Ob-%@yJ3>u@wh`^#NVRje~ZbgwSL;$T~nRfL9!5=dJ0Esf$np)a};R`-tZew*| zQ(3inp80DW+9GIfrD z{Rb96#R*LJ)74oijNW1ey(@F@{$XlTn7gOIeCFp2&NFszPba)06TPL8u@jOA%dLwo z%!&|BNVx#8-6EI~0S|eM#MVY1T6&KQLxu@j485lO(I&Un~knK9PD79&Sg?;0SlCfySWpl{1$sQqV;(S;a3p02`GfLwXs;2#n zqfDjvW<7h#$&AsrcgsekJzi7QQ)LXE3>2Hd3F9LXj!9FF^MW`wn_fYd8WU1}>2~!4 zG<)|ZvebTJW|h;DbN}lNx76`K)`IShDj^xx%dZSQMyPWi^%w>0w~ z97CwqLLctJR9&LB0Q_*}lzaa0c=zm)>X&gnTgGvVmWdw^f>eED;$SSQ9kJ9_ryzs- zdwV%w)$o`!A`qMRlcEgmH!VUNB@RG^+l1Z;yY_oTKv92m;vSIU5FVUCXb1%Z(~?!5UuXE$x!+eL~<&Et$`*rw6;hqvT@r#J#5mF z&6V_IZ~0a#Y=y!u{iF84WD-Wy{`Mk+#-46YcVqXXo^+v`$7Ljm`$ zt$yy83^o-#Qg!SJlPEWUhUP>6n1+%zHXuXZShPG5U^U#;{OM+j^K)v?0#$ zT|mGaSL8QJaP=uy0MB%V%QY08V1rN&!XRi#j)J&&bHAInsi1=Abf=Bk3uJa%A}2WZ zh6>6bxgSjoe)7@IW;stp+l(r?9Y57T&Pat`!LixTUB-C7SVbPu)F{COS=D*du3CHJ zL{pOuIEtTPVk{%t>}~SZSkxmDi9l05(*Ua?=Uo?eH<<^!$Kp^GcOlhD?Q=1$%;=yv zuYe|FGjH;*A<-4Zu*QJ5tgCcKGI@|bu&pm=1Uj^tM zbl4w7wQM4v&0rpw*tto&CQLc!k9~->a%f0jQmh%`T1qzWDDl30g2L+dZVUUt?Wc%# z{brHk)g-i$Y4qao!tz#U5(B*dLfy@)Q_X4N!pMf|h-oKZb>+&+cD5TO%%72S?XM4c z7vwiGQB8}Q;lfquteUyb7HG$Vsm>R$R;kqYvsWze+#61;HXlD)zkYD{(}ZJI;+M&@ z6gP~!pK3Scwz6WnvEiW_m`h8rX2z!&o33z;l;O@Bn8wwuPNpflDc!v4;g0QevBsF- zdrz-9CTJ#7e;a*$wF>8kZ_yUSu?}x&Iuye#yBd2VorXPj7^DS8n1h^kMpE`L((034;l?_@7BJSg~P!fqMIn>7UCfshRje@b)P+>hK zg_BeLPE*Sx$KScTrZ-tlmeZs4Q-V*6CXbv$7th#no4#NWG(fnht z^#wJ{kg(CzXI#e*q$I!e)Tw>RjeA@5j-ZI>JBqf%-`+y*7>& zvnr(`1aa)&P+=l)U!@R#BZ&x{Vjmrs%1y>6dt?o%=nEv|fzHa;zN>KPAlhILtRm_W znzJihn=p;##EA+}HlbXS_q>(*u$L<&rh%g+d%Jq%uOkR^O@7T|sf3O0LpQ|$H%!x%X zga*vd&>0VhLQ3H)XGWFX@43-YXDlBnrtA9F0F_ikO8n95w)O67U!_|tk`-g6oNuV{f0M(&b|-f9fInYdDX}FK#@ikdTg*n z>VC}97sknwJt0(3dka+;Pf!ZX+SeI!QE^ma2oLjT!uyZsUMpkZ2F=`m5 zZ{^ysH@t=|(TK1pR!TGVWBv4KjXkJ3t#j6)`24A9M2Aro)^YKA8k&=!xxGvTuE#U) zL3TTX(!tMQ_P)V(tKec>#F|o+Mczw{S?DxyPVM`aYcqFP6$= zHaomzeJKX`I_p(s}tbHGfqm)hc?mmT7B%(EZsbMCjC?FVr;j12yMen+%Ye+ zbs#&({@mH-Z?RFy_kQl*B0~Ex{yJ#E_y$@q%&YYH2I{T2=Kkp5q;-DCm^X<1Kse5B zqip(j0smjfpxU#{bA6G?Mo5ouJ|uf(WHe4nb%Fv!^^9tmKF}p3rW?9P)e25~^3Yqm zSTyBx}#3-@bnadJO&LWa%FacJ1C1-@Q2ou=*H{HfY_27Ib`P z2INj?z+!)ks!2>nd6p;4#j~Jb<6S~v?NmhMO}WbFOw;_h!Zd$->!^>25cdTW2JpO9 z8RD=-je`?`=m)5+TV-X<7L5<5M!e+8@7=L3XcM-XsNPn1mH4%j@BMw52GPwv@eHzW zX?x%)O2pexYgF(0!a@aq_;l-$3>#0&ws2{5cGjXhZ$C_~rUG-O(7ZWn=Zoxo+`Dw~ 
zr1g!@mG#TG%CiF9^WGR)le;)`)Zw?SOBk&n*pl?SpiXT;)NDq^L*%4&&3IgfdEKRA z+X)*k>*>A+8@FuEv3~kCb#X~BkH-YZy0oV;2m%8rg$+i0)>EWCM6PaOM*Pj{=g+Iu z)kqc(!cbkZc>0)s!?zD=*`miNnTjMdqruZ3@^<{)9b{CM==n#~>KKpUD(YuxSeEuA zYA|67B~Y5)MFdxH-~Je>LxRd8!isj zvj*Q+?5j~Eng0w92lV8?jinjCQy5bOVgW(_HCwTHNJY8jF>ikoW%muy;t=ijPglBw z_N**s&FdRHiWqs6C@Mw`Ay>C1#i49C8FUHe*f`c?zEq6~uIQk=gjJARyyARtCeuai zGSfJzSp~NHXrJ4B1!Dd&@)c!XDZJ@Q6Uyc#y1Tr><;4`krHeNF8HW!?tfvOO^jeu$M@4WdrqDD{{5ty&`9Non931E;|~X%1J&<>qSf(%?EXD@f$A= zu&st4OIqlOm5)~mQzXc8S7T>5hRo7Pxpvl0$oZF)6s5S|6W3_b$My5c)RC%w?8m0N z1iKeab{AcFUD>WKlic{EM1($q(Z@g-DX$_pxw1EbX`7YB1fX-5Ix+;KG(*N}2_nc3 zTPuytjl)-Odv1DqY(R~xZpzmQzYKO6381R8;?muw2*95+zkKm7aPc@WPhxm$RWkQJ zI3a>gkN+Zs+acO*wABg22mt8v7lG1pDlcvXnXSryDx=eJ$8h_mS`1?TIq1u=)h$02Ge6fpM~Q2$8L?pJYU7pxxNj{1O@&laqt zimEmtcMa!7Wln&{;!Wev(b_RVM+!pI(5vI;tv@f`e05_lv$o-*-kTZ2V;2fNUaZ|Z z_Qha9upM+9HQD%z=Iw0-(7h4s@}Y(cf=5D+juVc~e6i%&eWIRrx4g`2(^DjI^-8r+ z>6zrxi=AGzou94j^{1{q*eBal!#9R%VBIl!JxZZ+gUH1M)VsQvcNKRj#xokDHfsud zi~GG^-o0f~dFqA-fAg5kw?|V=C%=ZWVwpl&F?9LQOR(}^QnR_x7vdL;kTjU^J%Y{5 zg`xNFZ^pEo{akmmMyW@h_Z?zZrnKSIF*yZY0?ith1ujjX*~cD-L8@{hz4At~r#^3+ zjs#GbS@g+mRTRLS{p&1lu({E8v)%ldZqBMoGVp5a}YFMP`k%C|i6S>ZI)Hw)2&fCp>R&d8B+H@e9nl!^Gug zpg`ayPlk8Rm9NpZMBv4h)*kV~HTt*(6>Ye?UiuV`A6!88Bl(N6{S7YP2F++Nm zF6hU)MqoA(>+PJTc^)-?9k$IEWyCm(v@3`~l8@()=Y-0D9w{9C_^-$v_wlD6NNh`$ zR@EOGh14;S`!c*vev$KVr|;^=bk0Q0|EAX$aIMJ^J7oUh2O$4E?^<~>~k0@SwF zYvATbYTt6qLK`}L{cPw8vq4M5R^XRBEyHyxl_dEDO)<(+WttkJGe{ZM^>^wtb!tK$ z@7JFs(3{h)kA72OJRoEe4nRMyyLqoiAS;L7*w69=;qpNpyNAs`S0VXx(vs?noX+z9 zQ+AGRuu)7J{Zw4i&qpfA?%Z#+azlRn5tq{?jug?V>qPsSaE>CR; zGs2%o?9MXcxoSo(_t!77U)WnbYc3Kg;8gZX&Hp`(C?1gDKQw>SKLT0W>?Jhi;5l}s zeWO%4omMug==bOZAeWVMCifBBcnE$Qd~1u@`qB3L0;~<8KLgVJg#fG~nQ$7OOE9NI zib2tlt2SHeS9wk(=}2?{W-(?Y3a?hy|6hP=%_^mLP4`hmnl9chzLN2 ziNG9a+eEV$+s;Mf92dN~!jJ@JcMP}ztlSbC-o(W)ewsOzq$tsAa6Tb1PuWjl;sarz z>AMOMfvv|x022qP+9U!ihY0Lv2tl|;GA}(U0lAJv9uRT}2cRMGZXQ;9i-0HuwG12- zi_&IsCm&zw|7T@n<-4mx?kaz&xktIlyJlGFm_4hPs@5*F;VjC>WI$Dv8|GqAGb&rH zky-E9(rhRiF_Uj9Q+#AjA(`f5yIT1oC8y7Tseuw5!#}I!-(a+%f*RA`(W>=fysMRN zL}*lbKNZfgIN6n)?&%B{vrSauEBew|CB`{|3YVQnLR|WfYf*KYk~c)klxM)pk3f#DzSuS174d3vF2wU=wEm%aT7)rHxsUi}|U1J3E z3fPceq>5I?a*Ji_M~_K))14dCv3XApLR6`@6)vaY%9ci2z zCWbM#Rc7K#oy9;N>*CC@ZBuv9=bz!!3L?AP;^Y*G2eQFR)QTc}ac!0pH4tYu1Y!L; z))+Tor+3ps?wR2owd4GQ#sa9oKJH~chlk%7eD0w*=iNgu3Gz+1XKnbO!CY(&f!NZM0aC z*eZ!~-r;B!YKa~Vh0u&T*_?1NB=_-jKf7nbWOH}qks^nO1@BF5`lym4x9-B!lpkW#JG$8@UMnfh0d!7TPr!f8kj5r{KJ z2hDRvH|svdF!r<_A3supsvYjJ{ARye7ZUX;Xv*uEo7|?XnuaE+JIrZYc4M#WwB*^| zlM4{HiUiP^9_dc!ooKyvr`fS+Er&5wjI*Q6g11bQK);9vgMi2nrowD(45gYDl|ubXXFJFhMcHj zH|^tNN9`;x)K}Lav>6oN_Z)lf~sYP*Zerci!&DHF2KnNz3@-LH*kfn z2O&3xzZ$(#J9y0dDq+8ykD#NTn{iZAo<`=`NcrW3=rBe{v%!m0Z6VNDh&%Yw4#y=N z%LBa!iz>K*Ww=du-H~LQ`0FPi)6;>9#;(ojvV*NtPd%C6*fN%CGr^_1p^OB<355FZo?x=+gnuq#foGnKaJ613M5YYm{;$=mu-S`ngm=C{Kx-}GYW`@}jl z>wGx~fylQ(FDPbs>qlesxVdNe0D(E+&OLWCv>yKBfOBzMy$rf}i((|`X|<0&TXp>M zeMs)A*QksjnUJFv$~B=>Je&&W3yoK|Z^7z@dS)(C#XcJgXLHl$d~oJft~Y|-Rm_O9|{!~MPqeY|f-NHQUE-z*Ea$Ta?XT7=%eWNSdn_Y>=N#`v;=XA-(?WN9AVB!nHyCGbf zMvF*laIeK%XzRv<%b0opN;P?rd)LO*;hWbckAB4Gn=bY4na5X?gJF*^o}@a|dHzC} zUWiOITgzBXgCc1lXENx*m^K&q?hOaTrNo=bJ!C6J*^{-Sarf5vuJk?Y)!CIRVP?Dq zHt~fiP^otbULvwJ+?lSlI)bcvXgyEJe4ZMD0o)Svt!5uf>mrD>3_UO!3^gu-9VXs$ z@?UwYmd&3cb_<4nv@JX)E1Kvi-DCb5JJ+l{6N`M?_EBGHj?K?);tIh@QxMI#5Y%BZ zVYJZOfmiCbPRt(HK0dlL$2?Gs0pEe@3<*y!o)BH%Q_n7`oc(C9L#A|2l+0R*S)C0a6 zL&t<&u9Ds8>-E#ka2ul&jJ=P$fHGbIM@X6U(pEQ=9e!z1AZMwc0n#DBKI~o2GP`{m zUUpvF{$U&nYCXg8WY*r0uBKLS|K-8an6&V>8H{%cOkV{lwdLV1-P%H_-aI&SG$N+P zPS{gFSUFgEQXEa*$7ex77~o5O=gwE6DD>#@%Rpz(yhC!ysIEH 
z?LN+DpC5<5@AsyaIscW5=E=$-_u0IJD*b0hPiiR@!;Jm1l~Ji_OLNq2+0^5r4RHgO zHL34|DjQPYFF2;3Q;36ei^aALuTwGu|F}fV2aZ#ztdGaoKm}t>m z)u({3kCrh@MomO#QEuF82R*inB)*NgU1^+B_6&Hx@&7? zt9&)OwmIgQQ*HW{l69x#Bjft?c@Sm8CB~_oy;!DD#X8h%0VQ$KaS|6_9W37w3>`E+&hvu#bizEnudlo6A9zgKtP*@X?fz&l-Y13# zoKDG@yj`rvcKJ4Fh~pK@c2%Ar z*JypL0T=m*Z#)Ow=OxVTk3eyD0^y+1MOz6mXNo`v3=JAU?)&EblL91Hz8$&jXa7?s z|KP(I90ok>3Ldym!KW8(uv9X`A*9!<> zjyOT%Y#~&oK!Lb(A0XdfL6t|x*|xvA?w0z%?o5)0K_=b7UEXIWgd%szx2LK?@dhmk zbCCu~_#=(hl4dWJ|9R`Q|Dj@opQPzuZ94FYujq60&+LBrxj)KN<4*o>>4Vd=1j3ae zSBB=ZtLwJK*Cl-c+Ow;ahAKmYdPYIyrR=?!&nsQE;d~89l<}k1g3{@usqcq27s`Y( zE>cr(%Ut5>B-LNwT`Fe?Socw~QZPYY`WD7iw@S5JNMmhSgE(qvr<~P1@V*ZLGr>q6 zY6yqFtxY5mQp7CwQhmHWBKY<)6g7lInL!R!HJ~^-56A%r(&$TtJJyB{{0S1a)4ps(f5G9I$@YAsdT;D%;f`%&)j7C2K zC!%B!NWGdO0tdWzRRwYJ(1IOPd|4D&EiE(*L9i8^JW+AT__sj*7RZ15 z2#ID7UlqjVz^3$oQ`XQkrQu%ad2JHH*ldZlu2TT;mC7+qhCe z4yFj444yNBzt|)T1S*5Cy|N#WVf6{4g0+Ywq^Cy3MC2ZOOh=>LepIsxXKbim1^ktq z*HMec2hKH5m~GgPK5@vJ_L;aaQ+f-swj0hF{xPSDYQuEVdkS0}Ve%8L)7|0JpaZxt z+!r(zs=IyRNzDX*7|+7zdB53+XTIUp`;S;EBOh{6_!tk@?6MvULCs||@_B*#fV&X5M*&91Ght4*3$TpZgEuxXT zOrg2_21kp(L(s%&VMbB?z3Kyzp+@Y55zUCj3N91T_*`(zU5TaBpS@70e9IcJZNBkl z`v`~1iT7o?&(w4B!Tl}xc59Jf)yf9VBI6xxUW{GgTF|~AS9O-9^-FEz))~2B13$Kn zqGX@YN*9aUn#so%rO0nb1m{di5NOA-s^RaDoZw<`r&V&W`BIrggyZlsIPT- z&;Jkh-aD$PZ(SQjrHC}?ohTqG2q;pN77=M8B1KRFLcy|N8QSIAs7toaj3a-_lNEMs2wc-^M-P-7uzca&WI=PDdL!5#EjO z@{V<6Z?`Oqcg~d`(OW+~DoB0Zn02C#ANYKVoZZ ziA*UX5l3%US5>R{?KdYDjy9gzOg!SQUDloMJh!rG`IQE)R{HJ34`@+RSUZFjBl^8r zZg@*m7&Wod*~~bup3P<~mCE}gx$MHGeV~Wq(vz1768;YM!Z%-=Pe4h=&>8&g!%QXo z^GPwD$wn=|GSq7e9EWFKO-c1A(h&ipn(xwU4_9%HoddY)Zts&PSx<-{7CmjU zn>}fu9d+UY))ARK%hJuakAFdxu`3uTTxcyrY)`vbd^*PCkT~A;7u}^-c1Jh49B*=! zFjGGsf*X9Pa_BpV{eWU9(r8IiTsW1RXv{tbJpDd{TGl~&`sO0}Jv-b>Cj{SdosWrS zB#T!F-UKNY6eA#wU<^oYOj5a{x}eNdZz7u*t!;H8(5A|mLtDYn^J=YLO-R*!zH@|B zM5OpapxUp1;A?d;s%T1);q5V2a~ znw#^tbEs7)Q#&nU8f_P-I}0_XL9nf6nk;B7Qff!03kM^xc6$=TXXq$aNkBF+4~Cyu zwfD_!cVg-7Pdc~2@^UI$F@ryS+aDZcHT>gYSbHbAxrl%jgspM{GovjkSr8!~ZWl^= z)w;p}<9ECI1j$I9kf2wRDY~xbZcTPB;=0$b9_V0(HC2GcWk(Vu%I6W#rY>zU?l^Qd zv>&j0vR(_BjiB_%VPnNl&vwg3lCh!Sx5E(BnviG_D$6J?cwY5R8OnpcOqMP68GyDw zb$sQ)p`++#&(J8em&)ob7;&G@UO>PxIl^lUvHP$AYK|v>ZhPf5Kz)%bqg_)$2q0sr zr-EDcZW;pyFq! 
z*3e87c{;FJE5X&s3CoBn`pVX(TZ_7`5RVOmX2S*lq5}=_I75yYsI!|yo0!Tl6|T}L zBOfj#S${+I%poo98?2F=zDnpd8Qy&|M!jcxW9IsI-uo6eNq8n~t0ETR0e&Xw8nuZW zkz!0_-n}SwEwZI4`CFW4Da@JQl7%@`pa!XWcm8Wt?in(|pelsSk#cpm(*Kij?dISQ z4~p?&35E{?axPfU0@gt7%H z!87orTjkHQ6c00-*<2Pm)V&E3gyMr3QzKLV+)@5Bwhry`@>Le~>=j;Y7V z9Myb6$8acK;*j{14UFQOUi`ahQiko2trqt1@1_Nf4_PV^y~AbkYqcP!+3J~+EBu_s z+VzhhqpGU(Dcue>I(u6mefKZA2LW4$T)O}V91r;i%%I-cwSN`zuhWKdK#hhEzVLmv z1uzf^s&-HxrCY8t{Z@FGQY}AZ+`?`}VQH83DD1^xNNwL{$Fbj4KnVsk42)`~U$w;c%YXSTkZ~CN9vqAN9u24g6&@6^ z4_5I%g_}Sx9sHyM10u!vZ~WW81s?w42GBn{7khVchvk>(-y|dcyaPy+{@?m5z4a@E z8v9#-;?L_rf~_Sj*bl$gi3eep++;s$8%^(DjCQ!Ufo6x5_^pod=hgo(^xuYj(%#q$nkO9i!9LG8-&Zk9%8w7N{Wi5mH+@-#{W% z!@=-AL2A78rxEnr;5VDOl?Q5CGq2H~r|vFGK5BBup9M)w_%O&xJOUaErf>!O%nm?q z469!z6b!$~?%=$|XQXX&^nRIZzn5N`?88Wg`nuQNx1t0Z1!UMtj_fkU>v@`PT5k+$CIcbl>|4WC5CIfL?NDsb49F~UsdoUA zUwLt+hHiFFBFe=AhGSB~k~Ouku=-fN=w5->S02keg@!C}eVE+Mj;>aH3>S!tR!*9A zIR&X=<3p}Fmz_|7S5!6hk8mF3uv}GlkasAp_4bEU5?&_0*;b9z^||G7%3;ZA0xyhz zG)YY2RRKHO zI(NvSX;N5~lUn?eyD| zLdnDXZVA^;{Zs-Zh*%*PIIlXJ-@ETLk%^MLVH(p~vsmiJgsIjym+in1%%|fISVth| zBY7AcAdjmjy62z_>Y}8TrPK}Cq90H;V$CpBwg}L^o$`bDShh^*o?L;MM9=Ms`b!rT z64BiPj|LedOh3-?lwm@@SPBL%gD2XLW{uHiq51y4VwV(^r!kEI%w`@BNqXmHqmK#^ z)sK34d5++PG&NC|yG^J_vU0}Kg-?V<)`jdSRrTt@dbK96Gc{2MS5G908Kld661tRh zu)F+?v4JBDatzJ1JjjcduLpkR&A1?nzKcnd`I;Cug=R$Ut(!j7^X;9YW;xv4Ovv44 z6@K|hJ(6;B($#v`n4v@(Bx>l@2M*@7@s+VpPAlEDKSK2M(WMU#z2PdaJ1Q5 zs=`n%>080m=e~>+7dGUr8;_oMg1FyLIdV2xu}mOjPMg|YVbh*Tfx0!qm*rVeOxhk? zKpXjjs|7N^`*M)Ep|7+S7m4}T*97x`EL6!M2N4p|sIx5U-SX6YeLpn}(<`5Q&iw4G zghFSiy7C<^&)0sxBjavr9!yk|!{qc?OWa4Y>~6@ni1$k2u`&(OFxzW;kIrEpDF}OL zoH#Yt+y6|o1CoHbfIbhvm+v(zQ;kRyFQB*!RHa;_rALNNHCYn@hDXV=FV5UuPLsU1 zj2hN18x}s&JJywsv^;cXH@ZyOdCY`R&l-ze6H4`>6G=oJ#WP}*0W(`NbmKhHMh`wF zBx}24J-msQEx89^Y^;bgy^=M;Zo5kOnyL@6=Pm5Ld0JIMoesb4nouk^7Kw0H=^X+|R{OKZOtDyD;G<-Gz%UieXcYoV|uV^hzBlrlh3U(1w$}x>}U-mN+`lvga?; zJO&^QNV)Srnk8{0{tj8W&drxoMs62oO3Nu|sQL?-!I?(;^MxbEGwLS2OUv$@-=a_6#<(*v)_b+spB4Na z9*#gCIZ&mlfikK5K-hIsb7Hz|Y*;ZuxklCZg?DGcqSCf?@P9J zj;?5eXBY#0TkJ z$|@G((;~8SUe_vC`P%Q`rZ(hX( zp549c9j|6|mwNRrZLV*b?U;evlRZBNSWvZ%~J|S7beE00`qxwt*&vtc`}Z*m=;42!QVc@I!`TuD%(M{X9^Xt@u#| zZgJO;7GNc`~St?M>7}apxV~W$$!yFaAPN# zKodEoJ_=up39Sp1q3MENf7!o*j3Dcmj(3T6`7j0gNIsl)rjDH50S(jSpR9@+$aZcX z^9e94PDA53^M5%;WLD?Ak4_4qW2Y-el@A(6!nrV#NVqW7gP_I{PFZCAi9g~z<6gjA zUsaVZy!`YL;%q%Kp<%t^*F(_{>c+Jda_qf@QVY>WQxtvd!}6&V!aErDMKe=;$E=84 z@PjASM_N17MNWEdfy~tQleEsopZaKC5T9Uy&@RI{rq@7XHBERen@3X#W!x#xm~2Oe z-uhTKWHB+nQT}jDHyQbO>vGp-ohfk9%X~!>MfdRA`3AS9YaXuF?B&2rWMNnCA!I})7j7?cIB60-H$breJUe>SOC0NzGt!t7@u#SIX5c`|+N2W44{Q%y|`k z8Z5lM$50`={4Y93c+`GbC79HGHV7k)oOzv2izvK5YltA8Q#=Uh^Ot|XrN=dxUo$P2 zVk$~JS9}U0bL<)7CYRM5Pttck$iJSoKrJ1o{)1EpH0Ue+!-W3R)stg9f6G<`!GeDp zqaya7R>>WZkh#gH*9U?V|MKO5-T0j+36cL#&F3xykGCe_|HXZ_^jVx5q(FzOgahsM z0kfqw!v+H`=d)qgKgQ~Ri4L{Yy{BbidW2pixTpNx_dGBIG~V(OF&o&c0yPl9;NXx| z{nKyi;La1H;CD&*eHPQBlnh!CcF<+I#7*VU6x2o>ue|nO;rsu~Oy}~KKjUVLDGFq! 
zZf5ACOb%jd$R-slGU)1ayh>l_V1x6hE$2ye+8r6bXveW*@t5EhLNr#|5!5mP-ZnVX zK-g;Yc%~|neKL0(BPW*|KIOM&E)0ctx)(0Uy0?}qTL2Jk2|FKM%t;Ud-W$65QVv@v4H>!`aoO#6+m$^7aaZsEEoAa%Dji;SUF z2yVhw>63;TcgK9vl5fR-iD`ZyX9~TbULv~P^A4l=1Ki$s;)Kyp0s8|em9vVx73C`m zHw1LZNMj(usi?28Q}kN};*B-VWKpSWYRg@=*FXDG^4o>zC#gJOS2uH8Flr=F-ksqG zmJ=ho`ec71q;#e`zz*w7yeH@O?an7j_bZN$r-D=?D~RQSDD9SXW02<-vkTJ@>h9Oa zj0UR0I=|g)HSz1Y>v#DikCWu<6SO1EXE4L?s+#8*E-E7s=tc$|Z-PpY9l-qt4ybNx zINcq-P+mp3_OXK2JF3JqR%Wlpf1qg^IUH}v`m20)5Rff+p(#l<0S~|A~HM{w~i&5^F#B6WPp*w!shds zk*uSKIVE^I>~t70HKuBjnKtp%uYGna3?rvH|8@RbRQ|op^nuQ*FCYDTcV^mB&5~O^ zFG)J2|B|7!9F}NaM)6aE4&V7e>8MYDY&)`j8-(*`Ae0~svzbDytM zK*2ovKKXUZe)1QP&1}RNWJ=tW5SH9NKTO&#ic~0@eqf>xJ=c9Mn`9Kn*u5MEWI#IP zxjDEDF4nr4-va_R^6grZKGV8(RQ{r?YEatO*0EF%aX@>J1}Uml0D@@Dv6cWFbfkBd zhjRIh`pkY;^sTJnZX;!1fFa@y_;|PJO;1VE&8AADLAdlV7;>9@E9+7{d44ymh@M;9 zzVQ1~+l;(LiMd82rOu8hH%UuN_5^>OFE7sN1-=)p;TVcMM@eKi$9SRwXv_@+d1k3n z{t(P*Ap4e0ppb@WPJnP)3 ztLs*hSb#O}z(9ExDhl&WDTi(9K<=-)niBAmaA;c#ryiEQn%HgIt|K z129#FnoYHV{L#|93#uM>_WSgQ1MEjydj<>NCJ2N+V7ArLC{CjdgwdWFgQ^@~Kn~gW zKU=?X-txw)u{rT{s+N}yGdQR}vuS_PeF7~(o+Bwm2m zE*|;n&l~>P`nTK1rHdbiX&(J_V4FP~Y#nSePW5+tIP)Y`>Md5OGd6+#!#i#k7F`di z>4F;_Wl?oCDo{12GC<-$8ZC90-55ybAR4z%m8F^i*`;h)7&4-9OeN?v`$KCE*(Y?$ zLC6_wB^YkW0?(htUr?YP`bX=@4ycM;H_CIp?Ke#ej7{n!T!P89$J_Yx-GK3c-Mz3& zVB4X$K^b#U!Ja>9FHTNkGb=YP0|Dp>Ma>@rk2**yGD_;qt41u4GB&r*r zaqtusM?s9z&Y`#oPN6BRxHQ;y4w)vH@L zo@K9K{p*xm+<3hVc^MA&c?5l8*sWQ5m4an&KDGK(ueB5mD%>QC6*=X>G$O5k?0H(L zc3!rXzG{E|)p_Z)6PN4D%vENgC$O%@A$TFYGen3s2xZ@>Vp&@}g4nlsTeMm%0-p{$ zcm%;&wetj-N!%Qweh~O2?}K5^}w$u$y$CosU3S#33_Bz z>;#!w@dKkUhCaHVSr(b9%moB>KaL#{6t8`h2>bXB)>ZV)i#19pbKJJqCTL}SfcMvB z4l17>#jXyJcOqLXUMItl{0rwBNah$0+F&&51u{iij5xh?Ioy6bA^Dx(Yw4gPYPIDh znoP7AIEW}XL#De9DCq#!qxjQ1#Pxl(!%ORufwnViMv}%Z6Y;;w_D<;1d-Bie&)jyg z7Mf=mEG;Z=Est9y-5=mHp7vXYL}@ajyzhe%?(9%2+tgyIq-C3{rxmPREFWJ_`gO;; z=gCGAYwU1O^fgD`pZuVyzVsox^6c~9bgi}=vQAvMoNih`{`dW@hzskyKV{|3$HrOj z7+-p6nECk_?r~2-;<1Ywkjm5t-}Kf*d4VP}gvJ6P+#tz!%jzwKwL=BhX0ltgN~aeW zjq^L|v^Qi{uBIxCs5M?CmGCU&n9m7nF4+#g9QC5 zbaf`jBz;(a&5qOcVTH;N*HW?1THd?2BdO(JT_axPQc4{;eU#!m ztjNKS%C)Mr5nHqx$2EyMe*923zGU!Lg|l02PTbA4PtwGYRYT~OH(ZA{iLJ)I02u`H zT z83WE$`C=tBtWka`21}rY5eLI9v(!n{X6ee2M)|y`z5_R*xtA%ct5?rN*xyL7RXA2D z$WzD^4G_SMP~~EK;qZe8)M@J&O6Hy>Yc1e__H-f|f8fB~xUW9!d8Toa6Z^IcIy|NN zZqb!dAqTo(X8wCC%|^k*NvI`BCaS)fylpo}P9 zWg;v5E?L=Q&*+vxup=6(_EDkm;;_(rf z(imlq|Gv;w&S}{RuV+itmO_o!4GhBv5;}UU!`3${TUv)(938LHi@ktoo0|WwOYt{B zK$%x$<9{E?EBx;|xx@cAWHRpTbztMHXlcunfC|a79ZPxz)U(_xFSk!$N9;dJ^LX6< zp=o0=RW0|H3AOfZkpI2&51)e^DG)Gm=PKmjuf*Pk&m>IFa?GECKLmr zTFOu1kP&XKPN9TlUFVLJwyVd_CGxjrD^-zKLZOavGZI9(B68cN21`4XbC(SY8=>l< z7c4S(JDW3+1%?HKULAVw%j-yWanqDFmJf{^WP$5?JijjRyl;* z5pSzp=&g|H{Usfdb-#x`#5yUsn|Hyo6SR=mQ2e**NybrVBVsB^uK3-mC6_bpRMvu8 zjKT~NhLAv&-{`bAPnYt&ZmT?Z@#Fa`52ZKToW>STkOzY`B~dQ3U2u+Fm<`gPk-pu} zNqT@~;|z@TU3AcKi9!jc^wnZAcPCtk>R*CBN1{~Z=f}N4Ihm8oO5MtUu$xWwq=|$Q z`WHC<{O#ifwbh}k=;9uMh%@2x85KMCo^EwwxeD6w3@CUvMiG%nLWQmwPk^4@`7uQs zRpQJHL82z6Jh4k{{^sTx^tJazqHRSgj6YP* zew(}9Dq*zF>1BMYf-WGP8UtJ+LZhL4xMA|CwffI_L{SccO4x4cWur)nb4j^UU*6cW z^Z?Yfsb#|3Yj>aP4)zzgxb2klpyCK|QCLSj*DP8SNF&PgAorSuM;}dWlyBzaLqG3s zm5%kDO1tk`mfZKIN;l@_QFPA9i?)3*YjpQ!9xr*aB~!)}2t2q3mTC}ft|>+e$z~>4uKif#e6e#SIR4p$|YSvk>}q1OM4&m1O6u@eNTvq z8V!IqIs;5EEQ#+V>1Ou-DdMg7jxZH_@ptXWyqZ+LGSaad}4pwI0_=tEQsb5IszmW z#z1LYZdQvoMz^DBj`N?N9K(qgb@y+C*ed9{mqFEL=V; zY}6w!>XG?kJaSNjuweLvPm^VvkZnyN%S&k|Lk-t4T+hlom)Jp)J{twUXI7(+Wt1Ci{HtQJ_vvJVhOoE_ZIKs-uzm#T?9o{OujMwrJh7~#&xGoSd} zezp`-qor!hb?XTHk~V`~)icdE+s8MhZTd3nRz{LC5N|4gv~q0gDW!SC+d#-tj65-H zntFcOBceK+$E`$a*aESx#QWOFk0VPiSE2ig{xGx7cF+pD!CA~AjT!RgT0FL`<@4YS 
zE81_6Vrh--m!WCmw?U)k)a3dbN6A9<*qnq+Zs8umD}uYZGlsSIsNw*~@1Td0x?_Lh zx#8Bp#7H343@9k0-WpjMYlJf96~Ng1#SL(44Z;x%YCX>)j5F%R?=hZu=e@h~96S;; zXDoy9cE)^|F6D)@v0cku7hR6-Sq~{9WsgYI;*a=sIzFhlQ5aqra({ZBaUv-pZscUI zchHrk1Jh8pz1snIRt$En$LlU+$ogsy~DRv9#J#U2%N*Py7Bw_YeJth z-vdy?3EyMhbXuy@tNg_hR-Invl?6$sEhLNRCpQZn?dtR;sNhk?6oZx}z+$8(JNS#- zq#985fzMpCHNWO2-idmqd&^VJVaX2j^04F}vIk%x_))oY^+;!1;%1I5$%7$mxng_r zS_kbsnW3|vFKEI>8Oq%fuXvpGV}uo5#*2anUj91WN}JdRa3LUy#+<1_QPxDs=`Md#+P~Ldm^b%~Nir*_uGFjx!#~_; zkcZMXZ`Ajdb-nHS;OhLv>O0#TE#1BExD!>$hFRJbKrz{mb{wRk@i$)u?1^QiWHA<` zJcqTj->F%^eMHokJ%USDyk?w`cz#4M(p^T?S`jU+%n*3&cQ6#3N9VH}!{lSFG<6TY zneTms09ifbq}$`TdsPR_W_M1ol3{}vJazo|7C~;pIT3W22Q*K$JDMs_I1JDyHCLkI z;cFv(Y%eab|5!LagE+dxx!qOLE%{i(CHpxVOUMr2SDe`kd4wo^)6_gVSI?t~$@V-` z($0FMt&f}TDQC~p=t>H~0V5B>DaV(=y9x6w!4rU0&qwu^aU{%sbjkS0d_j^I;Fx zA&aM!zsrjonw|98ZJ!PeuClB4UgJ`Kke`*=9p-Gn-Y3QzFfdGNbHI@2|} z;^clHs`Dhuec7-&{qy$vYE6_wDZ+iHX9^Tz8e z2IeEPlmB7#E)@EsOa~rCS6)f!3%XB+<O8E!7W z>E<2sx7wnTfvF#yIVI}02xm@b#$7OwoM_0=54}p~XcxqBD)=t7)Q^S&;bCkB2&WH!MdEkb zI1Z?bRQG3<7ZhZQ^yrENPW|kZ=oWdk@V5GObnFYiLiKg~Lv&{t_l{ZDJJ>9C-g35_@`!YCq}1xa6&Rh_~buM&zBk;qc7 zk)>|Aw`C6v%3QOgq7_pr+K-l``Raj;#gouGh3{X~*$kyOhb^l_o~{K_-#bH2VqGDj zpa|NTQjLw!s)o{*`nz8FXcMnDp?F1l8SH&)p*DYxr6IIZNnUvyu!zJUBv-gc< zt3qBso++`DH@i5k=kd6P!&f9u@a3f|kC!Z{*vh8PP<#p1v4T`Z1Uk@mxU5+dzLCH} z=Oj2x=kcA5!Ks<~Z;0cE!wj#p6ri@}a%VMyw|-<-4WhbaAmY7x)biu_+vE?j)$XX=6J8hFny^H_mL^y?92eFB3AeI{!NV&^ zVwr9P4bxSLFR<11{enT3n$3=3z85J>XqUrl7_3(Fs2e4_;#bs)+hV1}ley(9YtiKN ztfco_o`pZ`f3;*2K>1b~Ff4bCOs}jO8gT*3A?I3EY)1$vWn|v!kAJD6Vre-VSii0C zk+(MVtc9M}NK9|RcUeC|Sj4W!3DhsBeH6$mQrnp0Ad>_13nM%1OEhZEupl4}>iVm)c(=O?IYj z$!@^gMrU9BWSN(}yD)h(-dO#K$J3q-F2;7bZ+CjG(1~XCrdskA+9ro%&h>nS6R)x>SbaCH2N(R5LsJCJAKs z4>BVcl&i;l7I}U=UM(gTCAF(4BMS|r6_Nr<2GB$Bu-7FM8}z1-)Xk2x?pZN$;B+pq zLs_l`LWp4fzQg8OebS@X>34U>SScS?kM8eA#Ds6thJ{`}(EA+uOMq=~W@OdMhkxit z(x8y_oJ^z12TqC;`eTLahU#>OX-bK6ld8%=03Q^m)QOi~>k?XXMV0`55& z+CQjKSdGu@@e7TeQRkmCpQFCB^W0Cq=hrMin#e1g0&-18OUslrE=k3H-qbf)sWE(( zb2sw3%BgtiuoJo)xmR0?UnKgu5EGlbzo=((H;ZG&pN6$>McQ(ZS!eiS%F#Mmi|yQb zIId5Kg6Z#;6*l@o`n`>bbFw{V7Zc1w)btr{$Av=?95ZqY5O%82=q{V~opE>d`$Qwg z71*fw2w}`UXZQom2+~ z!I)gAF}LrY$D4_ZHrsg8^HoGRoapto{msl{%gIKtkUJmM33N*BR9qU8fL+rND{%KmC7}AN{U*wu3EKtH9W|Xco8qDNw3d-Ly2` zciS6TmD_$<^T~1Ib(&Lp$<4e)K;XM{^1VfK zcKKZO21+cDN915cbBtCMv&=TJ*iLRK4ycYHQYL0%CTGeWu006eQ0R6R4gGRI-69)`rAu=1b3X_2?bEtye-YtW*d| z(IOs@X-n6PP2p}D|M0OQ^a$(rb=d{;si>-%lj4sA>;-0SFG$;8$wRuZ%Qg*uf*-?- z;;Vc>$&g)+@csM(^@;TE_9kRUya8sPE6V2j$o!)q8`Xu+{_gLpE$l@i{6wOoxdV>& zS8eRCgkq#S$=no7GZ{<*eYkyaZ%lqu?rZF#arN`epLLJ6O)oiLi(YqbdAL>eDs<;) z{o#Brl?i|lepF?|Ftyf;zRVJj)c3UrHV(QR$TPjewd}&71V8K5tnIK;>O&I;0+!_Y ze^jf2TJDByw?}Vf$z)uSE6q(;^MCUfou#D9>UC8t%Q=PsGvB*EwR#V-6)?kDGUJVx z$+?6Tig~(YaZ_qUKEKuLO9C4U3mvLzw^tp4F8aUh`cZd;EnX(_Fh*0FdXo$ZSAvFt zIdN6Y}Q}rW;eaC@~t}(iztt>fZ%M?ya`4gX3eD1!MI$KZ)a0y zej4&!dGEvY8GE>RqW^3uJM3jF#3ow!7R=Q=$nk%j8B3Y9D60cUv3tA@g z?|m?#5;OM7hC}BQo`09IybWlPvh~U1gPm+L?LtO15gt76_Hh<>^48x^(JKmGaseyn3l8D;z$0P!l33`=jw`Zxv%wC)4s*IPYanE#D6$5 zXg%s{A5e8@eHHHgResux=+p`81A|DG%b9DgqrPf}Zc=VmYX+*hhh;}j*VM(`OkTQw zL^$V{gule&C$G8T9+s)!q4c0!m*tI0(qt%FA{Vj$BpY3ySz224f>WDql~uM){LS50 z?DWr=#Hv<#U(UFHe^&yAD2fD3ba?1~|NO_S*Z)3l^DhhP=0vVES~s8bIN^d^3U*h@ zMO+-EP4riTDSK1sa>e%z^oubaGAA!TGv+z64G+S4z@INb$u%8djtg_)VFMw#X?)_1 zL!^>nPf7ZX%#xm$Y4l!tA5uAE`fDY*^pd8rt)?`FnisxctU0XRR(fb;@_JlHO1ul- zMLoH^{7;ge{?*Yn^4x@XrrP>pf)ZDhcFKQ)0!?l8^-ZKMiI((uzlmb zOMT4zc*sX>Nrg{r)j#dos)0{B`dY`1(Q(_TUNtt!0&z;=>9}E?RdzV+@#x?YTHtR)?C1 z*Us-qx>YPBcqT3ROWdBxIpN+EyIMQrE7XIJVBcWC@^t`jzdRrT8!Z@VR9lu6wjk}k z9Jv{6G_=%^dS^^F_ilK3+L?ZQy^iVazyO)She{i3ZCk5k%$N 
zEuZRUvuAN3C+f%g&Mf!ToK#1M#Ph~os!7}a!kgE3d(DQ)A7=ku!<*6oXb=Wx*k>uo z@o!nOn7`<>Rd-+J-I$1WL z#f6P_jC`^y(DtUtUR_B%cBU=wljGFzrNGFg^Gq6^JBI~SZe!wJT&b%ZO$qrF)a{R0 zf^l#lRLhbTx96sjLPm3mGd6D%k4I=9(Rukrjcb)j@BJbN-VQ#iy)-ofA*2*9E#k0k z%#h$J=B=+ysh_Y_gO7J$Ij};spN8!rkmap5U)%%ex+Qt>gl62Ui#S`xaZ7kRdp>a&!&U;B4=WY4xQ?X_*4L^ZUfe+{kh zE&>eAhku$O5gsBRUHJFEFcN2kwZ6p+d=OnggdaUFxUA060h!9}h^O^I*_&C>rzhMj zJ0L8+MrW-@HmmgK?$0m$2>HZaqLV2R-?PG8TTb_MGI14Op|`=i3}r!q>DDLDlHBu% zTj8qT(y)bK&vxdV-3oqKYd>JG#r5S4S9% z+gMC!RGb8farfQgURX^>3ke}UbQfxSzS9$_IeH_Vs3`Nbw=9mkr{u_&lR9SPR4R`* zVX^(tJ&>idnSq>HlJ1`DQL@^v2uKRu&FfQ;u{pk(HpyV%7P-XkJ<)t_9J`bT#VLUV zDJmxdtdZi;VL=ld-vudW>!OC{;}Rb#E644sBNsl;{E$f87-0-~*T?he)Pt{;qv+ee ztx2>IP|jZQ-g*HcCf4A=@n@BIGHWE*A)%k&9-(%WowWJmQ}tVc>J zc9qyTIE; zXF*d}zHJW|w|qOQ9Cf;i?Zl2XPH4Q5Q;^p7RT5_`N4zz^j4ZJ^p_&OQhiM#s#5)*+ zY#f|pSi2xvBOm2%xAd(x)nr6Tc&!g6sD9uRu17x+?>{TynS3QWK35`9<`d=;rUfgk z2}Q}Yfu$p~{NDg7+9c&9Kg~2`uJzoa6WmFB{9WSqV!v`i*Ab6bJ)iq8>W)ZVDsX(F z{IxXB3zUZ7Ng{w66C59as8%rrEw`L$-4fDxqO=Bo>iHZ1 zr!14=;xOlJ(Tafo5}p?b64j)Qw5?fH;Hm@VxKR<}H@B5~d&D_=MB|cc&s{OImGC`a z0CJc8y1Q*q7?aixcLP<`aJ62EAi};Ye#6z{Rp#!SQRnt z>xaRb5-2lD-8kmTJ!dh_F&uLKY2TfXS<9wlrR6`@3>{(>Dqvd2tNiY~$m!`cXpv+O zDVpuKOjuiFhc4wW(qqJDrdOHT7tRiC_y&NLYDVOg)Xv$5nHdR62H>g_Wg^T!3B)v8 zp5Oh3Q&$6HU)&_UnB5*ljxbF+azTVt(#?_Gh>Y7GaS5`eyQmjK{(0p*p|hDSM~^Vk z$0C;pUqDEP*hu^dpDmWf7&C)$B|{H(AmhfkctcY|%E(CIy#Wd`Q&I73pI71qKM4mH zOV1;(5?ug48gntZyKO;rMBUCm4|TZ{Iz0pC26WJd#;XjKtLuh0CeAyATg^TfZz!|u zA3ULuD|xqexS^7FW$GXqx|EY$No^J;r-q-3GfjjaMMcW9t)_^d!%Upb`eN0sdOiaQ zYs~o28EgkT|N7`6=#aO9MLR&&8)E^1B}S%0<*9DS`~0wCRQjaxj-ncy&h(vDjV@Aa z!t`BASL^-Ir_ofK!d#RGU7k)Tm%GnEL(F}aPe8L$fevN&%K z)?0B%@yrH$fA%HjvzfK2LN)?hng!KV`NK*@Xw)L-7WLQ?Sg`q{p^^N(rlHNMZd|pH zxupebpVSYAQ3G7Oa<&ZgVvor3dL;GbVHPMrf{)B*!Xip7ml^AFQMYM}CS)%n$O@nIGH=uSG;o z9$$kGEAhUQ_WCJhQum*e6d{}-c^n?SfZuAvk`>46pZ^xcpq{p614-mMsfU0uO)3t! z-Y5*pKSs$PfaE(PH9e5<8)f!eWaH0&Ku94d720z?_#OcYq|1u^<_`;H{!IX+jdy8r zpZ-2}5hUOdL2k$knO6MI2Vv2UVdp)N#XR;~ql9*{oDs~N67Qo?&w>J8dVlsL z>i45V3x2DB{P8U4AA_*c_|5uO`YloNPfz-b?zm+z_B*HJUvwC5@WYfd{F|3t^e+d^ z7l&QjV!QoYrsR)jVgDQ%F5owf+~v25%0E6S_aBC)lM8a%+y9T8Mx)|?PMcNTYQ?b4 zV2NgqAltbQZ2Lq6p^8en>}hnobfJ|$A=y6*0ko@24+KK|{s{qO+`?6l#z zK;|qoJi7|N0a^wOT;otXkg*Ql!5$3(vG5>uhzHR~&kQ%}27+T|O---#SY6~$Fw}F? z4T!~DOlGecWdyH@ci95YGPMrQR1%dl$T=RIKrvx2=I4gS&p1FI&Hr9h=1%RXL*NGT z@-Hu$&e}xAAuSD*K&@g{j?*`$7I{EsWka?AIMxCtd8%73ougf#8jYjfi1@H=+k0^B zx2=JGgja9Vi5s#&oZ(Moe^!sw7wH zSOQ>RJ(2-#+PW8{p1Xg!c&%ka*wI_W-ml;MD3YlK`&%t)I*Jgu(t1!HR{S1eNV44l z^SfRx*BUS!E>s>5T}AGtoK19kpmH=$j057sAX@MYR3hSdzHsy~6I2xG8=7;ePYuPK zfg}C)D;l$)xZ<_xgX?W%os&A&v^DAeroOudt?SRNe}z$wfD>8qJ-d3c+v6l{@M&Dg zx}xa+An!e+n(E)RK~xkK>Agnjf`Cd@nuvf9AOg}$R5~%B(gK7;L3$GqP!LdxNDaM1 zqzi~hmrz22fb;}3L`d?y`+v$>b7s~%=dAfOYvu#x!`^Im_WqUox~~E=9+xCv;PGZ$ zY$A)jYf6^CO&)m)6E+n43b_)#Vw4%G`Hbaen=FmDo*2@njAQL{JxNs{S=*7(z4$YT zUIu3bxwOcAh4ZAj!{n06(WZu zYD21>s1|ys`{~#am^RnOlW!S)pI$lY*A`tXXDhxebfskO-KsA@<2KcsJGP|1-|N!e zSdt_MXTT~EF&zB9UA&Wj+*o28*?idz7brwMwYT$YX?MD%F8W!WkvmlCFZ=lfSUa7P zuxzTSaj^u3UL#5k)4c~Y2Yl(H6WVB;7Gq>=acqu~(sB?!1H@5L6}~kd8=%HrFVPE- ze54s1?%1rrv9>Tynip7F)&0HlJ+aBqI<6>d?M`aVM(lC>nbP&x z?L1&~1k+0!@pCGK_4qDXnojJ%KTO;VIckyhckRnw56kpR4a%fnrsu|2+xOOe)4Elt z5}jG{8KuFglY(ck>Jc%#jtCaOppJ22ag(7-_DawOFIPc&tzlV|)Dl>}NX+Gu#2s^1 z164B_s|m)Q@5$S(Cm>A#JAoNafA@PVhJ;il%_h*iispaiOXv2x3bfZv5fk=$?XtC! 
z#y2Kd(OKuf>Ey4{3*qZIzd53@B-7NndcqkDX{67UcS>7dri3);Fwzlc0Gv9Y0m}F~vMYg^5?JcDMUQ5oaQi`v1ya)v}?s*D|Y9r9|sY3^N z;~tWtoM-;fMRo-EN7om&`bT>fcA8wsrEie6pse2HuM0C?J2t?l^O4GR>_9W0Wn89K z^erin7S|J1&(buiy5GCb&v&CeSOpTl9#oY+v5X@KS@t~N9BH}Usq24nW-wz*k7R=p zr8X14wJu}lpvxcY$p-o)^Q@={VzSZLy~v6EABY&fmgbaiL*uypJJHR0AKVpm z1E$6SWvX2BPH;Xyy|%JT`d72CaWsS1ywqt!qf2Dw!(?NkKzsh&rI;HzJ;>ZRcb_G% z4i(ZiAp6@+Ax(&RA`t5ILpLv?j;vXG6Zn*Fo^*R} zD06`0uv-hOpX3#XS4G&N^s_vXY><4^HQ<@JE9-JnGX_G!SbD0!{d(*S?$IK9zzWc$ z+^JVBd7xSXr&yVzbC_?Ld8b(0{xL~ltrD7`M{-1SBUQfsj*QdWG>k~03Dd$|ASi51(PGV6}rOQsnNyoZ-UU7&8$iIq!vG;&sPJlE z#--*-hANDrD>;)hL!6UpYMWdVk_6wLsVw8V`pF~B<1tT+=mU#_2UF_+l!L|j-=pWF zy$fF0@)z0XhTXc|ATNMj@Q9zvo)z^^{g3mcBjDY|l-IivaUPu1(;A2yn^9L>Jg zFS!hp-fZ8wV!QZgUcvOf=Ebh@J(MVLaU3;wMXb3e8ANfT-T=H~%*jSySpeb+3`Dle9Ui@mu+v zQ#(O&rvY>!<>R#=A1_I#>k;XXz$T(Y3fH8vey-zbz$(a0q8bB3hv?typIA#x?Y}wG zP8t}oud8jH3A%)q-lO1YNRv>hS|PCky+8>>k^X112R0gz@H3?&j{g#8H{bh?gjLc6 zatUK^$*d4+g$HSA(m=RbKs_wUh={lML49$Q6Fwa(tarKTxr*!!QGpora{ZB4GOz@B z(KD1fq&l!)E1jB?B~CHAGkx-(<{~-|SYS7ANeg>U&E)K=uspNt$Hd`!@W+ zN7hVDXP$A+fE-1H431G{F}=!(c!5R-H7qwQSb!T3HT{&TQZh9jX{%-)6n_~v=H_YrBI4$H?GNTX z?1oy%!}o;=@<#u6o#?3(;s04k&8Yu}?ZW$$33BiMM^-V*Chx%J zW~l1*OTAL@tk-LF-zO!sJqgH(+}t)a`J_Mk+fHj!;jbO=I>J%kuhTqc8KR?PgB5&V zZh?reaE98q=MS>r{OJ0HyH-L<7d*_K^(3w#Y5bBG6OQeA0I?c00i{G0St-EXo?K3< zp=x}tQ03XY4jX4~nX+0tYl~AmmdieKgEsKWXt?E)8;5ccr@}RY!2E!zr-~to4%!cAW@!P%Qgq3FP!O)Xt<`S8QO*m|TLNO-@}`%_nR&B-YNya32#ks^w%&*-rolv&K$Tz_AKy zuny?)H9(?Ez6XWF5U6p&2Ma*{>5&9p zXg^(n;I@_ru%vPAOT`A2-52wMw8U;}y_Jkn2<*697_F4!Py~^Vf0(7%3_DNw&`aGU z1<7@r1UyqP)%{(aS%?Xp8`gN*Hm&tzx9=(Y$!J5u<70Nh2POP}0mdCNu+yfpv1w)gfHpR6`i|hA9Ng>& z4xe$l->fDUXq2ORL5=X*)^`H=2c@tL>xDYDR9;5N#mVqJ}R8xPrW>w0<5N~b6Da) zBVq5X@=MPN1_6VKHL%-qfSKM)0Wal?4;Uy0UaBub!gz-3P&sn4yh9{iXu%(2}qwTiz+!INLFE6_LcPZ^hgVGhab+=%JgPQ zgyk{T5G9KMGgt+Y29=2L!5mD_sIeJUhd9QK?olj3Ot=_vPq<$5Fi!7%kR~Z@UmqIl zZ9A}L)kuW009jmEPTjKr9{sioF)?pV%_c^GSJX?F@#lVx1xq{A==2GKI*r?k`MpdeM9mt$dS zcj# z46#$-prVgBr)jSG zL=X;7V9&!)=RfDLmwJ3cWPd(_Qu1cr9a3iUA3Y9{m(H=dv)_|?<0r0`#j5WN-#*}P z$$`@|oL0rg;dvNBgJh2TQ%F{hmwVedN}r%|rB@67N(@n7sBtR&PPzf;t^DZ|MJwp$D? z_%f_6^zC5;@SHiKbCeSr_L!)NePOnFL`55Ju!}sA!rSpi@6EiRvx=AKM_-6PP24FviY<{h_?lDh@2Sn`ZBMVn{qFQ-%=eO& zFrnn&-hN99spqCRXS=OK+@3-4w~Ql2n2-L9PHfh1v?X`& zK*!dMjtsxbwpk|BfbZ4BwteaKpBclRKNiGA zSfZer!h_vdTsG0}rLwA`|7YbwlTzfx8KjW^ny;<3$f)bjxtCmj12|uOxhSLYs%)!6 zcBFk5%1pa7t<84~HYe_!GD)-`jZl>>CN!MhOsIr-1ZZ9Hl-9zA@D?cW;ZA(k`Se;M z>nPr&D;7Hf%o8W|;h~|rjSpUJ9M*0$Tg+aIj-HFFbKCj2u?qEGOS=WSTJ3qT zV%O6`Gc2R>SdjTMLWyvB9Qf^TckFZ5!hu`{;%)E6fpcC=O!`vnuw}qC>O9u_ZqPqW zg0vf?=BRHKh1Al)BbG!SC#Nm@M={@K;A*>W-tMo=Jd-PbJQsisn6Py)`?4&j4vgxp zq*NjW2wD{&j?J6-on=rlqjfXvq$Bv)p(tg`BI8_{lgq;0VVU^PFZhaG`h(M_H4oXn zejIz5(o0Ym#ANl+g47~|??LdeZ^N64>MOPlp&?KsaaAJ|)2|}0Qia?zgl-!KE-6#D zNMHh`E1#FVm!xXWb3dKuNmKSA$;slO&`8|RAb5xP(-e*mm{B?_#hCgUs8msx5aE5U zf{`x%wp7S_1PshQ0+(#6c=#|ly$7?#m+H}LaNJ{iz1#j8YuZ8c&SPA5G*Xg`Cd8DK z{6a}hb=Z}AH?Lb}+;3ux*S5gI9<$+stpyKf7G3klsyxf)_hdwI*A?7SBX6kN(t zj(4+Y-sTG`p91+Rx22^yVQ?>T_xsGx=m1jYu%Z0B8TXB!nk_sHA2ij7l` zF%9AH>w;F^>qc)zVM=A|^DjB$R)$TV%cZNzf{akOf-@*-q!Z&CumnVW1opC_;$md@ z%Yv?iE6%=QKFv*#rJLx=*E?SbVYR1nZb$}J3k3f-6hl)!(hF4eVwYs7xh#pl^B`c- zOGWihxQJ~5Hr5Npq;b>a<3_!gc3oTxsk~3wOlq3O zZAD1lJAEc29vvD^`m1sU!2;l_{S$<} ziM3O(XHwQ{PIo8Rp0{!H*}lGb6!kjW;P+S`NR)9N0qNr)=mo=isMJ@(({L z6;;b~=N5R`Sw>MJ^tWgqIDnfc>k%$aDh8!;Z3G|LMbT8mzSMWX3fO+lf|YTZN?<3} zinzr*+n-F*G1r$*SPzNQqux^VZmDtLoCsiV6o{-5*$9U6Zgfe^R(0sH&7=kJOd)21)p-I&7+@yQ#IE9qRb8k9kT<|DK~dmE-j0yFuVqP><>9*SgmQxw`rOb_3J;T>F44lfMvxg)sI! 
zW57{15efQ8h2Cum&$Z7s<>wsnlWaRSD9$%>@zeNR*BUYBGH?D2&)3C_n!qW?c3na$ zk^Ezz{o~Zl`QF_e{CpKW{c_+44Ca(}(_F&TrN8;DkfD4(ssD+&_&4QG0lI@*o&VJ$ zxAgyH@%6~ze~WPZw?>iGuz`FrpgW4~#x=jB{ab&-Va#W32fzCFmv!($(#nkiqX<|@ zVfpo^zTiN!!V&?2dOb-!4+vEjzY}^a?}Or{h7iy=*&Wmg()4;h0|fjrqhY;<`H?5H z0-O<5>sf&ZZ}IC3Uvs-$nfOY8sst$TXp+zxuVnNiJ*tKF9@R|kMk8nkvptISI`|j~ zFYr7z!Kcdl?WqN{Yp$^DKw?1+>(9Gnk$&Xt&sFRSaeqK({_1-4m7nfhBgA)o`<#yV zd=>Zci-_}c72{|<--1ZoSkD+M(v}?4{*a&m5ty$$Lf)MI$)Enk2_&)iL@37Sc1BXd z(fI}Qw^N@sTT5i+MjI{O`O7uS4efWkEH+o|&8&}E5+2v(NgLj_kiQ;jRc2-TGCirV zwRk(n#0TaC1}V{=0%dAA48+;67cDMNwOsj2<07*X`14g4mHW7S$gcg`Y<^Aql}+hg z6QY;r-wMiwFsti}>jQB&Qm$!$be8aYWpGyDXfGNGn9j1YI!`M#{P9t%pm@JsIbV;N zOM7X+qVh0No&71av`dCcAVIss2&BOzZI>b+$5+wS<|T`?tKTl(U9x>u)bb!wb8&?0 zdAY?CHaD6q{evzy(wk}t8Yp0fsZSu#Lq64BS>ChTD93YBPU)hR%EFxADto(+ zpb3Q2{KG_XCHVu44;CjX>q_|BO2t2xysuoHo=?m>e_LXQ*Aw=!7f{oH=f3gE zjvb-lh>=&i+2r{8Cj7O4MvYWXF2n~OT<0i5pZMUEdGEexjpmN?5A8irOwsn$g9ht0 z*KH?=F=D7x4xgp5>PxX}olg2vBUt_qlYO6c$GcG1I+F?$M@w@f+qKNrjK;-EBoC>c zanAgc6SWB~h?>&l!JuTuU<6hSv-g+4Ukwd46V-(DC9}WJOg-N8E4)vO^M8}26uk43 z!H({hLZV6YM2#MdAfkRnXFdLR!Ap%0{|_4i!rJ!+`SQN$J*z7|?)~Xef>x2|v$4Ey zx8*U`kBzo<)lMXMtj9Ne>VVwbOT8QZCF}33-*)Bcjvn>Z7S|{p`TSIJ{qk%a(iH{r z7FQg{G_D@Jn1t^wXKjlFyJsKDul~an{A>6hCgtJ#QITvR;KX{bv9R(9+h(aNnI)j0 zXE+V3bid0QApt<2{Izjfxdk}{tG&Cpq8xhQYWpSC zyTqv}S^Hb!!dcy-x%2nuc!v<~OCP|)^UMgXFE@HJk9Y<^4SGGG6G%tWVK;~$7TlyO zLDuFTbMsdn|L{KZq{+9I`?~{(QN=PKAk)?YtRXCyz0dc5Hbj_a zmwCgo#0!$0y_7jClo7M`k%; zt}-7yO`g_yW(uEKkhozZd+7E~X1yp3pTw`BzC!a#duEGe+hq z|1c?qt{>}VP>O!8kIk=zrbjI7te^Tl+Y91n@FC{=!cwC;gotnjyXoJNhJP|Mf2u4v z@xe<+lLgxzc3HPv5WhLC%tP~|UR%a+84@^z5bcH&K|VOCl;M5GStYTLV_EzfPx`vp zmvkRLH7(#;yZ7>P&&OA*&Fvz`<)#|Q=3wh|yROtBhbcm3Q$U`{M09e}E zy7re&jg79r&XtvF%{W)EyruUXDv0$O1D>3V7ns^gboz&hYe<#9aKc3wT$*5?79iDQ zz+6)lt7Wiq`c}0h1z_h!x*>WAqB+U$rnAiqNVrGd}j|6#{9`vc~ZvV z6QmKc+rQ;r-MOfg4g_yFE)@KMDp;3VTGdC^9TqpV2csxW@W$}D&MPPg4e*zZR09*L zLi!n30MmARH8HzBakeR%n3qyEVID3lS2Xn@oxiOk0e4T?%TmVVQPQfumy{Ad0^kQk z!VLWVkF{_Mh&~ADvrU_k?IdF6^u+CAPCt&h&->VorjX!4Jx3~*Qcaa!##8S!!sy|U zj9!LvIvLnvcSIelU-IHH{d$l+7bEW`D7#MVp3}u8R@H4KyZ!z``o*swcg~q0_qsmb*96NI37iiM zoM8N(4}R@gy--d6)laKxiv8LA61|*qof>XOI(#1FpU{?4N($*g0n7BuQd5$9Ti~UY zcB`-P(u@Pi0ani5krPqsC+h{OM$wFOZ18fL7McA;3C*Bo5(y3=i?0xMqpRjOhDE>~ zx!# z=*8~q*Mud+R3ElGw>9BAmgQ~UIC|#s6HBF;WEQENvGZJd9`&K-EM6HnUF4U^+@neFd{dc41N=`z0MM3+eeH^&K z&t2Tqv1QDQBVJr^Svj?zZE1aKXD5sG;&}Pp4~eQ1=4+lyaswV`e{%BXP{zb*c2pNq zISzL~M8~UQ*$u)kB`p5Y`clPjCpi2e^Yt|$p;MX|k*kA`t~GXcNiUg87n(RKWJ(pL zn@D91HfAqhrONB}UZ@gju6P!b@eJOoJ`ZoM&^l81+K5d?vuLXk1!>Ym5WhF&W@T;Y zhS$@G3n6TrMj43Ycj}{Yd+ma_;&rAO-hzhg?LCedcXIKPHfi5`+*Rzx-n?}ae-BA6 zfF;L$ZtdlRewCI}?;f?ZLfx8vQOXrY8&i|8XOVhk8#k5OiFp#=4qW&3>J`^Iqoq5J ztslpJxlJfJ5PA87ePyB&0r?l^C%zSy8iQm5Fh7T>hM3C{Ft<>?zL~G=6Vi9*-kyD6 z{FUWmN`phn`ulO$%b{fa=X_BBDR(2YT1_HvczkemnyXr+l={YNXW$Y$_zpCBhI3Pf zZr4k^<>l?h0Lw=r0Gf}U!~^-NQ#)#TO56OA`vdMRk+;~-IY?fqiae{SV&QQ$ ztjgNvC5FV%S!9^O(w*jS?I9iy6xX<#6?=XQZ8+S74q{&5mTDNF@eYpq>zz+JhnUL>X>6YF^&7= zpsqBc0va^>C=iwxV5u_p{z@gc=5NhP*V&=;FXya&e_?qO(ouG3Jb|vo#-TWnVo?K2 z9MfulcmBjP0P9e&gRf)e^v@CL#nq9Gm64XL+sQ%f&fw>M*E;8Zc$VLgw-id$tS^pou z9O{4JbY@EZt6Urnq*n{u^tFsW+Gm;hpY8K7c;Lr$kODLPe{6&ZSUKMU{qx_tDOW1< zLbq<(#RQ32_h!l7pHi&y%$gZ>k|UT9a}qVEX&6 z3$TICz(KGoYl8Ia z<>(dGDaIEdFU$+z0DHO!EU)(`pZ-1HdFK1CW-jm*mELe(aPwGF&)#v_eNr9ss|PXq z-FkN8_tutH<$lnlfs|(YM~`vEjC;wBjPO-zc&J=kqAcO((_XFu3H`UCpWT60>Wrt~ zeNNaGC=Oe%k<^Ihpn0?dRE^m8aId~Dx$l42*gX*34vK+aR6kc&JBPZO)hxhm<>#1R z#0b$Z7go(=$j%k*v-<-Yy1&$w@_E?IO-f18rnpXQe^Xm>CM6aA(crwf=&wnkDvxRH z<0p|^qM;`DPIDW*T$$0najbz@zdeqW4)=O*2(PrK-Jj70qg9{Xk`{*Ve0$ITh`{i_ 
zj`O}ha4w?wvUAy~-{}b!6(9*+XKF1$PqcZ5k2Fa{$NctDOI9(BvY%*_Wd1%VUAP(Z zeej8`S1J;)Q$Jw#DDFOO*~)hS=>^S&3@-7K1iDAn#e{W0cBHj>j5AE*>d=X-88M4V zOZ`h80c(3XPS0#uMR%l_O8qxR`Q(dC@w3nacI$mRiaz}fN&%@u#=n>+8f8|`n=G@5 zADACMK0MN+m{nab_JB*&H#%(;y-C%gE3SCY>AOj$!)8!w09<*6yo(_p*fx`B!Y>)h zB*XkP!e0-B#%MUR##}PE=Gr5li&_Pbc5%;wf1={X&8Ok`qg~O289f<>Ch{V^#H)## zyF7LVp+hqe=zIBncV#M?Ude?6yZQV32k?%lR!H99KQ5B1tb|?|Nt(3@KUPGvTeTj$ z77=oNexj2k`&0JS+OJLW1y^-T$4fCWLH-nM&u&cf(kp)4u>31qO*=&$AwWeD=mCoO zDPSv8U_m3Wx)ker%)XJZdu^7f> zd+(#H{IsonxhYxF?u)0zjf}Tv+P=R*7JZV+V*h-bfiPwk`eC#h5<|}{ENF^|{*pTS zFJy@Fhz#llAVl5i#;WsoRrlDb#tscKjNO@|a76>FsSj=T&E0%${9K_4!h+1+MI#rC zSi|sxkh6%j-j(cVU3CPy2eQ)6reHiB7#jFRrZHh{26ot#rc@}9ZoqoRURVeX1QSGl zh#^29jazN1?+%WXq>lN|jZh+}Z%KpwAU3}a@WTu0IDzO66EB3~nsLlPgq>wwdsBVt zt8cIRQn}lzKTB+0Z(Ya2^j@I(J1oh=W5m>`?LT>5#x112BW6mNDDF-Om+8Stz0^m^j!<9Inq^d`rEz1(8oLb)^cKyI^(YXcDjSk_bLoj~PGhl-vM~)l)k@3l zd8PFY9SJbB_7&Ol6SqGaCGcj%3^KEQAO#FU&+h^x%rOJfMY2tEH1g6@!qzDJHz&=~ z+Kx7-+kTgh&C;A&^t=$B68Gkv`4(8u(}fFDeS0%OxdnB5+e0h(XrS)sxJHDC`tX;D zXDn}3Fe6G*?6;LLY9`93U+aJ#bKdL|yLajSXPt76Iaz{0|6WW14{;KstsF%7hl#}} z$8fds-sZtZTO!VREI_p4Om?U*KY4r3Kr6A|c7f;A#g*UX31YT;s1L)bqi9joH$Bb{ zJMz(9za5S@B!?Hv6{yU~l4XBBS5faa%r4hWcW-fV=Uz!j#=&WY1G@=yeQ7V27pW<( zS3kX-)iZBH_A4secv?|pG?Q)F)7>*L_#pI7(*2#s>37%Fepm6no3{liTkk7S9s@q` z#v34zHtPx~jv?Ch7Jxo~1gH}lvD zDNP+Bf)cJ%vCEj~UH>2_y3C;YvD<=A^G)fgCoU$}nx`LQrp#&$JKDgWj}TE)zyOwDe-BSk~@itorcTH_>es((`Uk@Ab6PS+`) zeCg>Re;yb^wj}|9KUP0*p7eVkZPUy+++K3$AAYoSP0+u6G(@i!{}Sm#U^DLRVkgJ; zvihGlCn%o>Je+l#n$_zg%I~MM4f$U5dNcpkO{E>O+7j`;NPP|TLF1G}i;j)b_O30X zk7$47^~(4|@uqrZx1r>?9OhyxH6G(6R`IbY4gXd0cX#Ch8Qgd3rWdiNg0JOMi+tCE z&p^V+98|}A2|ze=8;u9W{rbA(%IZp6JBr;wAvKKTES^x-K z{>v3kEqG(t|LbKyq(*H+Kz#$On%GjHH~OCAX<>&JgTFKm`-`*S@Hdd%tBUTKA`8$@ z@-P^{Ej-s5NFeR~|NcKIA{N9!T%=j(jsNznq62j0)>t3@!({%i)aG>(5rVYY zbfIDT-yC=H21{6PM-%%EzAhsp`TD_pHl`;U!d5pn9PmGC0VuDNZ6_*-??i($`IaaL z}m`SazVtNAoF+|57{I#Sf6)gV4q?Z|jb#v4mXgJaR9|s!$ z`*}7?^kna2;qNS`|MNi5zn^FqPEjk`hTRes`2WlW2!(@m+%99FAtKrPR7%ucL&=1V zE0r_4kqr~m`d|3`G$X$=r^zt!dFE6k1pUXO{%`Ff%Z01U9)8#exj9oM7c=usHs432 z&{*)w*QaAlJa${7x~k|QF|rr^1qPt&7|gtWP-=BG!qF(`Bf-Mjg~w~8>m-00rLE50iZ|OnfkcKlSpLT zv)*YFLsQzQ&ize8sU0yzHkambN6K0^A_;T$k$3A2&G}5VqUakY4GWE(=6n<O_@ap}T^+vAx5&gGs@wd8Dqqxq2!=p8i4V>P4>b#>(->1dviT-`R?=GisN<7+Nls#e=m?{Q}ldAIGz^(QUTbaz=~`VT)9h-X5BvVJ6X_IS?0~mzKgzy z)Of3BAglaYqT;F%-*d3b33+^~4vMOK=Mgz<@>>SS zW={nieusY=q@<<2Nvo-Q`DB-1F+GzV`={LIei>0>e$7d3&v@>*Z7;}5CumxsT{aWS z`D>zhbE?-wdiW5snB8#L&{??w{+OfT5safaxh7~%h_uTSw$0yQWncB!Ezg#Wco_C}{T zj(x^+c!Ssj3pjuAc+bL12S51=G(B-PS2232vpZCrdAic;>nr1?>&5Di#6iS7d7Dxj zX}MOlk;^7NEPgh@_^yV!;P%GK#j!qZi3;1K%jzrAynhP|ch@jqaS7nL5vRs*g{2L& z=_KtoiLE*164foB+J{WdB2MA^iyOZ_Is)J8Hyb(Oe|12}bdR<9V$x4LKsCQI*t3U| z?ct&B5%>#n`PuM@fKn5M`O(LQ{Y8({g1^QMvUFWVYooREMZX7vxB;b)9qK&tHaVmp z?r)RY58?={avgK_ff^**wMW_|D0o+SFb%@PE;)RxXF04hm%MO}36rNQ!k9#hb~unK z2#{X?1S}uTfWW~*9V4>D?`Rtk$IG=tYI+}KRly%Cf4l-;kuly34rob|v?VXwf6KYQ z*{ozg)RbeoS&?qJx|Ts-N?Z&XuL|sO4wt(Rc$JNeY8L0f=O!%uoEK`L9d@Ywrm_tT zh0PEx;kdq|a%>I*)vX}F-DqC5t$8+qmHyI~G@X?@9iAOis&6m0%h#JlqoxJ>thp|^ zM+~3Em9uPSZ?jb*EveAsK;$!eD!|uHMi1tTld7r{%Y3p?((M|}-#fR@saCAL(zR7_ zV>+)OikH39Rr2G{BA>0+wNU*# z5noxqsiSTUC0Sv2>dNn~YMi2l4|!E{I7wXGu0lg_vkfx$ai=bzZ3d^X`CG3F#7sQ^ z=U0n0=YI=hM{Y)L(0BV<_qJAf3p2-O5zy zu@@ZOdSbp=8Pu^s)?&mv{i;DPdaR#xtM_tq=JY@&rBOUEqwZC3_6;k2dIc~N4QWQe zJLABRp|TQ{0jP^``8!Py{U|V0mdvax+u3kzIg{Ri}Ps~WF z*91S=TJWwBGbYS0%PlSL(uHS^ink z_%@A=G#IOGNgTE74|`ZkvQ)rycJ?@{=k%64OBpU{04x4$jm9wLXr$$P1it6UKUrJh zS#z5!2FA9N*W-Sr>F}&n^1v-kj|WD#v}gBaACmTZmzXE?{NQKy282hy)fP*C<@9mP 
zDp}K^7iXWyIU@3(SkF>fEnfgq!a@7r?b?x-rcr>J56#IqO+}jmTx#;AB)27M)Fez{k2DA9dzW7vb6?jb`XF9;JM5@fFqOonR z)8DIbX`NpnWvi{mK1t{OZ)BZ$f7?VcCfpn6=(uIqm_|3Xgnki5>eI_n!oYiWjmC9s z?FdjeQmQIxkXa&$QKm+zvt?5-i`*pZH|Kk#Rej7{zIT{1TI=L@Er}AwGN_CjGo|O~ zs!_Fiu>v0^s{#`j*%~4sR%B=0OPw{x4o`K8VCvt?%UXVPfnUXo&!o!t=hS7#PFKlZ zFcVIOc-1?W4RB9JNGst=!Z$IM+INa}%LtzD%8tz^$9t?3x)SXNyOMD@Ei?q^^{XJS zQBN%^U=u)sYKR7%GyO!9JsCT9qqlwDXAM37zJ0zNf-y68;Jh0v-+S>`n4!jyr3KAu zhXP%52KAVf-7h>A->3j6&Nf{I-^|0>7Ml~`JN(2QeSTbL&B=$CoZtL-c=crZolHu0 zysjiL2y4A|&XJXYP#E1yh(w}LD^)*W1?*3~E8|q}RUuWVp0lJ_sG4xG?q*Lpj~{Og zj_u~cX}QUtHrY*U>Gf0(K4B|!wZMk5igt*F>y9P)BzM`EJjajy{zar^My7qkWPRA= zxPux-!|lj;`VU-7f5NYE(be*F$!lfpeT_RQR~_E0ykk07twAvEPmKle5D-uR^D=mS z)3#he(|2Mf_Bn^sZP#V=Vs~^=$=Gg6f0W z=#s-u{aKW}Cv29!XBtg+u2Fm!)fhDlKH6id6S-;Dp@ZsBX;|4$ zpj$E zYbtkAPnib9_!(z2&_iW%@6Xb(2MlxZ|5u(n+4BNy_;01tzgNott&eN@Y?EzR=DW7w zB5r4=1xe#8R&s%XPh)JYyO~8f5fYW-w}Yoo*^R$zj_hDvE{ItYYy!M(tHcLyjaN3) z^Lx#kQ)XsB*S@CK$z@uGXsM_(jc`MxW;4A(b+Oy61eahCFV&6E9Nn;EvGfaS5CSj4 z8ES_}7MDr%3AqaqUa;ghl4G|=k6ky>z+S^Q!rx$8+p=^MB}6WzydpK_GLLIhK%|tf zia(b=A-gHm5f|CFG_Jf3J5~aoD%X$i#Yefa)RQ3;*Jffk_M8Dx$11|#A=dCND57APp>^&&FyHO-s++s?LHgfh)qv;&0{%lSX%0r3o5>8 zx4m+#{R=u*@FV{eX{Uf9y`7Vi_tPhMoSwc9?{X*YB!xcf>R*(2nCm^Xe4nCMkN<40 zLX^S^1mc;0sWTMva!l0T{N4(}dLeh+o^alEsCyRTp3+FrWu=!P1IXyXRKi%4D_3LJ zgmrI@U0g~@xi8#pf9syPVzI7Sa5Gy=!>zlYUnPJayeglE*5Uh8lhE8)_f8OTJwNTz6 zZasQp6LChz*qIda5(y!Mc5o40{T?1`<08JEQ>=WI#GzRDp=~5b=}jF@(qWnbb2j$&)Ja!w@cf#$nvt;PrV1z#kDto?gMm<-{gw ztBIEol-a4-#ZON=|9zRk_VIZcD4&!{GLL zczd9{s+{4_z_|eb!r6%9Ke{s{=x68)Ji8j$C4jIO6{y30G|;z^b*L_kW%x(Xpt{$Ze)w?R4`Yof++_c=z9@{h`f$_;|b^P3QPN5VG}BlT7X(oA zFMmMsQ4h#kcSwMfKfJ+JzOJ=vA~~g=zbKr0{)BAiC&ywRiixXahg|xDv+j=s?b=*u z2=W3R6oh}ND?+Li2!llbp4Wj`PkT;fE%q#xbSKC?@HVZ_dLHTO{r$33&F9l}7@q*& zj#O*#VSXZ9ZL%hxgH;JOnYe~ubo?`G9=;j2o}kN)lp=T47g7^}V4;a1xIyo;;v@6v z)z>Pc53jgi?`&`5kI&4!E2n>2fAe%e25J;r-#GX}hU%j6yyNDQa7!y}PPJgW-M?06 zBR(T5I=#JfYk3a6{Ju+(T+jah(wl&QfPggVz1PqYklsU0DAGv)A)0`Dy=TwtIdf*d`OevM_J4$#%w&?a zp7lJx`@XL$2Hh}3^qqo84l{%$c_qs%E-%2QAgwsng`_pX&SnH}fj>EfVq5RqwmJCS z_nkuhM!6I}qeBrH#{C86DBG&-oUrwf!HSG$1W(+FCO)eG57HX<*)w3=6K1vwE2b+m zT3OD&&nlblYsB|~<&ojf$M4B@?7-L~w9=D_j;pRnE@g@DW59u-+cf>@cDko;W6JKj zwQJ(6{F|X(>HBHyzg~tteEouyj3=H&5KhVw%5$ph+h|!-s4}#0?~-HkH)Z`SSHGw% zu?ZUnQ>u1^NZ;h6w_E1sl8NFfDb=p8R4SA&KdO9_L0rx_5i`u=l%*&r&)qKT*z{?O zA9v%#n0Nmpqql*j265w7Qtc%oH=y;YjkQq%(~cfp=^G^>0KA<#$x}h~ds93`5U{tX zHRy43pXyy@Q04ZAJ)W*RPdjiw1r}knKL_T`Q-{QdGoOBBi@5Q6;rs#c;aMtvt`8Lq zs*TI6M97EDu3Oi&kNXhArs0mCs1DuN8j{XO;Jo^yqr5Ku6Ai7p=hIr#Wk{$*R4S5D z8IF<1=_Sr1RO`ZQjR~-)!LZ`f({@Jd$a0Kc&gw9x_uXRjB+rid)SJBGNGoWKm} z{qvuly4ZP&M=J!N-QI5UweMah2AS|RTiPDk5`%7`m?gNWa1 z?1b#jzm$2Uur~k=1TJ2;7*N<&{|D)<92c4&zhRI(2ohTVU}ZvLBuM!B<^ke1jVFmn zdP~)e{2yop#lGlfL^sYlF)d6W;tN^^T|hr6*@q}t&tBBE!i9Gq(Gf^Hs_b!k{;NU9 z=Y0*Ib}B>{U%gA*efT}`pn{UtUTp5gSjOg8;p*JW1yyN6j@$P!qMAP~A#<_E3LaVi zu%cdO3XN7VRS2Ht>bpPU)h$fV-p6>w`|fi086iXB-2xQvR#1*|Q;}gPwLe^~HGjKb z@7I2IR9R6t(}1GvV6-4907WhpCyX99i0U&?SF4)rrXRib%mM zn8>4x?2fAh^Ug*j4M}#boW=k*^Jio>8D3~T;H0pg#$$T3dChCcfV_i;&`#7Ky_)z9 ziGBN~+Offo6BiyXGom%Br)0#6u4C}81tq5sx%Lche%h!lv!+~?4p`V*5xuDR4ctC^ zikI#77@mMN18SJzwqi$}@0B}~Ha{l#YBXFnsuF4Z(#*`D6HPCRx&l-h?u~f2L*y<3 z|Nc1ynfENKIoH{=inYECcpYe#O_aDCu`>7|p~?o$WXMe41ycrC(&fN)oHwQc%O8^s&=S9{f*UGE^2nxvk10QEw zMXsTe45q{m(su=kgTisL;<3}Ej<*I`D2s{Gm!pcSAL@?6I0oJ;EBIo{?`fqEfK#6~ z1aey5Nn6N$Kd{+vnHJ4rlsKTGhgbz#m}c<|1My4*c}>D|43)c>vPQmKZLyv0Zm>*M z82Zn?yRjq}Q*SCR ztegB)Y3*;-d7ONYS~!SSwBmF%EDJP+P=XiPsvJyPIpv#0s{DL^JzX=>R5^x4L&+)P zs$kT?-kL6C5j3#Ehrb?r3Gx^0OfbN{b3U=1`mJldY-+u@i0DBcOnWKCAhamQ!c$q;xM!jNu%4lYrSbgTpf-CGdK#Q0Y6!1!YlTgLy=MjZfQ*Hcr 
zI=;a3g3(~#W&Pb=_RpQqyn?^A%EZ)=l}4PUz}*SF`0!rj&EFEt{z?NaT&mo7(8tQ0 zy7v0|HulPTj50>aM0TONA^Kgc>+Og zpKD>OTIr;MwS_6xxHi-(ix-KBeNVZ4FY(~q-6G%L{OZw3*ZyXph0U<{{*cHC1Y1dB z4(x)^1_|wEQD+V7B?805p(z<9vi-Gm9VJe!7HKO}2FFWF+GJ^nerfF#dPcc!1DE2U zc@U$_FayZ{5Pnqyz%n9xmhJTJ9}|g&8OMRxOuDOo9b}hpNrqGeL172$6b7G ztn$t~vJ$~aa9vIsloI-U+yu~vb2<}ke);~dU%fFOWLjC$b9ZS5-#pkDX;Ye%f9^5) z@WmZD1UiW9qD&cvji<_vl!yft7vm!W;h3!0z1Hk zbZLUfa=J?K(se2bdH+Pb{l88L5^Kob5x*{Z*0)Y0g&*^4;?3n1f115b%BTZd@5_mC z){ln?-^`ZMOc@f`GBH@tCpAnf%v1y=8bNN`1kJt;jUQ0TUN|?zwW^l`CzXqz6bM&g6pwC_K52WuHs+yil&o?M)|Co5ZWZ3Bbh*2wy$r~-JCcCyw<6BQ>UtEs&DuVnbNQ-X z&2;&c#@x3VF+sL-5!X*6zMGFyj;Pxs9|mp~Qe z{t-`Ra;Fpa;hTI9A1%^(oU1W*$Q1T5L~={xN0yg6Qu7IOJy|S3s+#GR@{qSR`>7iK zh(A2#Nwn{l0gnSOWuPzm+cT>I@!Vh9yB93D_>ZX9YV0s>ktq~L0B34t{;?jy@+#|Q z!%TnSFMYVoAMpf!7`(Q~f?w`uYuoaVUldQ@|N0rk0z|)hfaX9G|2;E*#cHPkt=j5R zY&WmxQl|N?sE0L!W~p&;KT%fhyAH3PA%7~A69nTjJ~Q4|^$E%{f_&{@yvPeuARMh9 zb&n|Z261pR)3Se-TG?uDyL5+ z8e&0kDzMra!~{YpM#{>&n(YI)h9>U~RbPB5mH(2fCi;`j14qMvvi! zdSy5aBoXweOPlP$Oa;T-& zjP?rxuPdrOb*&r~0PgPK1(;%da30Pj{D7`7M#fs~73*xh|7n^F(Po6k$}*r@Mqb`9 zS&Yw3nU~^CQfpMlwTq&l2d=78c+nuKamrE*d(b_#P=^`x@0ZQ>@<|#LH3i?^N8XJC z8*yt$b=Y8ZjKTZ)Tt6-Zb35)$B4L$`uQ;}{E5|Tb*xt*=GYw*_#XMf!n1lN_GB_ux zq<$t})b+g`$)!K(RpxNR|2R8Dt=_y}&ku7R)+UXC&@0W?v&B5_Fs7bDz!YZK6rRVU`$ zKzyEee@T5S$3W`d(9j*}W?d1oM?ZL@mUG{$LK`oVg6MbNvIYAX&@ME@U6Qxdv`o40 zrwAQp>>J5&+f1?#CJ00>2(VLxFj2j}*==>%n?;X1i_d|sGALVPAvDXbE%ykXM|F!C zzRhh@d2bSUvqu^dAK=>(WxRS5euOU4J{&;HJST$v*zPNrc4bo`yjK z*?pN2)iqnaUmsXM3*3C*OsSR7wsmrQd6<#mx+pgHzf}w?-KFD72#X8UDSxT? z@yicW5trL9x<++h#JopoOgtYG#|*`%i6Nco*uaEPzjX*Lz^=5?$}ZaNv1uPKtH_Ji zZ$hyz^Md%6J|=&bspi{bjy?YrtUg_Tk*-V)&E0Nc_Ve4iL(?a3C;mQ-cWJIMX=zDO z?GR_r;Da$G>w3I7cr80;yI_K5>6lhT4$k9k04#NARdlgy}7#GqEeP(r5_pS$7x}) z*lcsJ%u`!*#i~dr_Gc3NROsDASn*5;xj(1fY#Y36kIwrc;Y0r7c1u3xqiPy!$EKJ7 zYg5HqvEeZOc=ebUztG!xX@{)ZeV4Z`B9&Qk28-3ANCsF2=I;ds{QZU*CR(5&<1PkeTPOz`EfoQuiEI2l_hi!JO!aZN^ zh_!d?6X!>`5=z70Eq+0CeYpF_Ih)8&Q1Ds&jhAaJ+iU44FN1Q-oK*($ksm^R6tG#7 zcdFI7sL5q4gt?yT_$e$cmhPqt3(MBh@Q$cTkVMB-Wf00Jf+yDU&`qo|otmOAqs)&{ z4<=~5P z@bCqh8s#eS>GC295ZxX*9w9rggp6S?YE)%`yZPl2O+$GgR)U?-*sFSPx-$E*OH+H| zh@4?8=Kvf=>l&sJNd`#+-z`lX02W=M!VX6Vz>Z>e<>s4HRsm@E=LV>hqi0QV za$&ZLH?O^lz$<$_*;Wf+-n%cU(0SKkJ4zV1S7~@L7xHJfa1vp2&pL4CQEeKzFjkG{ z)|qCLx(4-j-|H1^Y$LCt+ayyGnDeX$XtXDEN&%c@)m*TQ3a8Eli2^%zi{Q9}rl<^a zuaIeNdYNG~RdyaAq9of&ku2Y_P~Iz5^yW%9JMm?dN`7l}GDrlWagnG@hi5a!i!EpW!`JOAsCv76~-hgC^rp@^m_crXVeVzW%h1e5{;+BNpu>}``_xDRaJ;K zo2j?eAJE{S&NgWwnkM!9$_Npy7;Y~RH{c1T`173XDwp9!k}``u4uJO1w@H@m{#6-x z?|Bhfh90~!eOJ}`39;>m9W|@YQfXedX$%+n6_Ck#FlpUNC!iU>p&|_3VYl&Q(Tbd*r&U z^Wk5m@<#;3k57Xf{c0G_jwHhdG>4p;at%u=-{B+jeKpA=Wii~qBV2qO$p}xQmyS6? 
z5Q=IkiIC>iB#kR@<_Gf~h(h=)h@G2Pjl-aFoFmG;qXFUloO$uGXv}KcbC^SJ;mV_*;X;&EOScLrqETK{9EWv`i-xo=-rgz!jMX0=&{nJ^imK48D*A*8 zT}J!TvjBG~N5lD|;kJ|XKqgi8lg>A-Gfho_`FFP*owVgB^OSq)6AQkc40=U>z<*l)Sea|hssZ&37XJ9DR}^IRI0-o-1Dnb=)6m^xLOSIZY2EeSAB#VCd) z{L#CciYKK&HdH_4iV^OpA6aC-*M^Lf5^l}f4ZeKgmxDDBQn_^#eYYqHPRV0Z;-M;K zAo5)taK*DAVcP$zWtyR_v3{g3R|P~A}L5!pYo zuaz*;N%vpb>iXKb0hywB(OAF<{r?DN_rG>s2g2gAFzkvOS7MuP`$h`=f_=}RQ+TNH zq6%Y-i^V&Ju%bGKjHdW6WAQ`lxDi7}3mLGdOZHaqb=LJ&+;n#%H!X{)EFbD`-d>Fp$&IBS;?KO#Xnp?q1|IMpGmes_R!{`4$ zxOU?&_<$Nm4Vig?R*>d_hn*h!Q}Zx*_@njrgT-nh(@$9US(zh?OEQzcNh#CE8CwO_ z=hz-M{Uf^tB-7Fr0}!kK*0L$A+V$rW&i1Lkk{|5d{mQ#}J|FAi& zU3vh%enU4$} zV$K}DR7BUeKEi^BY#eqVMpDx)#R}yg*My7KV#fPkeh_D@R-=q>EvrSxQFVBZnYbxjZ07FE6Pj@Fy0*>k+ev$8VR(0QWP(q zcmS}=-u&-=Xy{yagg9d9uaHI@)NG^cKo+Q3qrT{*nn)AsIL_qFV`AR9sjg+tkX5{Q zZbVRRNY`bGaSZ)N^>fFBQe%9^mlx@FXpfPKJ|Y@5H$tNMn;Z{lg07RM0CkoIKjhb< zW#pfNCc8cSs(~{zW|I2Lw7uyLhauPb>%NJ0Z2HSLJO{&y1FK<+1&z7|MLR3ETk-tR zd!{RfU!9UickP=~AALs+^zsXHBV;)w(2+^?nJ-kz@Cs?|IW1Mk|Hu^23iV#c!{@KJl5YyU(*fJ^J+PRH zLRhbU4oD2q+L&Ar@9h%A>+~->pgITJ$M_`n0r+Zn{02j|nM?n;X>_a>YZhxH&bqXwvl{ThtH8JRdE-wePPp>bIScC3KarTWWY^WGjD4<`%q-Zv zK0p88>RpkE@2*VY&h`^7OyP~EAzv;tHDq?fSU%&1a@Q6*M9VyE6att%l$oySfBN#l zSwHKV)dobBV+E`@a^AOQxjV=Lj;n7BL3^BS8Dk7apSJz(*jzRA)93Faa7bBI+AP)D z@3C_9O^$v~fq6|eh4H5lMCgaQ8Q~Pb=6b6IxAFmNctK137}a5qaG0}kYdWyNJozGL zP|=Za?J>~E_qpbO_%L$oNlE-=M^XI7x5h+cwM@$(;l)+Mit={PmEFCi{V_Q^CGrpJ zv$?>&)|hT9h&fvmO~65Dqa_p0PDyNMIHNZ;nBUq-Ry+O? zjGRL8Z-LHpTS}_5EuiE=?oGGdODvDC|KLXv(?6tMTXw{2pW}Y78EO#Xy(p(oEcN=~ zvvDt_M?$^u-D_M6&YT2K2!-Jq7 zdH_l*GM+mna>OyX#9%!^nr7sArN`g0Odio^g8G{RMt&Ejs`?M#8MX*A@pyS$Y8N$- z#tWggE$ak?Vg0V&z~ddW)2=CpV=rCgx9LXrN^1ijJ$s%~w2_8SFNqh0Lqs-pQ(9lQ z@@>B>DL~onX2o!8`2Del%-xm4l^U=}aZ&uCfsj*((WA6VWtH~k!_{0WQf2pAZa1(x zfReN|g_Lb)`G2hr=CqYy$d9pJ??|HzC@HI{(Gcs=D%9`IdaLvF~f+de;%CMA08?WN*Gi1Bl))`$~FOMBbe zSb#J7l;MBi-25N8I{#PxcO@11OI11E*RY$G5WlE~p(Z#`#|m+$>Hp+#e*Q}fIc{W2 zq7=H2^-xW0X%32sl@LgcvxQ3)%6>X|D%p{ea`wI;2>2e_UJ*uTr1#5aCqGWM>IqML zEOBl$gaW==S(RCUy*&F#Mhc+MyweDz?s{Y zyLqhBm&n9_WCh5-=b(E)doMczxS_EIA0oE?yRX24fNp4R?~%H+{(p42!;U7-Kzbp; zc;-cC$Beobg6Ac*?WEt$q~T(Y+KmuIThGP(j>FqezgF0$B-NGIF>d=c68!p-Yq2o> zjl}2XdY`VMe$~Cyl|uywTemp=dd#s{jB1 literal 0 HcmV?d00001 From 061253ee0ed8e03752766e450aa18bdb5162c750 Mon Sep 17 00:00:00 2001 From: Julie Tibshirani Date: Mon, 30 Aug 2021 10:14:42 -0700 Subject: [PATCH 032/128] Break up and simplify TransportFieldCapabilitiesAction (#76958) `TransportFieldCapabilitiesAction` currently holds a lot of logic. This PR breaks it up into smaller pieces and simplifies its large `doExecute` method. Simplifying the class will help before we start to make field caps optimizations. Changes: * Factor some methods out of `doExecute` to reduce its length * Pull `AsyncShardAction` out into its own class to simplify and better match the code structure in 7.x --- .../AsyncFieldCapabilitiesShardsAction.java | 158 ++++++++++ .../TransportFieldCapabilitiesAction.java | 271 +++++------------- 2 files changed, 230 insertions(+), 199 deletions(-) create mode 100644 server/src/main/java/org/elasticsearch/action/fieldcaps/AsyncFieldCapabilitiesShardsAction.java diff --git a/server/src/main/java/org/elasticsearch/action/fieldcaps/AsyncFieldCapabilitiesShardsAction.java b/server/src/main/java/org/elasticsearch/action/fieldcaps/AsyncFieldCapabilitiesShardsAction.java new file mode 100644 index 0000000000000..61ad51a09078d --- /dev/null +++ b/server/src/main/java/org/elasticsearch/action/fieldcaps/AsyncFieldCapabilitiesShardsAction.java @@ -0,0 +1,158 @@ +/* + * Copyright Elasticsearch B.V. 
and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.action.fieldcaps; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.apache.logging.log4j.message.ParameterizedMessage; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.NoShardAvailableActionException; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.cluster.routing.GroupShardsIterator; +import org.elasticsearch.cluster.routing.ShardIterator; +import org.elasticsearch.cluster.routing.ShardRouting; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.logging.LoggerMessageFormat; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.transport.TransportException; +import org.elasticsearch.transport.TransportResponseHandler; +import org.elasticsearch.transport.TransportService; + +import java.io.IOException; +import java.util.Collections; + +import static org.elasticsearch.action.support.TransportActions.isShardNotAvailableException; + +/** + * An action that executes on each shard sequentially until it finds one that can match the provided + * {@link FieldCapabilitiesIndexRequest#indexFilter()}. In which case the shard is used + * to create the final {@link FieldCapabilitiesIndexResponse}. 
+ */ +class AsyncFieldCapabilitiesShardsAction { + private static final Logger logger = LogManager.getLogger(AsyncFieldCapabilitiesShardsAction.class); + + private final FieldCapabilitiesIndexRequest request; + private final TransportService transportService; + private final DiscoveryNodes nodes; + private final ActionListener listener; + private final GroupShardsIterator shardsIt; + + private volatile int shardIndex = 0; + + AsyncFieldCapabilitiesShardsAction(TransportService transportService, + ClusterService clusterService, + FieldCapabilitiesIndexRequest request, + ActionListener listener) { + this.listener = listener; + this.transportService = transportService; + + ClusterState clusterState = clusterService.state(); + if (logger.isTraceEnabled()) { + logger.trace("executing [{}] based on cluster state version [{}]", request, + clusterState.version()); + } + nodes = clusterState.nodes(); + this.request = request; + + shardsIt = clusterService.operationRouting().searchShards(clusterService.state(), + new String[]{request.index()}, null, null, null, null); + } + + public void start() { + tryNext(null, true); + } + + private void onFailure(ShardRouting shardRouting, Exception e) { + if (e != null) { + logger.trace(() -> new ParameterizedMessage("{}: failed to execute [{}]", shardRouting, + request), e); + } + tryNext(e, false); + } + + private ShardRouting nextRoutingOrNull() { + if (shardsIt.size() == 0 || shardIndex >= shardsIt.size()) { + return null; + } + ShardRouting next = shardsIt.get(shardIndex).nextOrNull(); + if (next != null) { + return next; + } + moveToNextShard(); + return nextRoutingOrNull(); + } + + private void moveToNextShard() { + ++shardIndex; + } + + private void tryNext(@Nullable final Exception lastFailure, boolean canMatchShard) { + ShardRouting shardRouting = nextRoutingOrNull(); + if (shardRouting == null) { + if (canMatchShard == false) { + if (lastFailure == null) { + listener.onResponse(new FieldCapabilitiesIndexResponse(request.index(), Collections.emptyMap(), false)); + } else { + logger.debug(() -> new ParameterizedMessage("{}: failed to execute [{}]", null, + request), lastFailure); + listener.onFailure(lastFailure); + } + } else { + if (lastFailure == null || isShardNotAvailableException(lastFailure)) { + listener.onFailure(new NoShardAvailableActionException(null, + LoggerMessageFormat.format("No shard available for [{}]", request), lastFailure)); + } else { + logger.debug(() -> new ParameterizedMessage("{}: failed to execute [{}]", null, + request), lastFailure); + listener.onFailure(lastFailure); + } + } + return; + } + DiscoveryNode node = nodes.get(shardRouting.currentNodeId()); + if (node == null) { + onFailure(shardRouting, new NoShardAvailableActionException(shardRouting.shardId())); + } else { + request.shardId(shardRouting.shardId()); + if (logger.isTraceEnabled()) { + logger.trace( + "sending request [{}] on node [{}]", + request, + node + ); + } + transportService.sendRequest(node, TransportFieldCapabilitiesAction.ACTION_SHARD_NAME, request, + new TransportResponseHandler() { + + @Override + public FieldCapabilitiesIndexResponse read(StreamInput in) throws IOException { + return new FieldCapabilitiesIndexResponse(in); + } + + @Override + public void handleResponse(final FieldCapabilitiesIndexResponse response) { + if (response.canMatch()) { + listener.onResponse(response); + } else { + moveToNextShard(); + tryNext(null, false); + } + } + + @Override + public void handleException(TransportException exp) { + onFailure(shardRouting, exp); + } + 
}); + } + } +} diff --git a/server/src/main/java/org/elasticsearch/action/fieldcaps/TransportFieldCapabilitiesAction.java b/server/src/main/java/org/elasticsearch/action/fieldcaps/TransportFieldCapabilitiesAction.java index 3f39d47613002..e997ca1af0a56 100644 --- a/server/src/main/java/org/elasticsearch/action/fieldcaps/TransportFieldCapabilitiesAction.java +++ b/server/src/main/java/org/elasticsearch/action/fieldcaps/TransportFieldCapabilitiesAction.java @@ -10,34 +10,23 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.apache.logging.log4j.message.ParameterizedMessage; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRunnable; -import org.elasticsearch.action.NoShardAvailableActionException; import org.elasticsearch.action.OriginalIndices; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.ChannelActionListener; import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.client.Client; import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; -import org.elasticsearch.cluster.node.DiscoveryNode; -import org.elasticsearch.cluster.node.DiscoveryNodes; -import org.elasticsearch.cluster.routing.GroupShardsIterator; -import org.elasticsearch.cluster.routing.ShardIterator; -import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.logging.LoggerMessageFormat; -import org.elasticsearch.core.Nullable; -import org.elasticsearch.core.Tuple; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.util.concurrent.CountDown; +import org.elasticsearch.core.Tuple; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.mapper.MappedFieldType; @@ -56,9 +45,7 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.RemoteClusterAware; import org.elasticsearch.transport.TransportChannel; -import org.elasticsearch.transport.TransportException; import org.elasticsearch.transport.TransportRequestHandler; -import org.elasticsearch.transport.TransportResponseHandler; import org.elasticsearch.transport.TransportService; import java.io.IOException; @@ -72,12 +59,8 @@ import java.util.Set; import java.util.function.Predicate; -import static org.elasticsearch.action.support.TransportActions.isShardNotAvailableException; - public class TransportFieldCapabilitiesAction extends HandledTransportAction { - - private static final String ACTION_SHARD_NAME = FieldCapabilitiesAction.NAME + "[index][s]"; - + static final String ACTION_SHARD_NAME = FieldCapabilitiesAction.NAME + "[index][s]"; private static final Logger logger = LogManager.getLogger(TransportFieldCapabilitiesAction.class); private final ThreadPool threadPool; @@ -121,42 +104,19 @@ protected void doExecute(Task task, FieldCapabilitiesRequest request, final Acti } else { concreteIndices = indexNameExpressionResolver.concreteIndexNames(clusterState, localIndices); } + + checkIndexBlocks(clusterState, concreteIndices); + final int totalNumRequest = 
concreteIndices.length + remoteClusterIndices.size(); if (totalNumRequest == 0) { listener.onResponse(new FieldCapabilitiesResponse(new String[0], Collections.emptyMap())); return; } - final CountDown completionCounter = new CountDown(totalNumRequest); final List indexResponses = Collections.synchronizedList(new ArrayList<>()); final FailureCollector indexFailures = new FailureCollector(); - final Runnable countDown = () -> { - if (completionCounter.countDown()) { - List failures = indexFailures.values(); - if (indexResponses.size() > 0) { - if (request.isMergeResults()) { - // fork off to the management pool for merging the responses as the operation can run for longer than is acceptable - // on a transport thread in case of large numbers of indices and/or fields - threadPool.executor(ThreadPool.Names.MANAGEMENT).submit( - ActionRunnable.supply( - listener, - () -> merge(indexResponses, request.includeUnmapped(), new ArrayList<>(failures))) - ); - } else { - listener.onResponse(new FieldCapabilitiesResponse(indexResponses, new ArrayList<>(failures))); - } - } else { - // we have no responses at all, maybe because of errors - if (indexFailures.size() > 0) { - // throw back the first exception - listener.onFailure(failures.iterator().next().getException()); - } else { - listener.onResponse(new FieldCapabilitiesResponse(Collections.emptyList(), Collections.emptyList())); - } - } - } - }; + final Runnable countDown = createResponseMerger(request, totalNumRequest, indexResponses, indexFailures, listener); if (concreteIndices.length > 0) { // fork this action to the management pool as it can fan out to a large number of child requests that get handled on SAME and @@ -164,17 +124,10 @@ protected void doExecute(Task task, FieldCapabilitiesRequest request, final Acti // (particularly with security enabled) threadPool.executor(ThreadPool.Names.MANAGEMENT).execute(ActionRunnable.wrap(listener, l -> { for (String index : concreteIndices) { - new AsyncShardsAction( + new AsyncFieldCapabilitiesShardsAction( transportService, clusterService, - new FieldCapabilitiesIndexRequest( - request.fields(), - index, - localIndices, - request.indexFilter(), - nowInMillis, - request.runtimeFields() - ), + prepareLocalIndexRequest(request, index, localIndices, nowInMillis), new ActionListener<>() { @Override public void onResponse(FieldCapabilitiesIndexResponse result) { @@ -201,14 +154,7 @@ public void onFailure(Exception e) { String clusterAlias = remoteIndices.getKey(); OriginalIndices originalIndices = remoteIndices.getValue(); Client remoteClusterClient = transportService.getRemoteClusterService().getRemoteClusterClient(threadPool, clusterAlias); - FieldCapabilitiesRequest remoteRequest = new FieldCapabilitiesRequest(); - remoteRequest.setMergeResults(false); // we need to merge on this node - remoteRequest.indicesOptions(originalIndices.indicesOptions()); - remoteRequest.indices(originalIndices.indices()); - remoteRequest.fields(request.fields()); - remoteRequest.runtimeFields(request.runtimeFields()); - remoteRequest.indexFilter(request.indexFilter()); - remoteRequest.nowInMillis(nowInMillis); + FieldCapabilitiesRequest remoteRequest = prepareRemoteRequest(request, originalIndices, nowInMillis); remoteClusterClient.fieldCaps(remoteRequest, ActionListener.wrap(response -> { for (FieldCapabilitiesIndexResponse resp : response.getIndexResponses()) { indexResponses.add( @@ -232,6 +178,69 @@ public void onFailure(Exception e) { } } + private void checkIndexBlocks(ClusterState clusterState, String[] 
concreteIndices) { + clusterState.blocks().globalBlockedRaiseException(ClusterBlockLevel.READ); + for (String index : concreteIndices) { + clusterState.blocks().indexBlockedRaiseException(ClusterBlockLevel.READ, index); + } + } + + private Runnable createResponseMerger(FieldCapabilitiesRequest request, + int totalNumRequests, + List indexResponses, + FailureCollector indexFailures, + ActionListener listener) { + final CountDown completionCounter = new CountDown(totalNumRequests); + return () -> { + if (completionCounter.countDown()) { + List failures = indexFailures.values(); + if (indexResponses.size() > 0) { + if (request.isMergeResults()) { + // fork off to the management pool for merging the responses as the operation can run for longer than is acceptable + // on a transport thread in case of large numbers of indices and/or fields + threadPool.executor(ThreadPool.Names.MANAGEMENT).submit( + ActionRunnable.supply( + listener, + () -> merge(indexResponses, request.includeUnmapped(), new ArrayList<>(failures))) + ); + } else { + listener.onResponse(new FieldCapabilitiesResponse(indexResponses, new ArrayList<>(failures))); + } + } else { + // we have no responses at all, maybe because of errors + if (indexFailures.size() > 0) { + // throw back the first exception + listener.onFailure(failures.iterator().next().getException()); + } else { + listener.onResponse(new FieldCapabilitiesResponse(Collections.emptyList(), Collections.emptyList())); + } + } + } + }; + } + + private static FieldCapabilitiesIndexRequest prepareLocalIndexRequest(FieldCapabilitiesRequest request, + String index, + OriginalIndices originalIndices, + long nowInMillis) { + return new FieldCapabilitiesIndexRequest(request.fields(), index, originalIndices, + request.indexFilter(), nowInMillis, request.runtimeFields()); + } + + private static FieldCapabilitiesRequest prepareRemoteRequest(FieldCapabilitiesRequest request, + OriginalIndices originalIndices, + long nowInMillis) { + FieldCapabilitiesRequest remoteRequest = new FieldCapabilitiesRequest(); + remoteRequest.setMergeResults(false); // we need to merge on this node + remoteRequest.indicesOptions(originalIndices.indicesOptions()); + remoteRequest.indices(originalIndices.indices()); + remoteRequest.fields(request.fields()); + remoteRequest.runtimeFields(request.runtimeFields()); + remoteRequest.indexFilter(request.indexFilter()); + remoteRequest.nowInMillis(nowInMillis); + return remoteRequest; + } + private FieldCapabilitiesResponse merge( List indexResponses, boolean includeUnmapped, @@ -317,14 +326,6 @@ int size() { } } - private static ClusterBlockException checkGlobalBlock(ClusterState state) { - return state.blocks().globalBlockedException(ClusterBlockLevel.READ); - } - - private static ClusterBlockException checkRequestBlock(ClusterState state, String concreteIndex) { - return state.blocks().indexBlockedException(ClusterBlockLevel.READ, concreteIndex); - } - private FieldCapabilitiesIndexResponse shardOperation(final FieldCapabilitiesIndexRequest request) throws IOException { final ShardId shardId = request.shardId(); final IndexService indexService = indicesService.indexServiceSafe(shardId.getIndex()); @@ -397,134 +398,6 @@ private boolean canMatchShard(FieldCapabilitiesIndexRequest req, SearchExecution return SearchService.queryStillMatchesAfterRewrite(searchRequest, searchExecutionContext); } - /** - * An action that executes on each shard sequentially until it finds one that can match the provided - * {@link FieldCapabilitiesIndexRequest#indexFilter()}. 
In which case the shard is used - * to create the final {@link FieldCapabilitiesIndexResponse}. - */ - public static class AsyncShardsAction { - private final FieldCapabilitiesIndexRequest request; - private final TransportService transportService; - private final DiscoveryNodes nodes; - private final ActionListener listener; - private final GroupShardsIterator shardsIt; - - private volatile int shardIndex = 0; - - public AsyncShardsAction(TransportService transportService, - ClusterService clusterService, - FieldCapabilitiesIndexRequest request, - ActionListener listener) { - this.listener = listener; - this.transportService = transportService; - - ClusterState clusterState = clusterService.state(); - if (logger.isTraceEnabled()) { - logger.trace("executing [{}] based on cluster state version [{}]", request, clusterState.version()); - } - nodes = clusterState.nodes(); - ClusterBlockException blockException = checkGlobalBlock(clusterState); - if (blockException != null) { - throw blockException; - } - - this.request = request; - blockException = checkRequestBlock(clusterState, request.index()); - if (blockException != null) { - throw blockException; - } - - shardsIt = clusterService.operationRouting().searchShards(clusterService.state(), - new String[]{request.index()}, null, null, null, null); - } - - public void start() { - tryNext(null, true); - } - - private void onFailure(ShardRouting shardRouting, Exception e) { - if (e != null) { - logger.trace(() -> new ParameterizedMessage("{}: failed to execute [{}]", shardRouting, request), e); - } - tryNext(e, false); - } - - private ShardRouting nextRoutingOrNull() { - if (shardsIt.size() == 0 || shardIndex >= shardsIt.size()) { - return null; - } - ShardRouting next = shardsIt.get(shardIndex).nextOrNull(); - if (next != null) { - return next; - } - moveToNextShard(); - return nextRoutingOrNull(); - } - - private void moveToNextShard() { - ++ shardIndex; - } - - private void tryNext(@Nullable final Exception lastFailure, boolean canMatchShard) { - ShardRouting shardRouting = nextRoutingOrNull(); - if (shardRouting == null) { - if (canMatchShard == false) { - if (lastFailure == null) { - listener.onResponse(new FieldCapabilitiesIndexResponse(request.index(), Collections.emptyMap(), false)); - } else { - logger.debug(() -> new ParameterizedMessage("{}: failed to execute [{}]", null, request), lastFailure); - listener.onFailure(lastFailure); - } - } else { - if (lastFailure == null || isShardNotAvailableException(lastFailure)) { - listener.onFailure(new NoShardAvailableActionException(null, - LoggerMessageFormat.format("No shard available for [{}]", request), lastFailure)); - } else { - logger.debug(() -> new ParameterizedMessage("{}: failed to execute [{}]", null, request), lastFailure); - listener.onFailure(lastFailure); - } - } - return; - } - DiscoveryNode node = nodes.get(shardRouting.currentNodeId()); - if (node == null) { - onFailure(shardRouting, new NoShardAvailableActionException(shardRouting.shardId())); - } else { - request.shardId(shardRouting.shardId()); - if (logger.isTraceEnabled()) { - logger.trace( - "sending request [{}] on node [{}]", - request, - node - ); - } - transportService.sendRequest(node, ACTION_SHARD_NAME, request, - new TransportResponseHandler() { - - @Override - public FieldCapabilitiesIndexResponse read(StreamInput in) throws IOException { - return new FieldCapabilitiesIndexResponse(in); - } - - @Override - public void handleResponse(final FieldCapabilitiesIndexResponse response) { - if (response.canMatch()) { - 
listener.onResponse(response); - } else { - moveToNextShard(); - tryNext(null, false); - } - } - - @Override - public void handleException(TransportException exp) { - onFailure(shardRouting, exp); - } - }); - } - } - } - private class ShardTransportHandler implements TransportRequestHandler { @Override public void messageReceived(final FieldCapabilitiesIndexRequest request, From 7c5bd0373862e917ae2d8a78afa897b3034703fe Mon Sep 17 00:00:00 2001 From: Gordon Brown Date: Mon, 30 Aug 2021 11:34:42 -0600 Subject: [PATCH 033/128] Add `replace` shutdown type (#75908) This PR adds the `REPLACE` shutdown type. As of this PR, `REPLACE` behaves identically to `REMOVE`. Co-authored-by: Lee Hinman Co-authored-by: Elastic Machine --- .../cluster/metadata/SingleNodeShutdownMetadata.java | 5 ++++- .../allocation/decider/NodeShutdownAllocationDecider.java | 2 ++ .../org/elasticsearch/shutdown/PluginShutdownService.java | 8 +++++--- .../decider/NodeShutdownAllocationDeciderTests.java | 6 +++--- .../xpack/core/ilm/CheckShrinkReadyStep.java | 5 ++++- .../xpack/core/ilm/CheckShrinkReadyStepTests.java | 4 ++-- .../elasticsearch/xpack/ilm/IndexLifecycleService.java | 4 +++- .../xpack/ilm/IndexLifecycleServiceTests.java | 2 +- .../xpack/shutdown/TransportPutShutdownNodeAction.java | 3 ++- .../xpack/shutdown/GetShutdownStatusResponseTests.java | 2 +- 10 files changed, 27 insertions(+), 14 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/SingleNodeShutdownMetadata.java b/server/src/main/java/org/elasticsearch/cluster/metadata/SingleNodeShutdownMetadata.java index 27789d27fd9c7..266b36c7c312d 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/SingleNodeShutdownMetadata.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/SingleNodeShutdownMetadata.java @@ -316,13 +316,16 @@ public SingleNodeShutdownMetadata build() { */ public enum Type { REMOVE, - RESTART; + RESTART, + REPLACE; public static Type parse(String type) { if ("remove".equals(type.toLowerCase(Locale.ROOT))) { return REMOVE; } else if ("restart".equals(type.toLowerCase(Locale.ROOT))) { return RESTART; + } else if ("replace".equals(type.toLowerCase(Locale.ROOT))) { + return REPLACE; } else { throw new IllegalArgumentException("unknown shutdown type: " + type); } diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/NodeShutdownAllocationDecider.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/NodeShutdownAllocationDecider.java index 5a1391d39fe74..450937fc60a4c 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/NodeShutdownAllocationDecider.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/NodeShutdownAllocationDecider.java @@ -46,6 +46,7 @@ public Decision canAllocate(ShardRouting shardRouting, RoutingNode node, Routing } switch (thisNodeShutdownMetadata.getType()) { + case REPLACE: case REMOVE: return allocation.decision(Decision.NO, NAME, "node [%s] is preparing to be removed from the cluster", node.nodeId()); case RESTART: @@ -98,6 +99,7 @@ public Decision shouldAutoExpandToNode(IndexMetadata indexMetadata, DiscoveryNod "node [%s] is preparing to restart, auto-expansion waiting until it is complete", node.getId() ); + case REPLACE: case REMOVE: return allocation.decision(Decision.NO, NAME, "node [%s] is preparing for removal from the cluster", node.getId()); default: diff --git a/server/src/main/java/org/elasticsearch/shutdown/PluginShutdownService.java 
b/server/src/main/java/org/elasticsearch/shutdown/PluginShutdownService.java index eb953beddcb39..894af78108d17 100644 --- a/server/src/main/java/org/elasticsearch/shutdown/PluginShutdownService.java +++ b/server/src/main/java/org/elasticsearch/shutdown/PluginShutdownService.java @@ -26,6 +26,7 @@ import org.elasticsearch.core.Nullable; import org.elasticsearch.plugins.ShutdownAwarePlugin; +import java.util.Arrays; import java.util.Collections; import java.util.List; import java.util.Map; @@ -56,13 +57,14 @@ public static Set shutdownNodes(final ClusterState clusterState) { } /** - * Return all nodes shutting down with the given shutdown type from the given cluster state + * Return all nodes shutting down with the given shutdown types from the given cluster state */ - public static Set shutdownTypeNodes(final ClusterState clusterState, final SingleNodeShutdownMetadata.Type shutdownType) { + public static Set shutdownTypeNodes(final ClusterState clusterState, final SingleNodeShutdownMetadata.Type... shutdownTypes) { + Set types = Arrays.stream(shutdownTypes).collect(Collectors.toSet()); return NodesShutdownMetadata.getShutdowns(clusterState) .map(NodesShutdownMetadata::getAllNodeMetadataMap) .map(m -> m.entrySet().stream() - .filter(e -> e.getValue().getType() == shutdownType) + .filter(e -> types.contains(e.getValue().getType())) .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue))) .map(Map::keySet) .orElse(Collections.emptySet()); diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/NodeShutdownAllocationDeciderTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/NodeShutdownAllocationDeciderTests.java index c74d5ee68e4ea..4112caaca3ece 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/NodeShutdownAllocationDeciderTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/NodeShutdownAllocationDeciderTests.java @@ -98,7 +98,7 @@ public void testCanAllocateShardsToRestartingNode() { public void testCannotAllocateShardsToRemovingNode() { ClusterState state = prepareState( service.reroute(ClusterState.EMPTY_STATE, "initial state"), - SingleNodeShutdownMetadata.Type.REMOVE + randomFrom(SingleNodeShutdownMetadata.Type.REMOVE, SingleNodeShutdownMetadata.Type.REPLACE) ); RoutingAllocation allocation = new RoutingAllocation(allocationDeciders, state.getRoutingNodes(), state, null, null, 0); RoutingNode routingNode = new RoutingNode(DATA_NODE.getId(), DATA_NODE, shard); @@ -132,7 +132,7 @@ public void testShardsCanRemainOnRestartingNode() { public void testShardsCannotRemainOnRemovingNode() { ClusterState state = prepareState( service.reroute(ClusterState.EMPTY_STATE, "initial state"), - SingleNodeShutdownMetadata.Type.REMOVE + randomFrom(SingleNodeShutdownMetadata.Type.REMOVE, SingleNodeShutdownMetadata.Type.REPLACE) ); RoutingAllocation allocation = new RoutingAllocation(allocationDeciders, state.getRoutingNodes(), state, null, null, 0); RoutingNode routingNode = new RoutingNode(DATA_NODE.getId(), DATA_NODE, shard); @@ -165,7 +165,7 @@ public void testCannotAutoExpandToRestartingNode() { public void testCannotAutoExpandToRemovingNode() { ClusterState state = prepareState( service.reroute(ClusterState.EMPTY_STATE, "initial state"), - SingleNodeShutdownMetadata.Type.REMOVE + randomFrom(SingleNodeShutdownMetadata.Type.REMOVE, SingleNodeShutdownMetadata.Type.REPLACE) ); RoutingAllocation allocation = new RoutingAllocation(allocationDeciders, 
state.getRoutingNodes(), state, null, null, 0); allocation.debugDecision(true); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/CheckShrinkReadyStep.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/CheckShrinkReadyStep.java index 6b59cebb51134..e77396b3e3f9f 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/CheckShrinkReadyStep.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/CheckShrinkReadyStep.java @@ -76,7 +76,10 @@ public Result isConditionMet(Index index, ClusterState clusterState) { boolean nodeBeingRemoved = NodesShutdownMetadata.getShutdowns(clusterState) .map(NodesShutdownMetadata::getAllNodeMetadataMap) .map(shutdownMetadataMap -> shutdownMetadataMap.get(idShardsShouldBeOn)) - .map(singleNodeShutdown -> singleNodeShutdown.getType() == SingleNodeShutdownMetadata.Type.REMOVE) + .map( + singleNodeShutdown -> singleNodeShutdown.getType() == SingleNodeShutdownMetadata.Type.REMOVE + || singleNodeShutdown.getType() == SingleNodeShutdownMetadata.Type.REPLACE + ) .orElse(false); final IndexRoutingTable routingTable = clusterState.getRoutingTable().index(index); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/CheckShrinkReadyStepTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/CheckShrinkReadyStepTests.java index 00fb39ac1b682..b45fef8b082cb 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/CheckShrinkReadyStepTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/CheckShrinkReadyStepTests.java @@ -361,7 +361,7 @@ public void testStepCompletableIfAllShardsActive() { .indices(indices.build()) .putCustom(NodesShutdownMetadata.TYPE, new NodesShutdownMetadata(Collections.singletonMap("node1", SingleNodeShutdownMetadata.builder() - .setType(SingleNodeShutdownMetadata.Type.REMOVE) + .setType(randomFrom(SingleNodeShutdownMetadata.Type.REMOVE, SingleNodeShutdownMetadata.Type.REPLACE)) .setStartedAtMillis(randomNonNegativeLong()) .setReason("test") .setNodeId("node1") @@ -412,7 +412,7 @@ public void testStepBecomesUncompletable() { .indices(indices.build()) .putCustom(NodesShutdownMetadata.TYPE, new NodesShutdownMetadata(Collections.singletonMap("node1", SingleNodeShutdownMetadata.builder() - .setType(SingleNodeShutdownMetadata.Type.REMOVE) + .setType(randomFrom(SingleNodeShutdownMetadata.Type.REMOVE, SingleNodeShutdownMetadata.Type.REPLACE)) .setStartedAtMillis(randomNonNegativeLong()) .setReason("test") .setNodeId("node1") diff --git a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/IndexLifecycleService.java b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/IndexLifecycleService.java index 42231a2eac2dd..9877b451b660f 100644 --- a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/IndexLifecycleService.java +++ b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/IndexLifecycleService.java @@ -423,7 +423,8 @@ PolicyStepsRegistry getPolicyRegistry() { } static Set indicesOnShuttingDownNodesInDangerousStep(ClusterState state, String nodeId) { - final Set shutdownNodes = PluginShutdownService.shutdownTypeNodes(state, SingleNodeShutdownMetadata.Type.REMOVE); + final Set shutdownNodes = PluginShutdownService.shutdownTypeNodes(state, + SingleNodeShutdownMetadata.Type.REMOVE, SingleNodeShutdownMetadata.Type.REPLACE); if (shutdownNodes.isEmpty()) { return Collections.emptySet(); } @@ -461,6 +462,7 @@ public boolean 
safeToShutdown(String nodeId, SingleNodeShutdownMetadata.Type shu case RESTART: // It is safe to restart during ILM operation return true; + case REPLACE: case REMOVE: Set indices = indicesOnShuttingDownNodesInDangerousStep(clusterService.state(), nodeId); return indices.isEmpty(); diff --git a/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/IndexLifecycleServiceTests.java b/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/IndexLifecycleServiceTests.java index eef4337f11ce4..7a5efada07fae 100644 --- a/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/IndexLifecycleServiceTests.java +++ b/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/IndexLifecycleServiceTests.java @@ -587,7 +587,7 @@ public void testIndicesOnShuttingDownNodesInDangerousStep() { .setNodeId("shutdown_node") .setReason("shut down for test") .setStartedAtMillis(randomNonNegativeLong()) - .setType(SingleNodeShutdownMetadata.Type.REMOVE) + .setType(randomFrom(SingleNodeShutdownMetadata.Type.REMOVE, SingleNodeShutdownMetadata.Type.REPLACE)) .build()))) .build()) .build(); diff --git a/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/TransportPutShutdownNodeAction.java b/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/TransportPutShutdownNodeAction.java index fc0e42e29db8a..493249afb29cc 100644 --- a/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/TransportPutShutdownNodeAction.java +++ b/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/TransportPutShutdownNodeAction.java @@ -109,7 +109,8 @@ public void onFailure(String source, Exception e) { @Override public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) { - if (SingleNodeShutdownMetadata.Type.REMOVE.equals(request.getType())) { + if (SingleNodeShutdownMetadata.Type.REMOVE.equals(request.getType()) + || SingleNodeShutdownMetadata.Type.REPLACE.equals(request.getType())) { clusterService.getRerouteService() .reroute("node registered for removal from cluster", Priority.NORMAL, new ActionListener() { @Override diff --git a/x-pack/plugin/shutdown/src/test/java/org/elasticsearch/xpack/shutdown/GetShutdownStatusResponseTests.java b/x-pack/plugin/shutdown/src/test/java/org/elasticsearch/xpack/shutdown/GetShutdownStatusResponseTests.java index e6f651683b9ea..58ef961ad04d6 100644 --- a/x-pack/plugin/shutdown/src/test/java/org/elasticsearch/xpack/shutdown/GetShutdownStatusResponseTests.java +++ b/x-pack/plugin/shutdown/src/test/java/org/elasticsearch/xpack/shutdown/GetShutdownStatusResponseTests.java @@ -48,7 +48,7 @@ protected GetShutdownStatusAction.Response mutateInstance(GetShutdownStatusActio public static SingleNodeShutdownMetadata randomNodeShutdownMetadata() { return SingleNodeShutdownMetadata.builder() .setNodeId(randomAlphaOfLength(5)) - .setType(randomBoolean() ? 
SingleNodeShutdownMetadata.Type.REMOVE : SingleNodeShutdownMetadata.Type.RESTART) + .setType(randomFrom(EnumSet.allOf(SingleNodeShutdownMetadata.Type.class))) .setReason(randomAlphaOfLength(5)) .setStartedAtMillis(randomNonNegativeLong()) .build(); From d71544976608bdb53fa4d29521fb328e1033ee2f Mon Sep 17 00:00:00 2001 From: Mark Tozzi Date: Mon, 30 Aug 2021 13:42:20 -0400 Subject: [PATCH 034/128] Apply spotless to aggregations (#76682) * spotless apply for core analytics code * spotless apply for x-pack:plugin:analytics * spotless apply for matrix stats * don't format huge lists of HLL constants * Spotless for Rollups * Don't exclude HLLPP, we added narrower inline exclusions --- .../groovy/elasticsearch.formatting.gradle | 8 +- .../matrix/MatrixAggregationPlugin.java | 6 +- .../spi/MatrixStatsNamedXContentProvider.java | 2 +- .../matrix/stats/InternalMatrixStats.java | 14 +- .../matrix/stats/MatrixStats.java | 7 + .../stats/MatrixStatsAggregationBuilder.java | 20 +- .../matrix/stats/MatrixStatsAggregator.java | 15 +- .../stats/MatrixStatsAggregatorFactory.java | 31 +- .../matrix/stats/MatrixStatsParser.java | 19 +- .../matrix/stats/MatrixStatsResults.java | 5 +- .../matrix/stats/ParsedMatrixStats.java | 43 +- .../matrix/stats/RunningStats.java | 36 +- .../ArrayValuesSourceAggregationBuilder.java | 61 +- .../ArrayValuesSourceAggregatorFactory.java | 15 +- .../support/ArrayValuesSourceParser.java | 125 +- .../stats/InternalMatrixStatsTests.java | 74 +- .../stats/MatrixStatsAggregatorTests.java | 27 +- .../matrix/stats/MultiPassStats.java | 8 +- .../matrix/stats/RunningStatsTests.java | 6 +- .../MatrixStatsClientYamlTestSuiteIT.java | 3 +- .../AggregationsIntegrationIT.java | 11 +- .../search/aggregations/CombiIT.java | 51 +- .../search/aggregations/EquivalenceIT.java | 327 ++- .../aggregations/FiltersAggsRewriteIT.java | 6 +- .../search/aggregations/MetadataIT.java | 54 +- .../search/aggregations/MissingValueIT.java | 71 +- .../bucket/AdjacencyMatrixIT.java | 93 +- .../aggregations/bucket/BooleanTermsIT.java | 39 +- .../aggregations/bucket/DateHistogramIT.java | 816 ++++--- .../bucket/DateHistogramOffsetIT.java | 52 +- .../aggregations/bucket/DateRangeIT.java | 542 +++-- .../bucket/DiversifiedSamplerIT.java | 126 +- .../aggregations/bucket/DoubleTermsIT.java | 631 +++--- .../search/aggregations/bucket/FilterIT.java | 72 +- .../search/aggregations/bucket/FiltersIT.java | 173 +- .../aggregations/bucket/GeoDistanceIT.java | 166 +- .../aggregations/bucket/GeoHashGridIT.java | 108 +- .../search/aggregations/bucket/GlobalIT.java | 48 +- .../aggregations/bucket/HistogramIT.java | 594 +++-- .../search/aggregations/bucket/IpRangeIT.java | 91 +- .../search/aggregations/bucket/IpTermsIT.java | 55 +- .../aggregations/bucket/LongTermsIT.java | 578 +++-- .../aggregations/bucket/MinDocCountIT.java | 121 +- .../aggregations/bucket/NaNSortingIT.java | 46 +- .../search/aggregations/bucket/NestedIT.java | 665 +++--- .../search/aggregations/bucket/RangeIT.java | 421 ++-- .../aggregations/bucket/ReverseNestedIT.java | 445 ++-- .../search/aggregations/bucket/SamplerIT.java | 113 +- .../aggregations/bucket/ShardReduceIT.java | 236 +- .../aggregations/bucket/ShardSizeTermsIT.java | 188 +- .../SignificantTermsSignificanceScoreIT.java | 499 +++-- .../bucket/TermsDocCountErrorIT.java | 959 ++++---- .../bucket/TermsShardMinDocCountIT.java | 81 +- .../bucket/terms/RareTermsIT.java | 2 +- .../bucket/terms/StringTermsIT.java | 781 ++++--- .../aggregations/metrics/CardinalityIT.java | 355 +-- 
.../CardinalityWithRequestBreakerIT.java | 38 +- .../aggregations/metrics/ExtendedStatsIT.java | 625 ++++-- .../aggregations/metrics/GeoBoundsIT.java | 76 +- .../aggregations/metrics/GeoCentroidIT.java | 71 +- .../metrics/HDRPercentileRanksIT.java | 538 +++-- .../metrics/HDRPercentilesIT.java | 504 +++-- .../metrics/MedianAbsoluteDeviationIT.java | 348 +-- .../metrics/ScriptedMetricIT.java | 918 +++++--- .../search/aggregations/metrics/StatsIT.java | 204 +- .../search/aggregations/metrics/SumIT.java | 217 +- .../metrics/TDigestPercentileRanksIT.java | 387 ++-- .../metrics/TDigestPercentilesIT.java | 361 +-- .../aggregations/metrics/TopHitsIT.java | 1032 +++++---- .../aggregations/metrics/ValueCountIT.java | 224 +- .../aggregations/pipeline/AvgBucketIT.java | 139 +- .../aggregations/pipeline/BucketScriptIT.java | 368 +-- .../pipeline/BucketSelectorIT.java | 317 +-- .../aggregations/pipeline/BucketSortIT.java | 266 ++- .../pipeline/DateDerivativeIT.java | 265 ++- .../aggregations/pipeline/DerivativeIT.java | 252 ++- .../pipeline/ExtendedStatsBucketIT.java | 183 +- .../aggregations/pipeline/MaxBucketIT.java | 208 +- .../aggregations/pipeline/MinBucketIT.java | 143 +- .../pipeline/PercentilesBucketIT.java | 230 +- .../aggregations/pipeline/SerialDiffIT.java | 75 +- .../aggregations/pipeline/StatsBucketIT.java | 139 +- .../aggregations/pipeline/SumBucketIT.java | 139 +- .../AbstractAggregationBuilder.java | 18 +- .../aggregations/AggregationBuilder.java | 14 +- .../aggregations/AggregationBuilders.java | 19 +- .../AggregationExecutionException.java | 2 +- .../AggregationInitializationException.java | 2 +- .../search/aggregations/AggregationPhase.java | 8 +- .../search/aggregations/Aggregations.java | 6 +- .../search/aggregations/Aggregator.java | 33 +- .../search/aggregations/AggregatorBase.java | 24 +- .../aggregations/AggregatorFactories.java | 207 +- .../aggregations/AggregatorFactory.java | 12 +- .../search/aggregations/BucketCollector.java | 4 +- .../search/aggregations/BucketOrder.java | 2 +- .../aggregations/InternalAggregation.java | 50 +- .../aggregations/InternalAggregations.java | 6 +- .../InternalMultiBucketAggregation.java | 20 +- .../search/aggregations/InternalOrder.java | 46 +- .../aggregations/LeafBucketCollector.java | 2 + .../aggregations/MultiBucketCollector.java | 5 +- .../MultiBucketConsumerService.java | 22 +- .../aggregations/ParsedAggregation.java | 7 +- .../ParsedMultiBucketAggregation.java | 26 +- .../PipelineAggregationBuilder.java | 38 +- .../PipelineAggregatorBuilders.java | 3 +- .../bucket/BucketsAggregator.java | 91 +- .../bucket/DeferableBucketAggregator.java | 9 +- .../bucket/DeferringBucketCollector.java | 9 +- .../InternalSingleBucketAggregation.java | 21 +- .../bucket/IteratorAndCurrent.java | 1 - .../bucket/ParsedSingleBucketAggregation.java | 10 +- .../bucket/SingleBucketAggregator.java | 3 +- .../bucket/adjacency/AdjacencyMatrix.java | 3 +- .../AdjacencyMatrixAggregationBuilder.java | 39 +- .../adjacency/AdjacencyMatrixAggregator.java | 35 +- .../AdjacencyMatrixAggregatorFactory.java | 12 +- .../adjacency/InternalAdjacencyMatrix.java | 14 +- .../adjacency/ParsedAdjacencyMatrix.java | 15 +- .../bucket/composite/BinaryValuesSource.java | 25 +- .../composite/CompositeAggregation.java | 3 +- .../CompositeAggregationBuilder.java | 59 +- .../CompositeAggregationFactory.java | 13 +- .../bucket/composite/CompositeAggregator.java | 94 +- .../bucket/composite/CompositeKey.java | 4 +- .../CompositeValuesCollectorQueue.java | 17 +- 
.../CompositeValuesSourceBuilder.java | 28 +- .../CompositeValuesSourceConfig.java | 2 +- .../CompositeValuesSourceParserHelper.java | 30 +- .../DateHistogramValuesSourceBuilder.java | 16 +- .../bucket/composite/DoubleValuesSource.java | 22 +- .../GeoTileGridValuesSourceBuilder.java | 21 +- .../bucket/composite/GeoTileValuesSource.java | 18 +- .../HistogramValuesSourceBuilder.java | 5 +- .../bucket/composite/InternalComposite.java | 132 +- .../bucket/composite/LongValuesSource.java | 28 +- .../bucket/composite/OrdinalValuesSource.java | 19 +- .../bucket/composite/ParsedComposite.java | 27 +- .../composite/PointsSortedDocsProducer.java | 32 +- .../SingleDimensionValuesSource.java | 26 +- .../bucket/composite/SortedDocsProducer.java | 19 +- .../composite/TermsSortedDocsProducer.java | 6 +- .../composite/TermsValuesSourceBuilder.java | 10 +- .../filter/DocValuesFieldExistsAdapter.java | 2 +- .../aggregations/bucket/filter/Filter.java | 3 +- .../filter/FilterAggregationBuilder.java | 14 +- .../bucket/filter/FilterAggregator.java | 32 +- .../filter/FilterAggregatorFactory.java | 10 +- .../filter/FilterByFilterAggregator.java | 6 +- .../aggregations/bucket/filter/Filters.java | 3 +- .../filter/FiltersAggregationBuilder.java | 48 +- .../bucket/filter/FiltersAggregator.java | 45 +- .../filter/FiltersAggregatorFactory.java | 32 +- .../bucket/filter/InternalFilters.java | 9 +- .../filter/MatchAllQueryToFilterAdapter.java | 2 +- .../filter/MatchNoneQueryToFilterAdapter.java | 2 +- .../bucket/filter/ParsedFilters.java | 24 +- .../filter/TermQueryToFilterAdapter.java | 2 +- .../aggregations/bucket/geogrid/GeoGrid.java | 3 +- .../geogrid/GeoGridAggregationBuilder.java | 76 +- .../bucket/geogrid/GeoGridAggregator.java | 17 +- .../bucket/geogrid/GeoHashCellIdSource.java | 6 +- .../GeoHashGridAggregationBuilder.java | 47 +- .../bucket/geogrid/GeoHashGridAggregator.java | 17 +- .../geogrid/GeoHashGridAggregatorFactory.java | 73 +- .../bucket/geogrid/GeoTileCellIdSource.java | 9 +- .../GeoTileGridAggregationBuilder.java | 42 +- .../bucket/geogrid/GeoTileGridAggregator.java | 17 +- .../geogrid/GeoTileGridAggregatorFactory.java | 33 +- .../bucket/geogrid/GeoTileUtils.java | 33 +- .../bucket/geogrid/InternalGeoGrid.java | 8 +- .../bucket/geogrid/InternalGeoGridBucket.java | 10 +- .../bucket/geogrid/ParsedGeoGrid.java | 12 +- .../bucket/geogrid/ParsedGeoHashGrid.java | 7 +- .../bucket/geogrid/ParsedGeoTileGrid.java | 7 +- .../aggregations/bucket/global/Global.java | 3 +- .../global/GlobalAggregationBuilder.java | 2 +- .../bucket/global/GlobalAggregator.java | 15 +- .../global/GlobalAggregatorFactory.java | 22 +- .../AbstractHistogramAggregator.java | 43 +- .../AutoDateHistogramAggregationBuilder.java | 102 +- .../AutoDateHistogramAggregator.java | 46 +- .../AutoDateHistogramAggregatorFactory.java | 37 +- .../AutoDateHistogramAggregatorSupplier.java | 3 +- .../DateHistogramAggregationBuilder.java | 159 +- .../DateHistogramAggregationSupplier.java | 30 +- .../histogram/DateHistogramAggregator.java | 48 +- .../DateHistogramAggregatorFactory.java | 28 +- .../histogram/DateHistogramInterval.java | 3 +- .../histogram/DateIntervalConsumer.java | 1 + .../bucket/histogram/DateIntervalWrapper.java | 33 +- .../DateRangeHistogramAggregator.java | 74 +- .../bucket/histogram/DoubleBounds.java | 28 +- .../HistogramAggregationBuilder.java | 110 +- .../histogram/HistogramAggregatorFactory.java | 86 +- .../histogram/InternalAutoDateHistogram.java | 64 +- .../histogram/InternalDateHistogram.java | 55 +- 
.../bucket/histogram/InternalHistogram.java | 34 +- .../InternalVariableWidthHistogram.java | 110 +- .../bucket/histogram/LongBounds.java | 33 +- .../histogram/NumericHistogramAggregator.java | 3 +- .../histogram/ParsedAutoDateHistogram.java | 21 +- .../bucket/histogram/ParsedDateHistogram.java | 15 +- .../bucket/histogram/ParsedHistogram.java | 15 +- .../ParsedVariableWidthHistogram.java | 47 +- .../histogram/RangeHistogramAggregator.java | 10 +- ...iableWidthHistogramAggregationBuilder.java | 73 +- .../VariableWidthHistogramAggregator.java | 131 +- ...riableWidthHistogramAggregatorFactory.java | 41 +- .../aggregations/bucket/missing/Missing.java | 3 +- .../missing/MissingAggregationBuilder.java | 30 +- .../bucket/missing/MissingAggregator.java | 31 +- .../missing/MissingAggregatorFactory.java | 21 +- .../missing/MissingAggregatorSupplier.java | 16 +- .../aggregations/bucket/nested/Nested.java | 3 +- .../nested/NestedAggregationBuilder.java | 25 +- .../bucket/nested/NestedAggregator.java | 22 +- .../nested/NestedAggregatorFactory.java | 19 +- .../bucket/nested/ReverseNested.java | 3 +- .../ReverseNestedAggregationBuilder.java | 15 +- .../nested/ReverseNestedAggregator.java | 26 +- .../ReverseNestedAggregatorFactory.java | 20 +- .../range/AbstractRangeAggregatorFactory.java | 45 +- .../bucket/range/AbstractRangeBuilder.java | 18 +- .../bucket/range/BinaryRangeAggregator.java | 51 +- .../range/BinaryRangeAggregatorFactory.java | 54 +- .../range/DateRangeAggregationBuilder.java | 48 +- .../range/GeoDistanceAggregationBuilder.java | 83 +- .../range/GeoDistanceAggregatorSupplier.java | 3 +- .../GeoDistanceRangeAggregatorFactory.java | 74 +- .../bucket/range/InternalBinaryRange.java | 49 +- .../bucket/range/InternalDateRange.java | 72 +- .../bucket/range/InternalGeoDistance.java | 30 +- .../bucket/range/InternalRange.java | 66 +- .../range/IpRangeAggregationBuilder.java | 48 +- .../range/IpRangeAggregatorSupplier.java | 22 +- .../bucket/range/ParsedBinaryRange.java | 23 +- .../bucket/range/ParsedDateRange.java | 15 +- .../bucket/range/ParsedGeoDistance.java | 15 +- .../bucket/range/ParsedRange.java | 41 +- .../bucket/range/RangeAggregationBuilder.java | 37 +- .../bucket/range/RangeAggregator.java | 53 +- .../bucket/range/RangeAggregatorSupplier.java | 22 +- .../sampler/BestDocsDeferringCollector.java | 5 +- .../DiversifiedAggregationBuilder.java | 39 +- .../sampler/DiversifiedAggregatorFactory.java | 30 +- .../DiversifiedAggregatorSupplier.java | 3 +- ...DiversifiedBytesHashSamplerAggregator.java | 4 +- .../DiversifiedMapSamplerAggregator.java | 5 +- .../DiversifiedNumericSamplerAggregator.java | 3 +- .../DiversifiedOrdinalsSamplerAggregator.java | 4 +- .../bucket/sampler/InternalSampler.java | 3 +- .../aggregations/bucket/sampler/Sampler.java | 3 +- .../sampler/SamplerAggregationBuilder.java | 14 +- .../bucket/sampler/SamplerAggregator.java | 60 +- .../sampler/SamplerAggregatorFactory.java | 10 +- .../bucket/terms/AbstractInternalTerms.java | 42 +- .../terms/AbstractRareTermsAggregator.java | 13 +- .../terms/AbstractStringTermsAggregator.java | 44 +- .../bucket/terms/BytesKeyedBucketOrds.java | 4 +- .../bucket/terms/DoubleTerms.java | 89 +- .../GlobalOrdinalsStringTermsAggregator.java | 28 +- .../bucket/terms/IncludeExclude.java | 71 +- .../bucket/terms/InternalMappedRareTerms.java | 38 +- .../terms/InternalMappedSignificantTerms.java | 31 +- .../bucket/terms/InternalMappedTerms.java | 29 +- .../bucket/terms/InternalRareTerms.java | 22 +- .../terms/InternalSignificantTerms.java | 50 +- 
.../bucket/terms/InternalTerms.java | 64 +- .../bucket/terms/LongKeyedBucketOrds.java | 22 +- .../bucket/terms/LongRareTerms.java | 12 +- .../bucket/terms/LongRareTermsAggregator.java | 13 +- .../aggregations/bucket/terms/LongTerms.java | 115 +- .../terms/MapStringTermsAggregator.java | 21 +- .../bucket/terms/NumericTermsAggregator.java | 9 +- .../bucket/terms/ParsedDoubleTerms.java | 7 +- .../bucket/terms/ParsedLongRareTerms.java | 8 +- .../bucket/terms/ParsedLongTerms.java | 7 +- .../bucket/terms/ParsedRareTerms.java | 23 +- .../terms/ParsedSignificantLongTerms.java | 7 +- .../terms/ParsedSignificantStringTerms.java | 7 +- .../bucket/terms/ParsedSignificantTerms.java | 32 +- .../bucket/terms/ParsedStringRareTerms.java | 13 +- .../bucket/terms/ParsedStringTerms.java | 21 +- .../bucket/terms/ParsedTerms.java | 34 +- .../aggregations/bucket/terms/RareTerms.java | 2 - .../terms/RareTermsAggregationBuilder.java | 86 +- .../terms/RareTermsAggregatorFactory.java | 138 +- .../terms/RareTermsAggregatorSupplier.java | 24 +- .../bucket/terms/SignificanceLookup.java | 4 +- .../bucket/terms/SignificantLongTerms.java | 77 +- .../bucket/terms/SignificantStringTerms.java | 74 +- .../SignificantTermsAggregationBuilder.java | 97 +- .../SignificantTermsAggregatorFactory.java | 265 ++- .../SignificantTermsAggregatorSupplier.java | 28 +- .../SignificantTextAggregationBuilder.java | 171 +- .../SignificantTextAggregatorFactory.java | 79 +- .../bucket/terms/StringRareTerms.java | 18 +- .../terms/StringRareTermsAggregator.java | 17 +- .../bucket/terms/StringTerms.java | 86 +- .../bucket/terms/TermsAggregationBuilder.java | 118 +- .../bucket/terms/TermsAggregator.java | 32 +- .../bucket/terms/TermsAggregatorFactory.java | 276 ++- .../bucket/terms/TermsAggregatorSupplier.java | 28 +- .../terms/UnmappedSignificantTerms.java | 21 +- .../bucket/terms/UnmappedTerms.java | 12 +- .../bucket/terms/heuristic/ChiSquare.java | 11 +- .../bucket/terms/heuristic/GND.java | 8 +- .../bucket/terms/heuristic/JLHScore.java | 13 +- .../terms/heuristic/MutualInformation.java | 24 +- .../heuristic/NXYSignificanceHeuristic.java | 60 +- .../terms/heuristic/PercentageScore.java | 18 +- .../terms/heuristic/ScriptHeuristic.java | 25 +- .../heuristic/SignificanceHeuristic.java | 14 +- .../SignificanceHeuristicBuilder.java | 5 +- .../AbstractHDRPercentilesAggregator.java | 21 +- .../metrics/AbstractHyperLogLog.java | 2 + .../metrics/AbstractHyperLogLogPlusPlus.java | 7 +- .../AbstractInternalHDRPercentiles.java | 33 +- .../AbstractInternalTDigestPercentiles.java | 25 +- ...AbstractPercentilesAggregationBuilder.java | 65 +- .../AbstractTDigestPercentilesAggregator.java | 21 +- .../metrics/AvgAggregationBuilder.java | 12 +- .../aggregations/metrics/AvgAggregator.java | 14 +- .../metrics/AvgAggregatorFactory.java | 23 +- .../CardinalityAggregationBuilder.java | 50 +- .../metrics/CardinalityAggregator.java | 48 +- .../metrics/CardinalityAggregatorFactory.java | 51 +- .../CardinalityAggregatorSupplier.java | 14 +- .../aggregations/metrics/CompensatedSum.java | 3 - .../aggregations/metrics/ExtendedStats.java | 8 +- .../ExtendedStatsAggregationBuilder.java | 34 +- .../metrics/ExtendedStatsAggregator.java | 107 +- .../ExtendedStatsAggregatorFactory.java | 28 +- .../ExtendedStatsAggregatorProvider.java | 14 +- .../metrics/GeoBoundsAggregationBuilder.java | 46 +- .../metrics/GeoBoundsAggregator.java | 20 +- .../metrics/GeoBoundsAggregatorFactory.java | 25 +- .../aggregations/metrics/GeoCentroid.java | 1 + .../GeoCentroidAggregationBuilder.java | 34 +- 
.../metrics/GeoCentroidAggregator.java | 12 +- .../metrics/GeoCentroidAggregatorFactory.java | 23 +- .../GlobalOrdCardinalityAggregator.java | 39 +- .../metrics/HDRPercentileRanksAggregator.java | 14 +- .../metrics/HDRPercentilesAggregator.java | 13 +- .../metrics/HyperLogLogPlusPlus.java | 7 +- .../metrics/HyperLogLogPlusPlusSparse.java | 7 +- .../aggregations/metrics/InternalAvg.java | 6 +- .../metrics/InternalCardinality.java | 4 +- .../metrics/InternalExtendedStats.java | 58 +- .../metrics/InternalGeoBounds.java | 75 +- .../metrics/InternalGeoCentroid.java | 17 +- .../metrics/InternalHDRPercentileRanks.java | 19 +- .../metrics/InternalHDRPercentiles.java | 19 +- .../InternalMedianAbsoluteDeviation.java | 4 +- .../InternalNumericMetricsAggregation.java | 8 +- .../metrics/InternalScriptedMetric.java | 9 +- .../aggregations/metrics/InternalStats.java | 43 +- .../aggregations/metrics/InternalSum.java | 2 +- .../InternalTDigestPercentileRanks.java | 22 +- .../metrics/InternalTDigestPercentiles.java | 19 +- .../aggregations/metrics/InternalTopHits.java | 22 +- .../metrics/InternalWeightedAvg.java | 6 +- .../metrics/MaxAggregationBuilder.java | 23 +- .../aggregations/metrics/MaxAggregator.java | 11 +- .../metrics/MaxAggregatorFactory.java | 26 +- ...anAbsoluteDeviationAggregationBuilder.java | 50 +- .../MedianAbsoluteDeviationAggregator.java | 18 +- ...ianAbsoluteDeviationAggregatorFactory.java | 41 +- ...anAbsoluteDeviationAggregatorSupplier.java | 16 +- .../metrics/MetricAggregatorSupplier.java | 12 +- .../metrics/MetricInspectionHelper.java | 2 +- .../metrics/MinAggregationBuilder.java | 23 +- .../aggregations/metrics/MinAggregator.java | 13 +- .../metrics/MinAggregatorFactory.java | 26 +- .../metrics/NumericMetricsAggregator.java | 13 +- .../metrics/ParsedCardinality.java | 8 +- .../metrics/ParsedExtendedStats.java | 217 +- .../aggregations/metrics/ParsedGeoBounds.java | 32 +- .../metrics/ParsedGeoCentroid.java | 12 +- .../metrics/ParsedHDRPercentileRanks.java | 7 +- .../metrics/ParsedHDRPercentiles.java | 7 +- .../metrics/ParsedPercentiles.java | 3 +- .../metrics/ParsedScriptedMetric.java | 19 +- ...dSingleValueNumericMetricsAggregation.java | 17 +- .../aggregations/metrics/ParsedStats.java | 80 +- .../metrics/ParsedTDigestPercentileRanks.java | 7 +- .../metrics/ParsedTDigestPercentiles.java | 7 +- .../aggregations/metrics/ParsedTopHits.java | 16 +- .../metrics/ParsedValueCount.java | 7 +- .../metrics/ParsedWeightedAvg.java | 7 +- .../aggregations/metrics/Percentile.java | 3 +- .../PercentileRanksAggregationBuilder.java | 54 +- .../PercentileRanksAggregatorFactory.java | 49 +- .../PercentilesAggregationBuilder.java | 61 +- .../metrics/PercentilesAggregatorFactory.java | 47 +- .../PercentilesAggregatorSupplier.java | 20 +- .../metrics/PercentilesConfig.java | 94 +- .../metrics/PercentilesMethod.java | 2 +- .../ScriptedMetricAggregationBuilder.java | 62 +- .../metrics/ScriptedMetricAggregator.java | 4 +- .../ScriptedMetricAggregatorFactory.java | 28 +- .../search/aggregations/metrics/Stats.java | 1 - .../metrics/StatsAggregationBuilder.java | 22 +- .../aggregations/metrics/StatsAggregator.java | 42 +- .../metrics/StatsAggregatorFactory.java | 26 +- .../metrics/SumAggregationBuilder.java | 23 +- .../aggregations/metrics/SumAggregator.java | 5 +- .../metrics/SumAggregatorFactory.java | 29 +- .../TDigestPercentileRanksAggregator.java | 20 +- .../metrics/TDigestPercentilesAggregator.java | 20 +- .../aggregations/metrics/TDigestState.java | 1 + .../metrics/TopHitsAggregationBuilder.java | 218 
+- .../metrics/TopHitsAggregator.java | 47 +- .../metrics/TopHitsAggregatorFactory.java | 44 +- .../metrics/ValueCountAggregationBuilder.java | 26 +- .../metrics/ValueCountAggregator.java | 20 +- .../metrics/ValueCountAggregatorFactory.java | 23 +- .../WeightedAvgAggregationBuilder.java | 22 +- .../metrics/WeightedAvgAggregator.java | 22 +- .../metrics/WeightedAvgAggregatorFactory.java | 13 +- .../aggregations/metrics/package-info.java | 1 - .../AbstractPipelineAggregationBuilder.java | 6 +- .../AvgBucketPipelineAggregationBuilder.java | 7 +- .../pipeline/AvgBucketPipelineAggregator.java | 9 +- .../aggregations/pipeline/BucketHelpers.java | 60 +- .../pipeline/BucketMetricsParser.java | 41 +- ...cketMetricsPipelineAggregationBuilder.java | 27 +- .../BucketMetricsPipelineAggregator.java | 12 +- ...ucketScriptPipelineAggregationBuilder.java | 56 +- .../BucketScriptPipelineAggregator.java | 28 +- ...ketSelectorPipelineAggregationBuilder.java | 41 +- .../BucketSelectorPipelineAggregator.java | 17 +- .../BucketSortPipelineAggregationBuilder.java | 48 +- .../BucketSortPipelineAggregator.java | 15 +- ...mulativeSumPipelineAggregationBuilder.java | 5 +- .../CumulativeSumPipelineAggregator.java | 11 +- .../DerivativePipelineAggregationBuilder.java | 44 +- .../DerivativePipelineAggregator.java | 24 +- .../pipeline/ExtendedStatsBucket.java | 3 +- .../pipeline/ExtendedStatsBucketParser.java | 17 +- ...StatsBucketPipelineAggregationBuilder.java | 4 +- ...ExtendedStatsBucketPipelineAggregator.java | 10 +- .../pipeline/InternalBucketMetricValue.java | 5 +- .../pipeline/InternalDerivative.java | 3 +- .../pipeline/InternalExtendedStatsBucket.java | 13 +- .../pipeline/InternalPercentilesBucket.java | 27 +- .../pipeline/InternalStatsBucket.java | 11 +- .../MaxBucketPipelineAggregationBuilder.java | 7 +- .../pipeline/MaxBucketPipelineAggregator.java | 9 +- .../MinBucketPipelineAggregationBuilder.java | 7 +- .../pipeline/MinBucketPipelineAggregator.java | 9 +- .../MovFnPipelineAggregationBuilder.java | 19 +- .../pipeline/MovFnPipelineAggregator.java | 31 +- .../pipeline/MovingFunctionScript.java | 2 +- .../pipeline/MovingFunctions.java | 18 +- .../pipeline/ParsedBucketMetricValue.java | 5 +- .../pipeline/ParsedDerivative.java | 9 +- .../pipeline/ParsedExtendedStatsBucket.java | 6 +- .../pipeline/ParsedPercentilesBucket.java | 16 +- .../pipeline/ParsedSimpleValue.java | 7 +- .../pipeline/ParsedStatsBucket.java | 6 +- .../pipeline/PercentilesBucket.java | 3 +- ...tilesBucketPipelineAggregationBuilder.java | 40 +- .../PercentilesBucketPipelineAggregator.java | 13 +- .../pipeline/PipelineAggregator.java | 4 +- .../SerialDiffPipelineAggregationBuilder.java | 56 +- .../SerialDiffPipelineAggregator.java | 25 +- ...StatsBucketPipelineAggregationBuilder.java | 10 +- .../StatsBucketPipelineAggregator.java | 9 +- .../SumBucketPipelineAggregationBuilder.java | 7 +- .../pipeline/SumBucketPipelineAggregator.java | 9 +- .../support/AggregationContext.java | 4 +- .../aggregations/support/AggregationInfo.java | 2 +- .../support/AggregationInspectionHelper.java | 2 +- .../aggregations/support/AggregationPath.java | 4 +- .../support/AggregationUsageService.java | 5 +- .../support/CoreValuesSourceType.java | 83 +- .../aggregations/support/MissingValues.java | 31 +- .../support/MultiValuesSource.java | 7 +- .../MultiValuesSourceAggregationBuilder.java | 75 +- .../MultiValuesSourceAggregatorFactory.java | 13 +- .../support/MultiValuesSourceFieldConfig.java | 96 +- .../support/MultiValuesSourceParseHelper.java | 43 +- 
.../aggregations/support/ValueType.java | 46 +- .../aggregations/support/ValuesSource.java | 11 +- .../ValuesSourceAggregationBuilder.java | 126 +- .../ValuesSourceAggregatorFactory.java | 15 +- .../support/ValuesSourceConfig.java | 101 +- .../support/ValuesSourceRegistry.java | 11 +- .../support/ValuesSourceType.java | 3 +- .../aggregations/AdaptingAggregatorTests.java | 9 +- .../AggregationTestScriptsPlugin.java | 16 +- .../aggregations/AggregationsTests.java | 125 +- .../aggregations/AggregatorBaseTests.java | 22 +- .../AggregatorFactoriesBuilderTests.java | 16 +- .../AggregatorFactoriesTests.java | 177 +- .../InternalAggregationsTests.java | 54 +- .../InternalMultiBucketAggregationTests.java | 80 +- .../aggregations/InternalOrderTests.java | 13 +- .../MultiBucketCollectorTests.java | 9 +- .../SubAggCollectionModeTests.java | 2 +- .../bucket/AutoDateHistogramTests.java | 2 +- .../BestBucketsDeferringCollectorTests.java | 2 +- .../aggregations/bucket/BucketUtilsTests.java | 9 +- .../bucket/BucketsAggregatorTests.java | 24 +- .../aggregations/bucket/DateRangeTests.java | 28 +- .../bucket/DateScriptMocksPlugin.java | 9 +- .../bucket/DocCountProviderTests.java | 72 +- .../aggregations/bucket/FilterTests.java | 6 +- .../aggregations/bucket/FiltersTests.java | 49 +- .../bucket/GeoDistanceRangeTests.java | 56 +- .../aggregations/bucket/GeoHashGridTests.java | 14 +- .../aggregations/bucket/GeoTileGridTests.java | 14 +- .../aggregations/bucket/HistogramTests.java | 4 +- .../aggregations/bucket/IpRangeTests.java | 44 +- .../aggregations/bucket/RangeTests.java | 38 +- .../bucket/ShardSizeTestCase.java | 13 +- .../bucket/SignificantTermsTests.java | 160 +- .../bucket/SignificantTextTests.java | 38 +- .../aggregations/bucket/TermsTests.java | 82 +- .../InternalAdjacencyMatrixTests.java | 46 +- .../CompositeAggregationBuilderTests.java | 14 +- .../composite/CompositeAggregatorTests.java | 1989 ++++++++--------- .../CompositeValuesCollectorQueueTests.java | 84 +- .../composite/InternalCompositeTests.java | 188 +- .../SingleDimensionValuesSourceTests.java | 118 +- .../bucket/filter/FilterAggregatorTests.java | 20 +- .../bucket/filter/FiltersAggregatorTests.java | 162 +- .../bucket/filter/InternalFilterTests.java | 8 +- .../bucket/filter/InternalFiltersTests.java | 41 +- .../geogrid/GeoGridAggregatorTestCase.java | 79 +- .../bucket/geogrid/GeoGridTestCase.java | 54 +- .../geogrid/GeoHashGridAggregatorTests.java | 6 +- .../geogrid/GeoHashGridParserTests.java | 83 +- .../bucket/geogrid/GeoHashGridTests.java | 8 +- .../geogrid/GeoTileGridAggregatorTests.java | 19 +- .../geogrid/GeoTileGridParserTests.java | 55 +- .../bucket/geogrid/GeoTileGridTests.java | 8 +- .../bucket/geogrid/GeoTileUtilsTests.java | 50 +- .../bucket/global/InternalGlobalTests.java | 8 +- ...oDateHistogramAggregationBuilderTests.java | 9 +- .../AutoDateHistogramAggregatorTests.java | 383 ++-- .../DateHistogramAggregatorTestCase.java | 92 +- .../DateHistogramAggregatorTests.java | 472 ++-- .../bucket/histogram/DateHistogramTests.java | 45 +- .../histogram/DateIntervalWrapperTests.java | 23 +- .../DateRangeHistogramAggregatorTests.java | 1215 ++++++---- .../InternalAutoDateHistogramTests.java | 174 +- .../histogram/InternalDateHistogramTests.java | 93 +- .../histogram/InternalHistogramTests.java | 109 +- .../InternalVariableWidthHistogramTests.java | 261 ++- .../bucket/histogram/LongBoundsTests.java | 26 +- .../NumericHistogramAggregatorTests.java | 149 +- .../RangeHistogramAggregatorTests.java | 212 +- 
...VariableWidthHistogramAggregatorTests.java | 246 +- .../bucket/missing/InternalMissingTests.java | 8 +- .../missing/MissingAggregatorTests.java | 274 +-- .../bucket/nested/InternalNestedTests.java | 8 +- .../nested/InternalReverseNestedTests.java | 8 +- .../bucket/nested/NestedAggregatorTests.java | 261 ++- .../nested/ReverseNestedAggregatorTests.java | 101 +- .../range/BinaryRangeAggregatorTests.java | 30 +- .../range/DateRangeAggregatorTests.java | 186 +- .../range/InternalBinaryRangeTests.java | 67 +- .../bucket/range/InternalDateRangeTests.java | 74 +- .../range/InternalGeoDistanceTests.java | 62 +- .../bucket/range/InternalRangeTestCase.java | 9 +- .../bucket/range/InternalRangeTests.java | 63 +- .../bucket/range/IpRangeAggregatorTests.java | 25 +- .../range/RangeAggregationBuilderTests.java | 5 +- .../bucket/range/RangeAggregatorTests.java | 154 +- .../BestDocsDeferringCollectorTests.java | 7 +- .../sampler/DiversifiedSamplerTests.java | 40 +- .../bucket/sampler/InternalSamplerTests.java | 9 +- .../sampler/SamplerAggregatorTests.java | 51 +- .../terms/BinaryTermsAggregatorTests.java | 99 +- .../terms/BytesKeyedBucketOrdsTests.java | 6 +- .../bucket/terms/DoubleTermsTests.java | 162 +- .../terms/InternalRareTermsTestCase.java | 23 +- .../InternalSignificantTermsTestCase.java | 65 +- .../bucket/terms/InternalTermsTestCase.java | 40 +- .../terms/KeywordTermsAggregatorTests.java | 73 +- .../terms/LongKeyedBucketOrdsTests.java | 7 +- .../bucket/terms/LongRareTermsTests.java | 10 +- .../bucket/terms/LongTermsTests.java | 162 +- .../terms/NumericTermsAggregatorTests.java | 117 +- .../terms/RareTermsAggregatorTests.java | 342 ++- .../terms/SignificantLongTermsTests.java | 155 +- .../terms/SignificantStringTermsTests.java | 166 +- .../SignificantTermsAggregatorTests.java | 57 +- .../terms/SignificantTextAggregatorTests.java | 71 +- .../bucket/terms/StringRareTermsTests.java | 20 +- .../bucket/terms/StringTermsTests.java | 77 +- .../terms/TermsAggregatorFactoryTests.java | 30 +- .../bucket/terms/TermsAggregatorTests.java | 481 ++-- .../bucket/terms/heuristic/GNDTests.java | 6 +- .../heuristic/MutualInformationTests.java | 3 +- .../metrics/AbstractGeoTestCase.java | 174 +- .../AbstractNumericMetricTestCase.java | 4 +- .../metrics/AbstractPercentilesTestCase.java | 62 +- .../metrics/AdjacencyMatrixTests.java | 3 +- .../metrics/AvgAggregatorTests.java | 137 +- .../metrics/CardinalityAggregatorTests.java | 33 +- .../metrics/CompensatedSumTests.java | 2 +- .../metrics/ExtendedStatsAggregatorTests.java | 322 +-- .../metrics/GeoBoundsAggregatorTests.java | 27 +- .../aggregations/metrics/GeoBoundsTests.java | 4 +- .../metrics/GeoCentroidAggregatorTests.java | 49 +- .../HDRPercentileRanksAggregatorTests.java | 46 +- .../HDRPercentilesAggregatorTests.java | 51 +- .../HyperLogLogPlusPlusSparseTests.java | 11 +- .../metrics/HyperLogLogPlusPlusTests.java | 7 +- .../metrics/InternalAvgTests.java | 56 +- .../metrics/InternalCardinalityTests.java | 62 +- .../metrics/InternalExtendedStatsTests.java | 197 +- .../metrics/InternalGeoBoundsTests.java | 90 +- .../metrics/InternalGeoCentroidTests.java | 81 +- .../InternalHDRPercentilesRanksTests.java | 65 +- .../metrics/InternalHDRPercentilesTests.java | 70 +- .../metrics/InternalMaxTests.java | 40 +- .../InternalMedianAbsoluteDeviationTests.java | 4 +- .../metrics/InternalMinTests.java | 40 +- .../InternalPercentilesRanksTestCase.java | 4 +- .../metrics/InternalScriptedMetricTests.java | 89 +- .../metrics/InternalStatsBucketTests.java | 11 +- 
.../metrics/InternalStatsTests.java | 154 +- .../metrics/InternalSumTests.java | 42 +- .../InternalTDigestPercentilesRanksTests.java | 68 +- .../InternalTDigestPercentilesTests.java | 81 +- .../metrics/InternalTopHitsTests.java | 180 +- .../metrics/InternalValueCountTests.java | 40 +- .../metrics/InternalWeightedAvgTests.java | 58 +- .../metrics/MaxAggregatorTests.java | 138 +- ...edianAbsoluteDeviationAggregatorTests.java | 206 +- .../metrics/MedianAbsoluteDeviationTests.java | 5 +- .../metrics/MetricAggScriptPlugin.java | 8 +- .../metrics/MinAggregatorTests.java | 149 +- .../metrics/PercentilesMethodTests.java | 3 +- .../metrics/PercentilesTests.java | 30 +- .../ScriptedMetricAggregatorTests.java | 284 ++- .../metrics/ScriptedMetricTests.java | 6 +- .../metrics/StatsAggregatorTests.java | 299 ++- .../metrics/SumAggregatorTests.java | 185 +- ...TDigestPercentileRanksAggregatorTests.java | 40 +- .../TDigestPercentilesAggregatorTests.java | 27 +- .../metrics/TDigestStateTests.java | 2 +- .../metrics/TopHitsAggregatorTests.java | 29 +- .../aggregations/metrics/TopHitsTests.java | 93 +- .../metrics/ValueCountAggregatorTests.java | 56 +- .../metrics/WeightedAvgAggregatorTests.java | 382 ++-- .../WeightedAvgAggregationBuilderTests.java | 3 +- .../AbstractBucketMetricsTestCase.java | 4 +- .../pipeline/AvgBucketAggregatorTests.java | 21 +- .../aggregations/pipeline/AvgBucketTests.java | 25 +- .../pipeline/BucketHelpersTests.java | 34 +- .../pipeline/BucketScriptAggregatorTests.java | 51 +- ...ScriptPipelineAggregationBuilderTests.java | 6 +- .../pipeline/BucketScriptTests.java | 64 +- .../pipeline/BucketSelectorTests.java | 14 +- .../pipeline/BucketSortTests.java | 30 +- .../CumulativeSumAggregatorTests.java | 64 +- .../pipeline/CumulativeSumTests.java | 25 +- .../pipeline/DerivativeAggregatorTests.java | 810 ++++--- .../pipeline/DerivativeTests.java | 17 +- .../pipeline/ExtendedStatsBucketTests.java | 40 +- .../aggregations/pipeline/GapPolicyTests.java | 3 +- .../InternalBucketMetricValueTests.java | 53 +- .../pipeline/InternalDerivativeTests.java | 51 +- .../InternalExtendedStatsBucketTests.java | 15 +- .../InternalPercentilesBucketTests.java | 125 +- .../pipeline/InternalSimpleValueTests.java | 47 +- .../aggregations/pipeline/MaxBucketTests.java | 25 +- .../aggregations/pipeline/MinBucketTests.java | 25 +- .../pipeline/MovFnAggrgatorTests.java | 21 +- ...eAggregationBuilderSerializationTests.java | 26 +- .../MovFnWhitelistedFunctionTests.java | 66 +- .../pipeline/PercentilesBucketTests.java | 39 +- .../PipelineAggregationHelperTests.java | 12 +- .../pipeline/SerialDifferenceTests.java | 25 +- .../pipeline/StatsBucketTests.java | 28 +- .../aggregations/pipeline/SumBucketTests.java | 25 +- .../support/CoreValuesSourceTypeTests.java | 4 +- .../support/IncludeExcludeTests.java | 17 +- .../support/MissingValuesTests.java | 11 +- .../MultiValuesSourceFieldConfigTests.java | 12 +- .../support/ScriptValuesTests.java | 3 +- .../aggregations/support/ValueTypeTests.java | 1 - .../support/ValuesSourceRegistryTests.java | 5 +- .../xpack/analytics/AnalyticsPlugin.java | 88 +- .../xpack/analytics/AnalyticsUsage.java | 5 +- .../action/AnalyticsUsageTransportAction.java | 40 +- .../action/TransportAnalyticsStatsAction.java | 38 +- .../AnalyticsAggregatorFactory.java | 96 +- .../HistoBackedHistogramAggregator.java | 21 +- .../range/HistoBackedRangeAggregator.java | 33 +- ...ctHistoBackedHDRPercentilesAggregator.java | 21 +- ...stoBackedTDigestPercentilesAggregator.java | 23 +- 
.../metrics/HistoBackedAvgAggregator.java | 5 +- ...stoBackedHDRPercentileRanksAggregator.java | 14 +- .../HistoBackedHDRPercentilesAggregator.java | 14 +- .../metrics/HistoBackedMaxAggregator.java | 2 +- .../metrics/HistoBackedMinAggregator.java | 2 +- .../metrics/HistoBackedSumAggregator.java | 5 +- ...ackedTDigestPercentileRanksAggregator.java | 20 +- ...stoBackedTDigestPercentilesAggregator.java | 20 +- .../HistoBackedValueCountAggregator.java | 16 +- .../support/AnalyticsValuesSourceType.java | 6 +- .../boxplot/BoxplotAggregationBuilder.java | 34 +- .../analytics/boxplot/BoxplotAggregator.java | 23 +- .../boxplot/BoxplotAggregatorFactory.java | 31 +- .../boxplot/BoxplotAggregatorSupplier.java | 16 +- .../analytics/boxplot/InternalBoxplot.java | 1 - ...CardinalityPipelineAggregationBuilder.java | 12 +- ...mulativeCardinalityPipelineAggregator.java | 24 +- .../mapper/HistogramFieldMapper.java | 122 +- ...PercentilesPipelineAggregationBuilder.java | 14 +- .../MovingPercentilesPipelineAggregator.java | 122 +- .../MultiTermsAggregationBuilder.java | 18 +- .../MultiTermsAggregationFactory.java | 44 +- .../multiterms/MultiTermsAggregator.java | 2 +- .../NormalizePipelineAggregationBuilder.java | 26 +- .../NormalizePipelineAggregator.java | 18 +- .../normalize/NormalizePipelineMethods.java | 3 +- .../rate/AbstractRateAggregator.java | 2 +- .../rate/RateAggregationBuilder.java | 35 +- .../analytics/rate/RateAggregatorFactory.java | 10 +- .../xpack/analytics/rate/RateMode.java | 3 +- .../stringstats/InternalStringStats.java | 51 +- .../StringStatsAggregationBuilder.java | 42 +- .../stringstats/StringStatsAggregator.java | 44 +- .../StringStatsAggregatorFactory.java | 29 +- .../StringStatsAggregatorSupplier.java | 16 +- .../topmetrics/InternalTopMetrics.java | 27 +- .../TopMetricsAggregationBuilder.java | 64 +- .../topmetrics/TopMetricsAggregator.java | 6 +- .../TopMetricsAggregatorFactory.java | 34 +- .../xpack/analytics/ttest/InternalTTest.java | 1 - .../ttest/PairedTTestAggregator.java | 20 +- .../analytics/ttest/PairedTTestState.java | 8 +- .../ttest/TTestAggregationBuilder.java | 40 +- .../analytics/ttest/TTestAggregator.java | 14 +- .../ttest/TTestAggregatorFactory.java | 24 +- .../xpack/analytics/ttest/TTestStats.java | 4 +- .../analytics/ttest/TTestStatsBuilder.java | 4 +- .../xpack/analytics/ttest/TTestType.java | 4 +- .../ttest/UnpairedTTestAggregator.java | 31 +- .../analytics/ttest/UnpairedTTestState.java | 16 +- .../xpack/analytics/AnalyticsTestsUtils.java | 15 +- .../AnalyticsInfoTransportActionTests.java | 30 +- ...AnalyticsStatsActionNodeResponseTests.java | 2 +- .../TransportAnalyticsStatsActionTests.java | 19 +- .../HistoBackedHistogramAggregatorTests.java | 78 +- .../HistoBackedRangeAggregatorTests.java | 131 +- ...regatedPercentileRanksAggregatorTests.java | 24 +- ...eAggregatedPercentilesAggregatorTests.java | 108 +- .../HistoBackedAvgAggregatorTests.java | 55 +- .../HistoBackedMaxAggregatorTests.java | 75 +- .../HistoBackedMinAggregatorTests.java | 75 +- .../HistoBackedSumAggregatorTests.java | 75 +- .../HistoBackedValueCountAggregatorTests.java | 76 +- .../HistogramPercentileAggregationTests.java | 138 +- ...regatedPercentileRanksAggregatorTests.java | 32 +- ...eAggregatedPercentilesAggregatorTests.java | 95 +- .../BoxplotAggregationBuilderTests.java | 19 +- .../boxplot/BoxplotAggregatorTests.java | 102 +- .../boxplot/InternalBoxplotTests.java | 15 +- .../CumulativeCardinalityAggregatorTests.java | 67 +- .../CumulativeCardinalityTests.java | 33 +- 
.../mapper/HistogramFieldMapperTests.java | 34 +- ...ingPercentilesAbstractAggregatorTests.java | 23 +- .../MovingPercentilesHDRAggregatorTests.java | 12 +- ...vingPercentilesTDigestAggregatorTests.java | 13 +- .../MovingPercentilesTests.java | 38 +- .../multiterms/InternalMultiTermsTests.java | 32 +- .../MultiTermsAggregationBuilderTests.java | 34 +- .../multiterms/MultiTermsAggregatorTests.java | 2 +- .../normalize/NormalizeAggregatorTests.java | 94 +- .../NormalizePipelineMethodsTests.java | 74 +- .../analytics/normalize/NormalizeTests.java | 35 +- .../analytics/rate/InternalRateTests.java | 14 +- .../rate/RateAggregationBuilderTests.java | 2 +- .../analytics/rate/RateAggregatorTests.java | 124 +- .../stringstats/InternalStringStatsTests.java | 99 +- .../StringStatsAggregationBuilderTests.java | 39 +- .../StringStatsAggregatorTests.java | 99 +- .../InternalTopMetricsReduceTests.java | 14 +- .../topmetrics/InternalTopMetricsTests.java | 427 ++-- .../TopMetricsAggregationBuilderTests.java | 62 +- .../TopMetricsAggregatorMetricsTests.java | 2 +- .../topmetrics/TopMetricsAggregatorTests.java | 261 ++- .../analytics/ttest/InternalTTestTests.java | 14 +- .../ttest/TTestAggregationBuilderTests.java | 21 +- .../analytics/ttest/TTestAggregatorTests.java | 357 +-- .../elasticsearch/xpack/rollup/Rollup.java | 102 +- .../rollup/RollupJobIdentifierUtils.java | 87 +- .../xpack/rollup/RollupRequestTranslator.java | 78 +- .../rollup/RollupResponseTranslator.java | 300 ++- .../rollup/RollupUsageTransportAction.java | 29 +- .../xpack/rollup/action/RollupIndexCaps.java | 61 +- .../TransportDeleteRollupJobAction.java | 59 +- .../action/TransportGetRollupCapsAction.java | 11 +- .../TransportGetRollupIndexCapsAction.java | 67 +- .../action/TransportGetRollupJobAction.java | 60 +- .../action/TransportPutRollupJobAction.java | 286 ++- .../action/TransportRollupSearchAction.java | 136 +- .../action/TransportStartRollupAction.java | 51 +- .../action/TransportStopRollupAction.java | 104 +- .../rollup/action/TransportTaskHelper.java | 7 +- .../xpack/rollup/job/IndexerUtils.java | 41 +- .../xpack/rollup/job/RollupIDGenerator.java | 3 +- .../xpack/rollup/job/RollupIndexer.java | 63 +- .../xpack/rollup/job/RollupJobTask.java | 207 +- .../rest/RestDeleteRollupJobAction.java | 22 +- .../rollup/rest/RestGetRollupCapsAction.java | 4 +- .../rest/RestGetRollupIndexCapsAction.java | 6 +- .../rollup/rest/RestGetRollupJobsAction.java | 5 +- .../rollup/rest/RestPutRollupJobAction.java | 5 +- .../rollup/rest/RestRollupSearchAction.java | 15 +- .../rollup/rest/RestStartRollupJobAction.java | 3 +- .../rollup/rest/RestStopRollupJobAction.java | 3 +- .../rollup/v2/CompressingOfflineSorter.java | 7 +- .../xpack/rollup/v2/FieldValueFetcher.java | 9 +- .../xpack/rollup/v2/RestRollupAction.java | 2 +- .../xpack/rollup/v2/RollupShardIndexer.java | 74 +- .../rollup/v2/TransportRollupAction.java | 184 +- .../v2/TransportRollupIndexerAction.java | 69 +- .../xpack/rollup/v2/XExternalRefSorter.java | 12 +- .../xpack/rollup/LocalStateRollup.java | 1 - .../RollupInfoTransportActionTests.java | 9 +- .../rollup/RollupJobIdentifierUtilTests.java | 510 +++-- .../rollup/RollupRequestTranslationTests.java | 185 +- .../RollupResponseTranslationTests.java | 1195 +++++----- .../action/DeleteJobActionRequestTests.java | 2 - .../action/GetJobsActionRequestTests.java | 56 +- .../GetRollupCapsActionRequestTests.java | 96 +- .../GetRollupIndexCapsActionRequestTests.java | 58 +- .../action/PutJobActionRequestTests.java | 3 +- 
.../action/PutJobStateMachineTests.java | 230 +- .../rollup/action/RollupIndexCapsTests.java | 14 +- .../rollup/action/SearchActionTests.java | 472 ++-- .../action/StartJobActionRequestTests.java | 1 - .../action/TransportTaskHelperTests.java | 32 +- .../rollup/action/job/RollupIndexTests.java | 2 +- .../xpack/rollup/config/ConfigTests.java | 13 +- .../xpack/rollup/job/IndexerUtilsTests.java | 95 +- .../job/RollupIndexerIndexingTests.java | 697 ++++-- .../rollup/job/RollupIndexerStateTests.java | 198 +- .../xpack/rollup/job/RollupIndexerTests.java | 1 - .../xpack/rollup/job/RollupJobTaskTests.java | 529 +++-- .../v2/RollupActionSingleNodeTests.java | 145 +- 852 files changed, 34811 insertions(+), 24649 deletions(-) diff --git a/build-tools-internal/src/main/groovy/elasticsearch.formatting.gradle b/build-tools-internal/src/main/groovy/elasticsearch.formatting.gradle index 709b2033fd7a0..9227dcc459e72 100644 --- a/build-tools-internal/src/main/groovy/elasticsearch.formatting.gradle +++ b/build-tools-internal/src/main/groovy/elasticsearch.formatting.gradle @@ -59,7 +59,6 @@ def projectPathsToExclude = [ ':libs:elasticsearch-secure-sm', ':libs:elasticsearch-ssl-config', ':libs:elasticsearch-x-content', - ':modules:aggs-matrix-stats', ':modules:analysis-common', ':modules:ingest-common', ':modules:ingest-geoip', @@ -109,7 +108,6 @@ def projectPathsToExclude = [ ':test:logger-usage', ':x-pack:license-tools', ':x-pack:plugin', - ':x-pack:plugin:analytics', ':x-pack:plugin:async-search', ':x-pack:plugin:async-search:qa', ':x-pack:plugin:async-search:qa:security', @@ -152,8 +150,6 @@ def projectPathsToExclude = [ ':x-pack:plugin:repository-encrypted:qa:azure', ':x-pack:plugin:repository-encrypted:qa:gcs', ':x-pack:plugin:repository-encrypted:qa:s3', - ':x-pack:plugin:rollup', - ':x-pack:plugin:rollup:qa:rest', ':x-pack:plugin:search-business-rules', ':x-pack:plugin:security', ':x-pack:plugin:security:cli', @@ -202,6 +198,7 @@ subprojects { if (projectPathsToExclude.contains(project.path) == false) { project.apply plugin: "com.diffplug.spotless" + spotless { java { if (project.path == ':server') { @@ -209,9 +206,8 @@ subprojects { 'src/*/java/org/elasticsearch/action/admin/cluster/snapshots/**/*.java', 'src/*/java/org/elasticsearch/index/snapshots/**/*.java', 'src/*/java/org/elasticsearch/repositories/**/*.java', + 'src/*/java/org/elasticsearch/search/aggregations/**/*.java', 'src/*/java/org/elasticsearch/snapshots/**/*.java' - - targetExclude 'src/main/java/org/elasticsearch/search/aggregations/metrics/HyperLogLogPlusPlus.java' } else { // Normally this isn't necessary, but we have Java sources in // non-standard places diff --git a/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/matrix/MatrixAggregationPlugin.java b/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/matrix/MatrixAggregationPlugin.java index c1b24c1fff86c..57ff7d227ceed 100644 --- a/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/matrix/MatrixAggregationPlugin.java +++ b/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/matrix/MatrixAggregationPlugin.java @@ -21,7 +21,9 @@ public class MatrixAggregationPlugin extends Plugin implements SearchPlugin { @Override public List getAggregations() { - return singletonList(new AggregationSpec(MatrixStatsAggregationBuilder.NAME, MatrixStatsAggregationBuilder::new, - new MatrixStatsParser()).addResultReader(InternalMatrixStats::new)); + return singletonList( + new 
AggregationSpec(MatrixStatsAggregationBuilder.NAME, MatrixStatsAggregationBuilder::new, new MatrixStatsParser()) + .addResultReader(InternalMatrixStats::new) + ); } } diff --git a/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/matrix/spi/MatrixStatsNamedXContentProvider.java b/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/matrix/spi/MatrixStatsNamedXContentProvider.java index ce7da6c2576f0..659de22577a57 100644 --- a/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/matrix/spi/MatrixStatsNamedXContentProvider.java +++ b/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/matrix/spi/MatrixStatsNamedXContentProvider.java @@ -8,9 +8,9 @@ package org.elasticsearch.search.aggregations.matrix.spi; -import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.xcontent.ContextParser; import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.plugins.spi.NamedXContentProvider; import org.elasticsearch.search.aggregations.Aggregation; import org.elasticsearch.search.aggregations.matrix.stats.MatrixStatsAggregationBuilder; diff --git a/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/matrix/stats/InternalMatrixStats.java b/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/matrix/stats/InternalMatrixStats.java index 5fa90395fd76d..15264b9b51c93 100644 --- a/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/matrix/stats/InternalMatrixStats.java +++ b/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/matrix/stats/InternalMatrixStats.java @@ -30,8 +30,13 @@ public class InternalMatrixStats extends InternalAggregation implements MatrixSt private final MatrixStatsResults results; /** per shard ctor */ - InternalMatrixStats(String name, long count, RunningStats multiFieldStatsResults, MatrixStatsResults results, - Map metadata) { + InternalMatrixStats( + String name, + long count, + RunningStats multiFieldStatsResults, + MatrixStatsResults results, + Map metadata + ) { super(name, metadata); assert count >= 0; this.stats = multiFieldStatsResults; @@ -224,7 +229,7 @@ public Object getProperty(List path) { public InternalAggregation reduce(List aggregations, ReduceContext reduceContext) { // merge stats across all shards List aggs = new ArrayList<>(aggregations); - aggs.removeIf(p -> ((InternalMatrixStats)p).stats == null); + aggs.removeIf(p -> ((InternalMatrixStats) p).stats == null); // return empty result iff all stats are null if (aggs.isEmpty()) { @@ -260,7 +265,6 @@ public boolean equals(Object obj) { if (super.equals(obj) == false) return false; InternalMatrixStats other = (InternalMatrixStats) obj; - return Objects.equals(this.stats, other.stats) && - Objects.equals(this.results, other.results); + return Objects.equals(this.stats, other.stats) && Objects.equals(this.results, other.results); } } diff --git a/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/matrix/stats/MatrixStats.java b/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/matrix/stats/MatrixStats.java index 686ed3a36bd7d..b423fa2e5caf4 100644 --- a/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/matrix/stats/MatrixStats.java +++ 
b/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/matrix/stats/MatrixStats.java @@ -15,18 +15,25 @@ public interface MatrixStats extends Aggregation { /** return the total document count */ long getDocCount(); + /** return total field count (differs from docCount if there are missing values) */ long getFieldCount(String field); + /** return the field mean */ double getMean(String field); + /** return the field variance */ double getVariance(String field); + /** return the skewness of the distribution */ double getSkewness(String field); + /** return the kurtosis of the distribution */ double getKurtosis(String field); + /** return the covariance between field x and field y */ double getCovariance(String fieldX, String fieldY); + /** return the correlation coefficient of field x and field y */ double getCorrelation(String fieldX, String fieldY); } diff --git a/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/matrix/stats/MatrixStatsAggregationBuilder.java b/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/matrix/stats/MatrixStatsAggregationBuilder.java index b779ae8e2578e..63ac6cfacef38 100644 --- a/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/matrix/stats/MatrixStatsAggregationBuilder.java +++ b/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/matrix/stats/MatrixStatsAggregationBuilder.java @@ -22,8 +22,7 @@ import java.io.IOException; import java.util.Map; -public class MatrixStatsAggregationBuilder - extends ArrayValuesSourceAggregationBuilder.LeafOnly { +public class MatrixStatsAggregationBuilder extends ArrayValuesSourceAggregationBuilder.LeafOnly { public static final String NAME = "matrix_stats"; private MultiValueMode multiValueMode = MultiValueMode.AVG; @@ -32,8 +31,11 @@ public MatrixStatsAggregationBuilder(String name) { super(name); } - protected MatrixStatsAggregationBuilder(MatrixStatsAggregationBuilder clone, - AggregatorFactories.Builder factoriesBuilder, Map metadata) { + protected MatrixStatsAggregationBuilder( + MatrixStatsAggregationBuilder clone, + AggregatorFactories.Builder factoriesBuilder, + Map metadata + ) { super(clone, factoriesBuilder, metadata); this.multiValueMode = clone.multiValueMode; } @@ -65,10 +67,12 @@ public MultiValueMode multiValueMode() { } @Override - protected MatrixStatsAggregatorFactory innerBuild(AggregationContext context, - Map configs, - AggregatorFactory parent, - AggregatorFactories.Builder subFactoriesBuilder) throws IOException { + protected MatrixStatsAggregatorFactory innerBuild( + AggregationContext context, + Map configs, + AggregatorFactory parent, + AggregatorFactories.Builder subFactoriesBuilder + ) throws IOException { return new MatrixStatsAggregatorFactory(name, configs, multiValueMode, context, parent, subFactoriesBuilder, metadata); } diff --git a/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/matrix/stats/MatrixStatsAggregator.java b/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/matrix/stats/MatrixStatsAggregator.java index 211e0a0d0a94c..f55dc47c84677 100644 --- a/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/matrix/stats/MatrixStatsAggregator.java +++ b/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/matrix/stats/MatrixStatsAggregator.java @@ -9,8 +9,8 @@ import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.ScoreMode; 
-import org.elasticsearch.core.Releasables; import org.elasticsearch.common.util.ObjectArray; +import org.elasticsearch.core.Releasables; import org.elasticsearch.index.fielddata.NumericDoubleValues; import org.elasticsearch.search.MultiValueMode; import org.elasticsearch.search.aggregations.Aggregator; @@ -35,8 +35,14 @@ final class MatrixStatsAggregator extends MetricsAggregator { /** array of descriptive stats, per shard, needed to compute the correlation */ ObjectArray stats; - MatrixStatsAggregator(String name, Map valuesSources, AggregationContext context, - Aggregator parent, MultiValueMode multiValueMode, Map metadata) throws IOException { + MatrixStatsAggregator( + String name, + Map valuesSources, + AggregationContext context, + Aggregator parent, + MultiValueMode multiValueMode, + Map metadata + ) throws IOException { super(name, context, parent, metadata); if (valuesSources != null && valuesSources.isEmpty() == false) { this.valuesSources = new NumericArrayValuesSource(valuesSources, multiValueMode); @@ -52,8 +58,7 @@ public ScoreMode scoreMode() { } @Override - public LeafBucketCollector getLeafCollector(LeafReaderContext ctx, - final LeafBucketCollector sub) throws IOException { + public LeafBucketCollector getLeafCollector(LeafReaderContext ctx, final LeafBucketCollector sub) throws IOException { if (valuesSources == null) { return LeafBucketCollector.NO_OP_COLLECTOR; } diff --git a/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/matrix/stats/MatrixStatsAggregatorFactory.java b/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/matrix/stats/MatrixStatsAggregatorFactory.java index 2cbcc3992fa7d..6fd85401b8a41 100644 --- a/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/matrix/stats/MatrixStatsAggregatorFactory.java +++ b/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/matrix/stats/MatrixStatsAggregatorFactory.java @@ -26,13 +26,15 @@ final class MatrixStatsAggregatorFactory extends ArrayValuesSourceAggregatorFact private final MultiValueMode multiValueMode; - MatrixStatsAggregatorFactory(String name, - Map configs, - MultiValueMode multiValueMode, - AggregationContext context, - AggregatorFactory parent, - AggregatorFactories.Builder subFactoriesBuilder, - Map metadata) throws IOException { + MatrixStatsAggregatorFactory( + String name, + Map configs, + MultiValueMode multiValueMode, + AggregationContext context, + AggregatorFactory parent, + AggregatorFactories.Builder subFactoriesBuilder, + Map metadata + ) throws IOException { super(name, configs, context, parent, subFactoriesBuilder, metadata); this.multiValueMode = multiValueMode; } @@ -43,15 +45,18 @@ protected Aggregator createUnmapped(Aggregator parent, Map metad } @Override - protected Aggregator doCreateInternal(Map valuesSources, - Aggregator parent, - CardinalityUpperBound cardinality, - Map metadata) throws IOException { + protected Aggregator doCreateInternal( + Map valuesSources, + Aggregator parent, + CardinalityUpperBound cardinality, + Map metadata + ) throws IOException { Map typedValuesSources = new HashMap<>(valuesSources.size()); for (Map.Entry entry : valuesSources.entrySet()) { if (entry.getValue() instanceof ValuesSource.Numeric == false) { - throw new AggregationExecutionException("ValuesSource type " + entry.getValue().toString() + - "is not supported for aggregation " + this.name()); + throw new AggregationExecutionException( + "ValuesSource type " + entry.getValue().toString() + "is 
not supported for aggregation " + this.name() + ); } // TODO: There must be a better option than this. typedValuesSources.put(entry.getKey(), (ValuesSource.Numeric) entry.getValue()); diff --git a/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/matrix/stats/MatrixStatsParser.java b/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/matrix/stats/MatrixStatsParser.java index 80649737504a2..1001e39cde00b 100644 --- a/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/matrix/stats/MatrixStatsParser.java +++ b/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/matrix/stats/MatrixStatsParser.java @@ -26,8 +26,13 @@ public MatrixStatsParser() { } @Override - protected boolean token(String aggregationName, String currentFieldName, XContentParser.Token token, XContentParser parser, - Map otherOptions) throws IOException { + protected boolean token( + String aggregationName, + String currentFieldName, + XContentParser.Token token, + XContentParser parser, + Map otherOptions + ) throws IOException { if (MULTIVALUE_MODE_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { if (token == XContentParser.Token.VALUE_STRING) { otherOptions.put(MULTIVALUE_MODE_FIELD, parser.text()); @@ -38,10 +43,14 @@ protected boolean token(String aggregationName, String currentFieldName, XConten } @Override - protected MatrixStatsAggregationBuilder createFactory(String aggregationName, ValuesSourceType valuesSourceType, - ValueType targetValueType, Map otherOptions) { + protected MatrixStatsAggregationBuilder createFactory( + String aggregationName, + ValuesSourceType valuesSourceType, + ValueType targetValueType, + Map otherOptions + ) { MatrixStatsAggregationBuilder builder = new MatrixStatsAggregationBuilder(aggregationName); - String mode = (String)otherOptions.get(MULTIVALUE_MODE_FIELD); + String mode = (String) otherOptions.get(MULTIVALUE_MODE_FIELD); if (mode != null) { builder.multiValueMode(MultiValueMode.fromString(mode)); } diff --git a/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/matrix/stats/MatrixStatsResults.java b/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/matrix/stats/MatrixStatsResults.java index f98001179e40c..563001d4cc9f3 100644 --- a/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/matrix/stats/MatrixStatsResults.java +++ b/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/matrix/stats/MatrixStatsResults.java @@ -189,7 +189,7 @@ private void compute() { // update skewness results.skewness.put(fieldName, Math.sqrt(results.docCount) * results.skewness.get(fieldName) / Math.pow(var, 1.5D)); // update kurtosis - results.kurtosis.put(fieldName, (double)results.docCount * results.kurtosis.get(fieldName) / (var * var)); + results.kurtosis.put(fieldName, (double) results.docCount * results.kurtosis.get(fieldName) / (var * var)); // update variances results.variances.put(fieldName, results.variances.get(fieldName) / nM1); } @@ -224,8 +224,7 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; MatrixStatsResults that = (MatrixStatsResults) o; - return Objects.equals(results, that.results) && - Objects.equals(correlation, that.correlation); + return Objects.equals(results, that.results) && Objects.equals(correlation, that.correlation); } @Override diff --git 
a/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/matrix/stats/ParsedMatrixStats.java b/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/matrix/stats/ParsedMatrixStats.java index 9386b15fc37ae..fe0f59cb17db4 100644 --- a/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/matrix/stats/ParsedMatrixStats.java +++ b/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/matrix/stats/ParsedMatrixStats.java @@ -8,8 +8,8 @@ package org.elasticsearch.search.aggregations.matrix.stats; -import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.xcontent.ObjectParser; +import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.search.aggregations.ParsedAggregation; @@ -140,8 +140,11 @@ private static T checkedGet(final Map values, final String fieldN return values.get(fieldName); } - private static final ObjectParser PARSER = - new ObjectParser<>(ParsedMatrixStats.class.getSimpleName(), true, ParsedMatrixStats::new); + private static final ObjectParser PARSER = new ObjectParser<>( + ParsedMatrixStats.class.getSimpleName(), + true, + ParsedMatrixStats::new + ); static { declareAggregationFields(PARSER); PARSER.declareLong(ParsedMatrixStats::setDocCount, CommonFields.DOC_COUNT); @@ -176,21 +179,27 @@ static class ParsedMatrixStatsResult { Map covariances; Map correlations; - private static final ObjectParser RESULT_PARSER = - new ObjectParser<>(ParsedMatrixStatsResult.class.getSimpleName(), true, ParsedMatrixStatsResult::new); + private static final ObjectParser RESULT_PARSER = new ObjectParser<>( + ParsedMatrixStatsResult.class.getSimpleName(), + true, + ParsedMatrixStatsResult::new + ); static { - RESULT_PARSER.declareString((result, name) -> result.name = name, - new ParseField(InternalMatrixStats.Fields.NAME)); - RESULT_PARSER.declareLong((result, count) -> result.count = count, - new ParseField(InternalMatrixStats.Fields.COUNT)); - RESULT_PARSER.declareDouble((result, mean) -> result.mean = mean, - new ParseField(InternalMatrixStats.Fields.MEAN)); - RESULT_PARSER.declareDouble((result, variance) -> result.variance = variance, - new ParseField(InternalMatrixStats.Fields.VARIANCE)); - RESULT_PARSER.declareDouble((result, skewness) -> result.skewness = skewness, - new ParseField(InternalMatrixStats.Fields.SKEWNESS)); - RESULT_PARSER.declareDouble((result, kurtosis) -> result.kurtosis = kurtosis, - new ParseField(InternalMatrixStats.Fields.KURTOSIS)); + RESULT_PARSER.declareString((result, name) -> result.name = name, new ParseField(InternalMatrixStats.Fields.NAME)); + RESULT_PARSER.declareLong((result, count) -> result.count = count, new ParseField(InternalMatrixStats.Fields.COUNT)); + RESULT_PARSER.declareDouble((result, mean) -> result.mean = mean, new ParseField(InternalMatrixStats.Fields.MEAN)); + RESULT_PARSER.declareDouble( + (result, variance) -> result.variance = variance, + new ParseField(InternalMatrixStats.Fields.VARIANCE) + ); + RESULT_PARSER.declareDouble( + (result, skewness) -> result.skewness = skewness, + new ParseField(InternalMatrixStats.Fields.SKEWNESS) + ); + RESULT_PARSER.declareDouble( + (result, kurtosis) -> result.kurtosis = kurtosis, + new ParseField(InternalMatrixStats.Fields.KURTOSIS) + ); RESULT_PARSER.declareObject((ParsedMatrixStatsResult result, Map covars) -> { result.covariances = new 
LinkedHashMap<>(covars.size()); diff --git a/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/matrix/stats/RunningStats.java b/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/matrix/stats/RunningStats.java index 4795175ee55cc..6f719bbb2ccb3 100644 --- a/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/matrix/stats/RunningStats.java +++ b/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/matrix/stats/RunningStats.java @@ -69,25 +69,25 @@ private void init() { public RunningStats(StreamInput in) throws IOException { this(); // read doc count - docCount = (Long)in.readGenericValue(); + docCount = (Long) in.readGenericValue(); // read fieldSum - fieldSum = convertIfNeeded((Map)in.readGenericValue()); + fieldSum = convertIfNeeded((Map) in.readGenericValue()); // counts - counts = convertIfNeeded((Map)in.readGenericValue()); + counts = convertIfNeeded((Map) in.readGenericValue()); // means - means = convertIfNeeded((Map)in.readGenericValue()); + means = convertIfNeeded((Map) in.readGenericValue()); // variances - variances = convertIfNeeded((Map)in.readGenericValue()); + variances = convertIfNeeded((Map) in.readGenericValue()); // skewness - skewness = convertIfNeeded((Map)in.readGenericValue()); + skewness = convertIfNeeded((Map) in.readGenericValue()); // kurtosis - kurtosis = convertIfNeeded((Map)in.readGenericValue()); + kurtosis = convertIfNeeded((Map) in.readGenericValue()); // read covariances - covariances = convertIfNeeded((Map>)in.readGenericValue()); + covariances = convertIfNeeded((Map>) in.readGenericValue()); } // Convert Map to HashMap if it isn't - private static HashMap convertIfNeeded(Map map) { + private static HashMap convertIfNeeded(Map map) { if (map instanceof HashMap) { return (HashMap) map; } else { @@ -211,7 +211,7 @@ public void merge(final RunningStats other) { this.counts.put(fieldName, other.counts.get(fieldName).longValue()); this.fieldSum.put(fieldName, other.fieldSum.get(fieldName).doubleValue()); this.variances.put(fieldName, other.variances.get(fieldName).doubleValue()); - this.skewness.put(fieldName , other.skewness.get(fieldName).doubleValue()); + this.skewness.put(fieldName, other.skewness.get(fieldName).doubleValue()); this.kurtosis.put(fieldName, other.kurtosis.get(fieldName).doubleValue()); if (other.covariances.containsKey(fieldName)) { this.covariances.put(fieldName, other.covariances.get(fieldName)); @@ -314,14 +314,14 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; RunningStats that = (RunningStats) o; - return docCount == that.docCount && - Objects.equals(fieldSum, that.fieldSum) && - Objects.equals(counts, that.counts) && - Objects.equals(means, that.means) && - Objects.equals(variances, that.variances) && - Objects.equals(skewness, that.skewness) && - Objects.equals(kurtosis, that.kurtosis) && - Objects.equals(covariances, that.covariances); + return docCount == that.docCount + && Objects.equals(fieldSum, that.fieldSum) + && Objects.equals(counts, that.counts) + && Objects.equals(means, that.means) + && Objects.equals(variances, that.variances) + && Objects.equals(skewness, that.skewness) + && Objects.equals(kurtosis, that.kurtosis) + && Objects.equals(covariances, that.covariances); } @Override diff --git a/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/support/ArrayValuesSourceAggregationBuilder.java 
b/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/support/ArrayValuesSourceAggregationBuilder.java index b12be3e6887f8..976719380e361 100644 --- a/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/support/ArrayValuesSourceAggregationBuilder.java +++ b/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/support/ArrayValuesSourceAggregationBuilder.java @@ -7,9 +7,9 @@ */ package org.elasticsearch.search.aggregations.support; -import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.search.aggregations.AbstractAggregationBuilder; import org.elasticsearch.search.aggregations.AggregationInitializationException; @@ -25,13 +25,13 @@ import java.util.Map; import java.util.Objects; -public abstract class ArrayValuesSourceAggregationBuilder> - extends AbstractAggregationBuilder { +public abstract class ArrayValuesSourceAggregationBuilder> extends + AbstractAggregationBuilder { public static final ParseField MULTIVALUE_MODE_FIELD = new ParseField("mode"); - public abstract static class LeafOnly> - extends ArrayValuesSourceAggregationBuilder { + public abstract static class LeafOnly> extends ArrayValuesSourceAggregationBuilder< + AB> { protected LeafOnly(String name) { super(name); @@ -40,8 +40,9 @@ protected LeafOnly(String name) { protected LeafOnly(LeafOnly clone, Builder factoriesBuilder, Map metadata) { super(clone, factoriesBuilder, metadata); if (factoriesBuilder.count() > 0) { - throw new AggregationInitializationException("Aggregator [" + name + "] of type [" - + getType() + "] cannot accept sub-aggregations"); + throw new AggregationInitializationException( + "Aggregator [" + name + "] of type [" + getType() + "] cannot accept sub-aggregations" + ); } } @@ -54,8 +55,9 @@ protected LeafOnly(StreamInput in) throws IOException { @Override public AB subAggregations(Builder subFactories) { - throw new AggregationInitializationException("Aggregator [" + name + "] of type [" + - getType() + "] cannot accept sub-aggregations"); + throw new AggregationInitializationException( + "Aggregator [" + name + "] of type [" + getType() + "] cannot accept sub-aggregations" + ); } @Override @@ -77,8 +79,11 @@ protected ArrayValuesSourceAggregationBuilder(String name) { super(name); } - protected ArrayValuesSourceAggregationBuilder(ArrayValuesSourceAggregationBuilder clone, - Builder factoriesBuilder, Map metadata) { + protected ArrayValuesSourceAggregationBuilder( + ArrayValuesSourceAggregationBuilder clone, + Builder factoriesBuilder, + Map metadata + ) { super(clone, factoriesBuilder, metadata); this.fields = new ArrayList<>(clone.fields); this.userValueTypeHint = clone.userValueTypeHint; @@ -87,8 +92,7 @@ protected ArrayValuesSourceAggregationBuilder(ArrayValuesSourceAggregationBuilde this.missing = clone.missing; } - protected ArrayValuesSourceAggregationBuilder(StreamInput in) - throws IOException { + protected ArrayValuesSourceAggregationBuilder(StreamInput in) throws IOException { super(in); read(in); } @@ -98,7 +102,7 @@ protected ArrayValuesSourceAggregationBuilder(StreamInput in) */ @SuppressWarnings("unchecked") private void read(StreamInput in) throws IOException { - fields = (ArrayList)in.readGenericValue(); + fields = (ArrayList) in.readGenericValue(); userValueTypeHint = 
in.readOptionalWriteable(ValueType::readFromStream); format = in.readOptionalString(); missingMap = in.readMap(); @@ -178,8 +182,11 @@ public Map missingMap() { } @Override - protected final ArrayValuesSourceAggregatorFactory doBuild(AggregationContext context, AggregatorFactory parent, - Builder subFactoriesBuilder) throws IOException { + protected final ArrayValuesSourceAggregatorFactory doBuild( + AggregationContext context, + AggregatorFactory parent, + Builder subFactoriesBuilder + ) throws IOException { Map configs = resolveConfig(context); ArrayValuesSourceAggregatorFactory factory = innerBuild(context, configs, parent, subFactoriesBuilder); return factory; @@ -188,17 +195,27 @@ protected final ArrayValuesSourceAggregatorFactory doBuild(AggregationContext co protected Map resolveConfig(AggregationContext context) { HashMap configs = new HashMap<>(); for (String field : fields) { - ValuesSourceConfig config = ValuesSourceConfig.resolveUnregistered(context, userValueTypeHint, field, null, - missingMap.get(field), null, format, CoreValuesSourceType.KEYWORD); + ValuesSourceConfig config = ValuesSourceConfig.resolveUnregistered( + context, + userValueTypeHint, + field, + null, + missingMap.get(field), + null, + format, + CoreValuesSourceType.KEYWORD + ); configs.put(field, config); } return configs; } - protected abstract ArrayValuesSourceAggregatorFactory innerBuild(AggregationContext context, - Map configs, - AggregatorFactory parent, - AggregatorFactories.Builder subFactoriesBuilder) throws IOException; + protected abstract ArrayValuesSourceAggregatorFactory innerBuild( + AggregationContext context, + Map configs, + AggregatorFactory parent, + AggregatorFactories.Builder subFactoriesBuilder + ) throws IOException; @Override public final XContentBuilder internalXContent(XContentBuilder builder, Params params) throws IOException { diff --git a/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/support/ArrayValuesSourceAggregatorFactory.java b/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/support/ArrayValuesSourceAggregatorFactory.java index 521647d7ba372..da7cf0ee6a940 100644 --- a/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/support/ArrayValuesSourceAggregatorFactory.java +++ b/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/support/ArrayValuesSourceAggregatorFactory.java @@ -17,15 +17,18 @@ import java.util.HashMap; import java.util.Map; -public abstract class ArrayValuesSourceAggregatorFactory - extends AggregatorFactory { +public abstract class ArrayValuesSourceAggregatorFactory extends AggregatorFactory { protected Map configs; - public ArrayValuesSourceAggregatorFactory(String name, Map configs, - AggregationContext context, AggregatorFactory parent, - AggregatorFactories.Builder subFactoriesBuilder, - Map metadata) throws IOException { + public ArrayValuesSourceAggregatorFactory( + String name, + Map configs, + AggregationContext context, + AggregatorFactory parent, + AggregatorFactories.Builder subFactoriesBuilder, + Map metadata + ) throws IOException { super(name, context, parent, subFactoriesBuilder, metadata); this.configs = configs; } diff --git a/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/support/ArrayValuesSourceParser.java b/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/support/ArrayValuesSourceParser.java index fcd0b4e124081..cfd481a5fb4e1 100644 --- 
a/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/support/ArrayValuesSourceParser.java +++ b/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/support/ArrayValuesSourceParser.java @@ -8,8 +8,8 @@ package org.elasticsearch.search.aggregations.support; -import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.ParsingException; +import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.script.Script; import org.elasticsearch.search.aggregations.AggregationBuilder.CommonFields; @@ -56,8 +56,7 @@ private ArrayValuesSourceParser(boolean formattable, ValuesSourceType valuesSour } @Override - public final ArrayValuesSourceAggregationBuilder parse(String aggregationName, XContentParser parser) - throws IOException { + public final ArrayValuesSourceAggregationBuilder parse(String aggregationName, XContentParser parser) throws IOException { List fields = null; String format = null; @@ -74,12 +73,22 @@ public final ArrayValuesSourceAggregationBuilder parse(String aggregationName } else if (formattable && CommonFields.FORMAT.match(currentFieldName, parser.getDeprecationHandler())) { format = parser.text(); } else if (CommonFields.VALUE_TYPE.match(currentFieldName, parser.getDeprecationHandler())) { - throw new ParsingException(parser.getTokenLocation(), - "Unexpected token " + token + " [" + currentFieldName + "] in [" + aggregationName + "]. " + - "Multi-field aggregations do not support scripts."); + throw new ParsingException( + parser.getTokenLocation(), + "Unexpected token " + + token + + " [" + + currentFieldName + + "] in [" + + aggregationName + + "]. " + + "Multi-field aggregations do not support scripts." + ); } else if (token(aggregationName, currentFieldName, token, parser, otherOptions) == false) { - throw new ParsingException(parser.getTokenLocation(), - "Unexpected token " + token + " [" + currentFieldName + "] in [" + aggregationName + "]."); + throw new ParsingException( + parser.getTokenLocation(), + "Unexpected token " + token + " [" + currentFieldName + "] in [" + aggregationName + "]." + ); } } else if (token == XContentParser.Token.START_OBJECT) { if (CommonFields.MISSING.match(currentFieldName, parser.getDeprecationHandler())) { @@ -88,41 +97,69 @@ public final ArrayValuesSourceAggregationBuilder parse(String aggregationName parseMissingAndAdd(aggregationName, currentFieldName, parser, missingMap); } } else if (Script.SCRIPT_PARSE_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { - throw new ParsingException(parser.getTokenLocation(), - "Unexpected token " + token + " [" + currentFieldName + "] in [" + aggregationName + "]. " + - "Multi-field aggregations do not support scripts."); + throw new ParsingException( + parser.getTokenLocation(), + "Unexpected token " + + token + + " [" + + currentFieldName + + "] in [" + + aggregationName + + "]. " + + "Multi-field aggregations do not support scripts." + ); } else if (token(aggregationName, currentFieldName, token, parser, otherOptions) == false) { - throw new ParsingException(parser.getTokenLocation(), - "Unexpected token " + token + " [" + currentFieldName + "] in [" + aggregationName + "]."); + throw new ParsingException( + parser.getTokenLocation(), + "Unexpected token " + token + " [" + currentFieldName + "] in [" + aggregationName + "]." 
+ ); } } else if (token == XContentParser.Token.START_ARRAY) { if (Script.SCRIPT_PARSE_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { - throw new ParsingException(parser.getTokenLocation(), - "Unexpected token " + token + " [" + currentFieldName + "] in [" + aggregationName + "]. " + - "Multi-field aggregations do not support scripts."); + throw new ParsingException( + parser.getTokenLocation(), + "Unexpected token " + + token + + " [" + + currentFieldName + + "] in [" + + aggregationName + + "]. " + + "Multi-field aggregations do not support scripts." + ); } else if (CommonFields.FIELDS.match(currentFieldName, parser.getDeprecationHandler())) { fields = new ArrayList<>(); while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { if (token == XContentParser.Token.VALUE_STRING) { fields.add(parser.text()); } else { - throw new ParsingException(parser.getTokenLocation(), - "Unexpected token " + token + " [" + currentFieldName + "] in [" + aggregationName + "]."); + throw new ParsingException( + parser.getTokenLocation(), + "Unexpected token " + token + " [" + currentFieldName + "] in [" + aggregationName + "]." + ); } } } else if (token(aggregationName, currentFieldName, token, parser, otherOptions) == false) { - throw new ParsingException(parser.getTokenLocation(), - "Unexpected token " + token + " [" + currentFieldName + "] in [" + aggregationName + "]."); + throw new ParsingException( + parser.getTokenLocation(), + "Unexpected token " + token + " [" + currentFieldName + "] in [" + aggregationName + "]." + ); } } else if (token(aggregationName, currentFieldName, token, parser, otherOptions) == false) { - throw new ParsingException(parser.getTokenLocation(), - "Unexpected token " + token + " [" + currentFieldName + "] in [" + aggregationName + "]."); + throw new ParsingException( + parser.getTokenLocation(), + "Unexpected token " + token + " [" + currentFieldName + "] in [" + aggregationName + "]." + ); } } - ArrayValuesSourceAggregationBuilder factory = createFactory(aggregationName, this.valuesSourceType, this.targetValueType, - otherOptions); + ArrayValuesSourceAggregationBuilder factory = createFactory( + aggregationName, + this.valuesSourceType, + this.targetValueType, + otherOptions + ); if (fields != null) { factory.fields(fields); } @@ -135,8 +172,12 @@ public final ArrayValuesSourceAggregationBuilder parse(String aggregationName return factory; } - private void parseMissingAndAdd(final String aggregationName, final String currentFieldName, - XContentParser parser, final Map missing) throws IOException { + private void parseMissingAndAdd( + final String aggregationName, + final String currentFieldName, + XContentParser parser, + final Map missing + ) throws IOException { XContentParser.Token token = parser.currentToken(); if (token == null) { token = parser.nextToken(); @@ -145,15 +186,18 @@ private void parseMissingAndAdd(final String aggregationName, final String curre if (token == XContentParser.Token.FIELD_NAME) { final String fieldName = parser.currentName(); if (missing.containsKey(fieldName)) { - throw new ParsingException(parser.getTokenLocation(), - "Missing field [" + fieldName + "] already defined as [" + missing.get(fieldName) - + "] in [" + aggregationName + "]."); + throw new ParsingException( + parser.getTokenLocation(), + "Missing field [" + fieldName + "] already defined as [" + missing.get(fieldName) + "] in [" + aggregationName + "]." 
+ ); } parser.nextToken(); missing.put(fieldName, parser.objectText()); } else { - throw new ParsingException(parser.getTokenLocation(), - "Unexpected token " + token + " [" + currentFieldName + "] in [" + aggregationName + "]"); + throw new ParsingException( + parser.getTokenLocation(), + "Unexpected token " + token + " [" + currentFieldName + "] in [" + aggregationName + "]" + ); } } @@ -175,10 +219,12 @@ private void parseMissingAndAdd(final String aggregationName, final String curre * method * @return the created factory */ - protected abstract ArrayValuesSourceAggregationBuilder createFactory(String aggregationName, - ValuesSourceType valuesSourceType, - ValueType targetValueType, - Map otherOptions); + protected abstract ArrayValuesSourceAggregationBuilder createFactory( + String aggregationName, + ValuesSourceType valuesSourceType, + ValueType targetValueType, + Map otherOptions + ); /** * Allows subclasses of {@link ArrayValuesSourceParser} to parse extra @@ -203,6 +249,11 @@ protected abstract ArrayValuesSourceAggregationBuilder createFactory(String a * @throws IOException * if an error occurs whilst parsing */ - protected abstract boolean token(String aggregationName, String currentFieldName, XContentParser.Token token, XContentParser parser, - Map otherOptions) throws IOException; + protected abstract boolean token( + String aggregationName, + String currentFieldName, + XContentParser.Token token, + XContentParser parser, + Map otherOptions + ) throws IOException; } diff --git a/modules/aggs-matrix-stats/src/test/java/org/elasticsearch/search/aggregations/matrix/stats/InternalMatrixStatsTests.java b/modules/aggs-matrix-stats/src/test/java/org/elasticsearch/search/aggregations/matrix/stats/InternalMatrixStatsTests.java index 31fda40c212e6..c8055253b9aef 100644 --- a/modules/aggs-matrix-stats/src/test/java/org/elasticsearch/search/aggregations/matrix/stats/InternalMatrixStatsTests.java +++ b/modules/aggs-matrix-stats/src/test/java/org/elasticsearch/search/aggregations/matrix/stats/InternalMatrixStatsTests.java @@ -7,13 +7,13 @@ */ package org.elasticsearch.search.aggregations.matrix.stats; -import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.common.util.MockBigArrays; import org.elasticsearch.common.util.MockPageCacheRecycler; import org.elasticsearch.common.xcontent.ContextParser; import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; import org.elasticsearch.plugins.SearchPlugin; import org.elasticsearch.script.ScriptService; @@ -58,8 +58,10 @@ public void setUp() throws Exception { @Override protected List getNamedXContents() { ContextParser parser = (p, c) -> ParsedMatrixStats.fromXContent(p, (String) c); - return CollectionUtils.appendToCopy(getDefaultNamedXContents(), - new NamedXContentRegistry.Entry(Aggregation.class, new ParseField(MatrixStatsAggregationBuilder.NAME), parser)); + return CollectionUtils.appendToCopy( + getDefaultNamedXContents(), + new NamedXContentRegistry.Entry(Aggregation.class, new ParseField(MatrixStatsAggregationBuilder.NAME), parser) + ); } @Override @@ -83,35 +85,35 @@ protected InternalMatrixStats mutateInstance(InternalMatrixStats instance) { MatrixStatsResults matrixStatsResults = instance.getResults(); Map metadata = instance.getMetadata(); switch (between(0, 3)) { - case 0: - name += 
randomAlphaOfLength(5); - break; - case 1: - String[] fields = Arrays.copyOf(this.fields, this.fields.length + 1); - fields[fields.length - 1] = "field_" + (fields.length - 1); - double[] values = new double[fields.length]; - for (int i = 0; i < fields.length; i++) { - values[i] = randomDouble() * 200; - } - runningStats = new RunningStats(); - runningStats.add(fields, values); - break; - case 2: - if (matrixStatsResults == null) { - matrixStatsResults = new MatrixStatsResults(runningStats); - } else { - matrixStatsResults = null; - } - break; - case 3: - default: - if (metadata == null) { - metadata = new HashMap<>(1); - } else { - metadata = new HashMap<>(instance.getMetadata()); - } - metadata.put(randomAlphaOfLength(15), randomInt()); - break; + case 0: + name += randomAlphaOfLength(5); + break; + case 1: + String[] fields = Arrays.copyOf(this.fields, this.fields.length + 1); + fields[fields.length - 1] = "field_" + (fields.length - 1); + double[] values = new double[fields.length]; + for (int i = 0; i < fields.length; i++) { + values[i] = randomDouble() * 200; + } + runningStats = new RunningStats(); + runningStats.add(fields, values); + break; + case 2: + if (matrixStatsResults == null) { + matrixStatsResults = new MatrixStatsResults(runningStats); + } else { + matrixStatsResults = null; + } + break; + case 3: + default: + if (metadata == null) { + metadata = new HashMap<>(1); + } else { + metadata = new HashMap<>(instance.getMetadata()); + } + metadata.put(randomAlphaOfLength(15), randomInt()); + break; } return new InternalMatrixStats(name, docCount, runningStats, matrixStatsResults, metadata); } @@ -135,7 +137,7 @@ public void testReduceRandom() { double valueB = randomDouble(); bValues.add(valueB); - runningStats.add(new String[]{"a", "b"}, new double[]{valueA, valueB}); + runningStats.add(new String[] { "a", "b" }, new double[] { valueA, valueB }); if (++valuePerShardCounter == valuesPerShard) { shardResults.add(new InternalMatrixStats("_name", 1L, runningStats, null, Collections.emptyMap())); runningStats = new RunningStats(); @@ -152,7 +154,11 @@ public void testReduceRandom() { ScriptService mockScriptService = mockScriptService(); MockBigArrays bigArrays = new MockBigArrays(new MockPageCacheRecycler(Settings.EMPTY), new NoneCircuitBreakerService()); InternalAggregation.ReduceContext context = InternalAggregation.ReduceContext.forFinalReduction( - bigArrays, mockScriptService, b -> {}, PipelineTree.EMPTY); + bigArrays, + mockScriptService, + b -> {}, + PipelineTree.EMPTY + ); InternalMatrixStats reduced = (InternalMatrixStats) shardResults.get(0).reduce(shardResults, context); multiPassStats.assertNearlyEqual(reduced.getResults()); } diff --git a/modules/aggs-matrix-stats/src/test/java/org/elasticsearch/search/aggregations/matrix/stats/MatrixStatsAggregatorTests.java b/modules/aggs-matrix-stats/src/test/java/org/elasticsearch/search/aggregations/matrix/stats/MatrixStatsAggregatorTests.java index 1e1e785b92b31..2edaf28beeffc 100644 --- a/modules/aggs-matrix-stats/src/test/java/org/elasticsearch/search/aggregations/matrix/stats/MatrixStatsAggregatorTests.java +++ b/modules/aggs-matrix-stats/src/test/java/org/elasticsearch/search/aggregations/matrix/stats/MatrixStatsAggregatorTests.java @@ -30,18 +30,17 @@ public class MatrixStatsAggregatorTests extends AggregatorTestCase { public void testNoData() throws Exception { - MappedFieldType ft = - new NumberFieldMapper.NumberFieldType("field", NumberFieldMapper.NumberType.DOUBLE); + MappedFieldType ft = new 
NumberFieldMapper.NumberFieldType("field", NumberFieldMapper.NumberType.DOUBLE); - try (Directory directory = newDirectory(); - RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory)) { + try (Directory directory = newDirectory(); RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory)) { if (randomBoolean()) { indexWriter.addDocument(Collections.singleton(new StringField("another_field", "value", Field.Store.NO))); } try (IndexReader reader = indexWriter.getReader()) { IndexSearcher searcher = new IndexSearcher(reader); - MatrixStatsAggregationBuilder aggBuilder = new MatrixStatsAggregationBuilder("my_agg") - .fields(Collections.singletonList("field")); + MatrixStatsAggregationBuilder aggBuilder = new MatrixStatsAggregationBuilder("my_agg").fields( + Collections.singletonList("field") + ); InternalMatrixStats stats = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, ft); assertNull(stats.getStats()); assertEquals(0L, stats.getDocCount()); @@ -52,15 +51,15 @@ public void testNoData() throws Exception { public void testUnmapped() throws Exception { MappedFieldType ft = new NumberFieldMapper.NumberFieldType("field", NumberFieldMapper.NumberType.DOUBLE); - try (Directory directory = newDirectory(); - RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory)) { + try (Directory directory = newDirectory(); RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory)) { if (randomBoolean()) { indexWriter.addDocument(Collections.singleton(new StringField("another_field", "value", Field.Store.NO))); } try (IndexReader reader = indexWriter.getReader()) { IndexSearcher searcher = new IndexSearcher(reader); - MatrixStatsAggregationBuilder aggBuilder = new MatrixStatsAggregationBuilder("my_agg") - .fields(Collections.singletonList("bogus")); + MatrixStatsAggregationBuilder aggBuilder = new MatrixStatsAggregationBuilder("my_agg").fields( + Collections.singletonList("bogus") + ); InternalMatrixStats stats = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, ft); assertNull(stats.getStats()); assertEquals(0L, stats.getDocCount()); @@ -74,8 +73,7 @@ public void testTwoFields() throws Exception { String fieldB = "b"; MappedFieldType ftB = new NumberFieldMapper.NumberFieldType(fieldB, NumberFieldMapper.NumberType.DOUBLE); - try (Directory directory = newDirectory(); - RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory)) { + try (Directory directory = newDirectory(); RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory)) { int numDocs = scaledRandomIntBetween(8192, 16384); Double[] fieldAValues = new Double[numDocs]; @@ -94,8 +92,9 @@ public void testTwoFields() throws Exception { multiPassStats.computeStats(Arrays.asList(fieldAValues), Arrays.asList(fieldBValues)); try (IndexReader reader = indexWriter.getReader()) { IndexSearcher searcher = new IndexSearcher(reader); - MatrixStatsAggregationBuilder aggBuilder = new MatrixStatsAggregationBuilder("my_agg") - .fields(Arrays.asList(fieldA, fieldB)); + MatrixStatsAggregationBuilder aggBuilder = new MatrixStatsAggregationBuilder("my_agg").fields( + Arrays.asList(fieldA, fieldB) + ); InternalMatrixStats stats = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, ftA, ftB); multiPassStats.assertNearlyEqual(stats); assertTrue(MatrixAggregationInspectionHelper.hasValue(stats)); diff --git a/modules/aggs-matrix-stats/src/test/java/org/elasticsearch/search/aggregations/matrix/stats/MultiPassStats.java 
b/modules/aggs-matrix-stats/src/test/java/org/elasticsearch/search/aggregations/matrix/stats/MultiPassStats.java
index c6cba046635e6..81ec04389a06f 100644
--- a/modules/aggs-matrix-stats/src/test/java/org/elasticsearch/search/aggregations/matrix/stats/MultiPassStats.java
+++ b/modules/aggs-matrix-stats/src/test/java/org/elasticsearch/search/aggregations/matrix/stats/MultiPassStats.java
@@ -118,8 +118,8 @@ void assertNearlyEqual(MatrixStatsResults stats) {
         assertTrue(nearlyEqual(kurtosis.get(fieldAKey), stats.getKurtosis(fieldAKey), 1e-4));
         assertTrue(nearlyEqual(kurtosis.get(fieldBKey), stats.getKurtosis(fieldBKey), 1e-4));
         // covariances
-        assertTrue(nearlyEqual(covariances.get(fieldAKey).get(fieldBKey),stats.getCovariance(fieldAKey, fieldBKey), 1e-7));
-        assertTrue(nearlyEqual(covariances.get(fieldBKey).get(fieldAKey),stats.getCovariance(fieldBKey, fieldAKey), 1e-7));
+        assertTrue(nearlyEqual(covariances.get(fieldAKey).get(fieldBKey), stats.getCovariance(fieldAKey, fieldBKey), 1e-7));
+        assertTrue(nearlyEqual(covariances.get(fieldBKey).get(fieldAKey), stats.getCovariance(fieldBKey, fieldAKey), 1e-7));
         // correlation
         assertTrue(nearlyEqual(correlations.get(fieldAKey).get(fieldBKey), stats.getCorrelation(fieldAKey, fieldBKey), 1e-7));
         assertTrue(nearlyEqual(correlations.get(fieldBKey).get(fieldAKey), stats.getCorrelation(fieldBKey, fieldAKey), 1e-7));
@@ -142,8 +142,8 @@ void assertNearlyEqual(InternalMatrixStats stats) {
         assertTrue(nearlyEqual(kurtosis.get(fieldAKey), stats.getKurtosis(fieldAKey), 1e-4));
         assertTrue(nearlyEqual(kurtosis.get(fieldBKey), stats.getKurtosis(fieldBKey), 1e-4));
         // covariances
-        assertTrue(nearlyEqual(covariances.get(fieldAKey).get(fieldBKey),stats.getCovariance(fieldAKey, fieldBKey), 1e-7));
-        assertTrue(nearlyEqual(covariances.get(fieldBKey).get(fieldAKey),stats.getCovariance(fieldBKey, fieldAKey), 1e-7));
+        assertTrue(nearlyEqual(covariances.get(fieldAKey).get(fieldBKey), stats.getCovariance(fieldAKey, fieldBKey), 1e-7));
+        assertTrue(nearlyEqual(covariances.get(fieldBKey).get(fieldAKey), stats.getCovariance(fieldBKey, fieldAKey), 1e-7));
         // correlation
         assertTrue(nearlyEqual(correlations.get(fieldAKey).get(fieldBKey), stats.getCorrelation(fieldAKey, fieldBKey), 1e-7));
         assertTrue(nearlyEqual(correlations.get(fieldBKey).get(fieldAKey), stats.getCorrelation(fieldBKey, fieldAKey), 1e-7));
diff --git a/modules/aggs-matrix-stats/src/test/java/org/elasticsearch/search/aggregations/matrix/stats/RunningStatsTests.java b/modules/aggs-matrix-stats/src/test/java/org/elasticsearch/search/aggregations/matrix/stats/RunningStatsTests.java
index 5ba22156b9df8..3768f00962dd2 100644
--- a/modules/aggs-matrix-stats/src/test/java/org/elasticsearch/search/aggregations/matrix/stats/RunningStatsTests.java
+++ b/modules/aggs-matrix-stats/src/test/java/org/elasticsearch/search/aggregations/matrix/stats/RunningStatsTests.java
@@ -25,9 +25,9 @@ public void testMergedStats() throws Exception {
         int start = 0;
         RunningStats stats = null;
         List fieldAShard, fieldBShard;
-        for (int s = 0; s < numShards-1; start = ++s * (int)obsPerShard) {
-            fieldAShard = fieldA.subList(start, start + (int)obsPerShard);
-            fieldBShard = fieldB.subList(start, start + (int)obsPerShard);
+        for (int s = 0; s < numShards - 1; start = ++s * (int) obsPerShard) {
+            fieldAShard = fieldA.subList(start, start + (int) obsPerShard);
+            fieldBShard = fieldB.subList(start, start + (int) obsPerShard);
             if (stats == null) {
                 stats = createRunningStats(fieldAShard, fieldBShard);
             } else {
diff --git
a/modules/aggs-matrix-stats/src/yamlRestTest/java/org/elasticsearch/search/aggregations/matrix/MatrixStatsClientYamlTestSuiteIT.java b/modules/aggs-matrix-stats/src/yamlRestTest/java/org/elasticsearch/search/aggregations/matrix/MatrixStatsClientYamlTestSuiteIT.java index 11de471e6fdf9..6f29a3fb765f3 100644 --- a/modules/aggs-matrix-stats/src/yamlRestTest/java/org/elasticsearch/search/aggregations/matrix/MatrixStatsClientYamlTestSuiteIT.java +++ b/modules/aggs-matrix-stats/src/yamlRestTest/java/org/elasticsearch/search/aggregations/matrix/MatrixStatsClientYamlTestSuiteIT.java @@ -9,11 +9,12 @@ import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; public class MatrixStatsClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { - public MatrixStatsClientYamlTestSuiteIT(@Name("yaml")ClientYamlTestCandidate testCandidate) { + public MatrixStatsClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) { super(testCandidate); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/AggregationsIntegrationIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/AggregationsIntegrationIT.java index 1eab2ca7c6034..d6b9cb0ac267c 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/AggregationsIntegrationIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/AggregationsIntegrationIT.java @@ -21,7 +21,6 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; - @ESIntegTestCase.SuiteScopeTestCase public class AggregationsIntegrationIT extends ESIntegTestCase { @@ -41,8 +40,10 @@ public void setupSuiteScopeCluster() throws Exception { public void testScroll() { final int size = randomIntBetween(1, 4); SearchResponse response = client().prepareSearch("index") - .setSize(size).setScroll(TimeValue.timeValueMinutes(1)) - .addAggregation(terms("f").field("f")).get(); + .setSize(size) + .setScroll(TimeValue.timeValueMinutes(1)) + .addAggregation(terms("f").field("f")) + .get(); assertSearchResponse(response); Aggregations aggregations = response.getAggregations(); assertNotNull(aggregations); @@ -51,9 +52,7 @@ public void testScroll() { int total = response.getHits().getHits().length; while (response.getHits().getHits().length > 0) { - response = client().prepareSearchScroll(response.getScrollId()) - .setScroll(TimeValue.timeValueMinutes(1)) - .get(); + response = client().prepareSearchScroll(response.getScrollId()).setScroll(TimeValue.timeValueMinutes(1)).get(); assertSearchResponse(response); assertNull(response.getAggregations()); total += response.getHits().getHits().length; diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/CombiIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/CombiIT.java index 166dda16dd957..4f54de00d454f 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/CombiIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/CombiIT.java @@ -10,6 +10,7 @@ import com.carrotsearch.hppc.IntIntHashMap; import com.carrotsearch.hppc.IntIntMap; + import 
org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.search.aggregations.Aggregator.SubAggCollectionMode; @@ -49,30 +50,22 @@ public void testMultipleAggsOnSameField_WithDifferentRequiredValueSourceType() t String name = "name_" + randomIntBetween(1, 10); if (rarely()) { missingValues++; - builders[i] = client().prepareIndex("idx").setSource(jsonBuilder() - .startObject() - .field("name", name) - .endObject()); + builders[i] = client().prepareIndex("idx").setSource(jsonBuilder().startObject().field("name", name).endObject()); } else { int value = randomIntBetween(1, 10); values.put(value, values.getOrDefault(value, 0) + 1); - builders[i] = client().prepareIndex("idx").setSource(jsonBuilder() - .startObject() - .field("name", name) - .field("value", value) - .endObject()); + builders[i] = client().prepareIndex("idx") + .setSource(jsonBuilder().startObject().field("name", name).field("value", value).endObject()); } } indexRandom(true, builders); ensureSearchable(); - SubAggCollectionMode aggCollectionMode = randomFrom(SubAggCollectionMode.values()); SearchResponse response = client().prepareSearch("idx") - .addAggregation(missing("missing_values").field("value")) - .addAggregation(terms("values").field("value") - .collectMode(aggCollectionMode )) - .get(); + .addAggregation(missing("missing_values").field("value")) + .addAggregation(terms("values").field("value").collectMode(aggCollectionMode)) + .get(); assertSearchResponse(response); @@ -92,7 +85,6 @@ public void testMultipleAggsOnSameField_WithDifferentRequiredValueSourceType() t assertTrue(values.isEmpty()); } - /** * Some top aggs (eg. date_/histogram) that are executed on unmapped fields, will generate an estimate count of buckets - zero. * when the sub aggregator is then created, it will take this estimation into account. 
This used to cause @@ -100,22 +92,29 @@ public void testMultipleAggsOnSameField_WithDifferentRequiredValueSourceType() t */ public void testSubAggregationForTopAggregationOnUnmappedField() throws Exception { - prepareCreate("idx").setMapping(jsonBuilder() - .startObject() - .startObject("_doc").startObject("properties") - .startObject("name").field("type", "keyword").endObject() - .startObject("value").field("type", "integer").endObject() - .endObject().endObject() - .endObject()).get(); + prepareCreate("idx").setMapping( + jsonBuilder().startObject() + .startObject("_doc") + .startObject("properties") + .startObject("name") + .field("type", "keyword") + .endObject() + .startObject("value") + .field("type", "integer") + .endObject() + .endObject() + .endObject() + .endObject() + ).get(); ensureSearchable("idx"); SubAggCollectionMode aggCollectionMode = randomFrom(SubAggCollectionMode.values()); SearchResponse searchResponse = client().prepareSearch("idx") - .addAggregation(histogram("values").field("value1").interval(1) - .subAggregation(terms("names").field("name") - .collectMode(aggCollectionMode ))) - .get(); + .addAggregation( + histogram("values").field("value1").interval(1).subAggregation(terms("names").field("name").collectMode(aggCollectionMode)) + ) + .get(); assertThat(searchResponse.getHits().getTotalHits().value, Matchers.equalTo(0L)); Histogram values = searchResponse.getAggregations().get("values"); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/EquivalenceIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/EquivalenceIT.java index 6774666b7a1bd..338d4ae8a7d46 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/EquivalenceIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/EquivalenceIT.java @@ -9,6 +9,7 @@ package org.elasticsearch.search.aggregations; import com.carrotsearch.hppc.IntHashSet; + import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchRequestBuilder; import org.elasticsearch.action.search.SearchResponse; @@ -84,16 +85,16 @@ protected Map, Object>> pluginScripts() { @Before private void setupMaxBuckets() { // disables the max bucket limit for this test - client().admin().cluster().prepareUpdateSettings() + client().admin() + .cluster() + .prepareUpdateSettings() .setTransientSettings(Collections.singletonMap("search.max_buckets", Integer.MAX_VALUE)) .get(); } @After private void cleanupMaxBuckets() { - client().admin().cluster().prepareUpdateSettings() - .setTransientSettings(Collections.singletonMap("search.max_buckets", null)) - .get(); + client().admin().cluster().prepareUpdateSettings().setTransientSettings(Collections.singletonMap("search.max_buckets", null)).get(); } // Make sure that unordered, reversed, disjoint and/or overlapping ranges are supported @@ -109,47 +110,43 @@ public void testRandomRanges() throws Exception { } } - prepareCreate("idx") - .setMapping(jsonBuilder() - .startObject() - .startObject("_doc") - .startObject("properties") - .startObject("values") - .field("type", "double") - .endObject() - .endObject() - .endObject() - .endObject()).get(); + prepareCreate("idx").setMapping( + jsonBuilder().startObject() + .startObject("_doc") + .startObject("properties") + .startObject("values") + .field("type", "double") + .endObject() + .endObject() + .endObject() + .endObject() + ).get(); for (int i = 0; i < docs.length; ++i) { - XContentBuilder source = 
jsonBuilder() - .startObject() - .startArray("values"); + XContentBuilder source = jsonBuilder().startObject().startArray("values"); for (int j = 0; j < docs[i].length; ++j) { source = source.value(docs[i][j]); } source = source.endArray().endObject(); client().prepareIndex("idx").setSource(source).get(); } - assertNoFailures(client().admin().indices().prepareRefresh("idx"). - setIndicesOptions(IndicesOptions.lenientExpandOpen()) - .get()); + assertNoFailures(client().admin().indices().prepareRefresh("idx").setIndicesOptions(IndicesOptions.lenientExpandOpen()).get()); final int numRanges = randomIntBetween(1, 20); final double[][] ranges = new double[numRanges][]; for (int i = 0; i < ranges.length; ++i) { switch (randomInt(2)) { - case 0: - ranges[i] = new double[] { Double.NEGATIVE_INFINITY, randomInt(100) }; - break; - case 1: - ranges[i] = new double[] { randomInt(100), Double.POSITIVE_INFINITY }; - break; - case 2: - ranges[i] = new double[] { randomInt(100), randomInt(100) }; - break; - default: - throw new AssertionError(); + case 0: + ranges[i] = new double[] { Double.NEGATIVE_INFINITY, randomInt(100) }; + break; + case 1: + ranges[i] = new double[] { randomInt(100), Double.POSITIVE_INFINITY }; + break; + case 2: + ranges[i] = new double[] { randomInt(100), randomInt(100) }; + break; + default: + throw new AssertionError(); } } @@ -171,7 +168,7 @@ public void testRandomRanges() throws Exception { if (ranges[i][0] != Double.NEGATIVE_INFINITY) { filter = filter.from(ranges[i][0]); } - if (ranges[i][1] != Double.POSITIVE_INFINITY){ + if (ranges[i][1] != Double.POSITIVE_INFINITY) { filter = filter.to(ranges[i][1]); } reqBuilder = reqBuilder.addAggregation(filter("filter" + i, filter)); @@ -214,32 +211,32 @@ public void testDuelTerms() throws Exception { final IntHashSet valuesSet = new IntHashSet(); cluster().wipeIndices("idx"); - prepareCreate("idx") - .setMapping(jsonBuilder() - .startObject() - .startObject("_doc") - .startObject("properties") - .startObject("num") - .field("type", "double") - .endObject() - .startObject("string_values") - .field("type", "keyword") - .startObject("fields") - .startObject("doc_values") - .field("type", "keyword") - .field("index", false) - .endObject() - .endObject() - .endObject() - .startObject("long_values") - .field("type", "long") - .endObject() - .startObject("double_values") - .field("type", "double") - .endObject() - .endObject() - .endObject() - .endObject()).get(); + prepareCreate("idx").setMapping( + jsonBuilder().startObject() + .startObject("_doc") + .startObject("properties") + .startObject("num") + .field("type", "double") + .endObject() + .startObject("string_values") + .field("type", "keyword") + .startObject("fields") + .startObject("doc_values") + .field("type", "keyword") + .field("index", false) + .endObject() + .endObject() + .endObject() + .startObject("long_values") + .field("type", "long") + .endObject() + .startObject("double_values") + .field("type", "double") + .endObject() + .endObject() + .endObject() + .endObject() + ).get(); List indexingRequests = new ArrayList<>(); for (int i = 0; i < numDocs; ++i) { @@ -248,10 +245,7 @@ public void testDuelTerms() throws Exception { values[j] = randomInt(maxNumTerms - 1) - 1000; valuesSet.add(values[j]); } - XContentBuilder source = jsonBuilder() - .startObject() - .field("num", randomDouble()) - .startArray("long_values"); + XContentBuilder source = jsonBuilder().startObject().field("num", randomDouble()).startArray("long_values"); for (int j = 0; j < values.length; ++j) { source = 
source.value(values[j]); } @@ -268,45 +262,45 @@ public void testDuelTerms() throws Exception { } indexRandom(true, indexingRequests); - assertNoFailures(client().admin().indices().prepareRefresh("idx") - .setIndicesOptions(IndicesOptions.lenientExpandOpen()) - .execute().get()); + assertNoFailures( + client().admin().indices().prepareRefresh("idx").setIndicesOptions(IndicesOptions.lenientExpandOpen()).execute().get() + ); SearchResponse resp = client().prepareSearch("idx") - .addAggregation( - terms("long") - .field("long_values") - .size(maxNumTerms) - .collectMode(randomFrom(SubAggCollectionMode.values())) - .subAggregation(min("min").field("num"))) - .addAggregation( - terms("double") - .field("double_values") - .size(maxNumTerms) - .collectMode(randomFrom(SubAggCollectionMode.values())) - .subAggregation(max("max").field("num"))) - .addAggregation( - terms("string_map") - .field("string_values") - .collectMode(randomFrom(SubAggCollectionMode.values())) - .executionHint(TermsAggregatorFactory.ExecutionMode.MAP.toString()) - .size(maxNumTerms) - .subAggregation(stats("stats").field("num"))) - .addAggregation( - terms("string_global_ordinals") - .field("string_values") - .collectMode(randomFrom(SubAggCollectionMode.values())) - .executionHint(TermsAggregatorFactory.ExecutionMode.GLOBAL_ORDINALS.toString()) - .size(maxNumTerms) - .subAggregation(extendedStats("stats").field("num"))) - .addAggregation( - terms("string_global_ordinals_doc_values") - .field("string_values.doc_values") - .collectMode(randomFrom(SubAggCollectionMode.values())) - .executionHint(TermsAggregatorFactory.ExecutionMode.GLOBAL_ORDINALS.toString()) - .size(maxNumTerms) - .subAggregation(extendedStats("stats").field("num"))) - .get(); + .addAggregation( + terms("long").field("long_values") + .size(maxNumTerms) + .collectMode(randomFrom(SubAggCollectionMode.values())) + .subAggregation(min("min").field("num")) + ) + .addAggregation( + terms("double").field("double_values") + .size(maxNumTerms) + .collectMode(randomFrom(SubAggCollectionMode.values())) + .subAggregation(max("max").field("num")) + ) + .addAggregation( + terms("string_map").field("string_values") + .collectMode(randomFrom(SubAggCollectionMode.values())) + .executionHint(TermsAggregatorFactory.ExecutionMode.MAP.toString()) + .size(maxNumTerms) + .subAggregation(stats("stats").field("num")) + ) + .addAggregation( + terms("string_global_ordinals").field("string_values") + .collectMode(randomFrom(SubAggCollectionMode.values())) + .executionHint(TermsAggregatorFactory.ExecutionMode.GLOBAL_ORDINALS.toString()) + .size(maxNumTerms) + .subAggregation(extendedStats("stats").field("num")) + ) + .addAggregation( + terms("string_global_ordinals_doc_values").field("string_values.doc_values") + .collectMode(randomFrom(SubAggCollectionMode.values())) + .executionHint(TermsAggregatorFactory.ExecutionMode.GLOBAL_ORDINALS.toString()) + .size(maxNumTerms) + .subAggregation(extendedStats("stats").field("num")) + ) + .get(); assertAllSuccessful(resp); assertEquals(numDocs, resp.getHits().getTotalHits().value); @@ -339,18 +333,17 @@ public void testDuelTerms() throws Exception { // Duel between histograms and scripted terms public void testDuelTermsHistogram() throws Exception { - prepareCreate("idx") - .setMapping(jsonBuilder() - .startObject() - .startObject("_doc") - .startObject("properties") - .startObject("num") - .field("type", "double") - .endObject() - .endObject() - .endObject() - .endObject()).get(); - + prepareCreate("idx").setMapping( + jsonBuilder().startObject() + 
.startObject("_doc") + .startObject("properties") + .startObject("num") + .field("type", "double") + .endObject() + .endObject() + .endObject() + .endObject() + ).get(); final int numDocs = scaledRandomIntBetween(500, 5000); final int maxNumTerms = randomIntBetween(10, 2000); @@ -362,10 +355,7 @@ public void testDuelTermsHistogram() throws Exception { } for (int i = 0; i < numDocs; ++i) { - XContentBuilder source = jsonBuilder() - .startObject() - .field("num", randomDouble()) - .startArray("values"); + XContentBuilder source = jsonBuilder().startObject().field("num", randomDouble()).startArray("values"); final int numValues = randomInt(4); for (int j = 0; j < numValues; ++j) { source = source.value(randomFrom(values)); @@ -373,26 +363,22 @@ public void testDuelTermsHistogram() throws Exception { source = source.endArray().endObject(); client().prepareIndex("idx").setSource(source).get(); } - assertNoFailures(client().admin().indices().prepareRefresh("idx") - .setIndicesOptions(IndicesOptions.lenientExpandOpen()) - .execute().get()); + assertNoFailures( + client().admin().indices().prepareRefresh("idx").setIndicesOptions(IndicesOptions.lenientExpandOpen()).execute().get() + ); Map params = new HashMap<>(); params.put("interval", interval); SearchResponse resp = client().prepareSearch("idx") - .addAggregation( - terms("terms") - .field("values") - .collectMode(randomFrom(SubAggCollectionMode.values())) - .script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "floor(_value / interval)", params)) - .size(maxNumTerms)) - .addAggregation( - histogram("histo") - .field("values") - .interval(interval) - .minDocCount(1)) - .get(); + .addAggregation( + terms("terms").field("values") + .collectMode(randomFrom(SubAggCollectionMode.values())) + .script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "floor(_value / interval)", params)) + .size(maxNumTerms) + ) + .addAggregation(histogram("histo").field("values").interval(interval).minDocCount(1)) + .get(); assertSearchResponse(resp); @@ -410,17 +396,17 @@ public void testDuelTermsHistogram() throws Exception { public void testLargeNumbersOfPercentileBuckets() throws Exception { // test high numbers of percentile buckets to make sure paging and release work correctly - prepareCreate("idx") - .setMapping(jsonBuilder() - .startObject() - .startObject("_doc") - .startObject("properties") - .startObject("double_value") - .field("type", "double") - .endObject() - .endObject() - .endObject() - .endObject()).get(); + prepareCreate("idx").setMapping( + jsonBuilder().startObject() + .startObject("_doc") + .startObject("properties") + .startObject("double_value") + .field("type", "double") + .endObject() + .endObject() + .endObject() + .endObject() + ).get(); final int numDocs = scaledRandomIntBetween(2500, 5000); logger.info("Indexing [{}] docs", numDocs); @@ -431,12 +417,12 @@ public void testLargeNumbersOfPercentileBuckets() throws Exception { indexRandom(true, indexingRequests); SearchResponse response = client().prepareSearch("idx") - .addAggregation( - terms("terms") - .field("double_value") - .collectMode(randomFrom(SubAggCollectionMode.values())) - .subAggregation(percentiles("pcts").field("double_value"))) - .get(); + .addAggregation( + terms("terms").field("double_value") + .collectMode(randomFrom(SubAggCollectionMode.values())) + .subAggregation(percentiles("pcts").field("double_value")) + ) + .get(); assertAllSuccessful(response); assertEquals(numDocs, response.getHits().getTotalHits().value); } @@ -447,13 +433,12 @@ public void 
testReduce() throws Exception { final int value = randomIntBetween(0, 10); indexRandom(true, client().prepareIndex("idx").setSource("f", value)); SearchResponse response = client().prepareSearch("idx") - .addAggregation(filter("filter", QueryBuilders.matchAllQuery()) - .subAggregation(range("range") - .field("f") - .addUnboundedTo(6) - .addUnboundedFrom(6) - .subAggregation(sum("sum").field("f")))) - .get(); + .addAggregation( + filter("filter", QueryBuilders.matchAllQuery()).subAggregation( + range("range").field("f").addUnboundedTo(6).addUnboundedFrom(6).subAggregation(sum("sum").field("f")) + ) + ) + .get(); assertSearchResponse(response); @@ -490,7 +475,7 @@ private void assertEquals(Terms t1, Terms t2) { List t1Buckets = t1.getBuckets(); List t2Buckets = t1.getBuckets(); assertEquals(t1Buckets.size(), t2Buckets.size()); - for (Iterator it1 = t1Buckets.iterator(), it2 = t2Buckets.iterator(); it1.hasNext(); ) { + for (Iterator it1 = t1Buckets.iterator(), it2 = t2Buckets.iterator(); it1.hasNext();) { final Terms.Bucket b1 = it1.next(); final Terms.Bucket b2 = it2.next(); assertEquals(b1.getDocCount(), b2.getDocCount()); @@ -510,15 +495,29 @@ public void testDuelDepthBreadthFirst() throws Exception { } indexRandom(true, reqs); - final SearchResponse r1 = client().prepareSearch("idx").addAggregation( - terms("f1").field("f1").collectMode(SubAggCollectionMode.DEPTH_FIRST) - .subAggregation(terms("f2").field("f2").collectMode(SubAggCollectionMode.DEPTH_FIRST) - .subAggregation(terms("f3").field("f3").collectMode(SubAggCollectionMode.DEPTH_FIRST)))).get(); + final SearchResponse r1 = client().prepareSearch("idx") + .addAggregation( + terms("f1").field("f1") + .collectMode(SubAggCollectionMode.DEPTH_FIRST) + .subAggregation( + terms("f2").field("f2") + .collectMode(SubAggCollectionMode.DEPTH_FIRST) + .subAggregation(terms("f3").field("f3").collectMode(SubAggCollectionMode.DEPTH_FIRST)) + ) + ) + .get(); assertSearchResponse(r1); - final SearchResponse r2 = client().prepareSearch("idx").addAggregation( - terms("f1").field("f1").collectMode(SubAggCollectionMode.BREADTH_FIRST) - .subAggregation(terms("f2").field("f2").collectMode(SubAggCollectionMode.BREADTH_FIRST) - .subAggregation(terms("f3").field("f3").collectMode(SubAggCollectionMode.BREADTH_FIRST)))).get(); + final SearchResponse r2 = client().prepareSearch("idx") + .addAggregation( + terms("f1").field("f1") + .collectMode(SubAggCollectionMode.BREADTH_FIRST) + .subAggregation( + terms("f2").field("f2") + .collectMode(SubAggCollectionMode.BREADTH_FIRST) + .subAggregation(terms("f3").field("f3").collectMode(SubAggCollectionMode.BREADTH_FIRST)) + ) + ) + .get(); assertSearchResponse(r2); final Terms t1 = r1.getAggregations().get("f1"); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/FiltersAggsRewriteIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/FiltersAggsRewriteIT.java index 734ab059feab8..d8cc4537f1f01 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/FiltersAggsRewriteIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/FiltersAggsRewriteIT.java @@ -47,8 +47,10 @@ public void testWrapperQueryIsRewritten() throws IOException { builder.endObject(); bytesReference = BytesReference.bytes(builder); } - FiltersAggregationBuilder builder = new FiltersAggregationBuilder("titles", new FiltersAggregator.KeyedFilter("titleterms", - new WrapperQueryBuilder(bytesReference))); + FiltersAggregationBuilder 
builder = new FiltersAggregationBuilder( + "titles", + new FiltersAggregator.KeyedFilter("titleterms", new WrapperQueryBuilder(bytesReference)) + ); Map metadata = new HashMap<>(); metadata.put(randomAlphaOfLengthBetween(1, 20), randomAlphaOfLengthBetween(1, 20)); builder.setMetadata(metadata); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/MetadataIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/MetadataIT.java index 982196a43756e..1e7fe6be8f7f7 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/MetadataIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/MetadataIT.java @@ -26,48 +26,40 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; - public class MetadataIT extends ESIntegTestCase { public void testMetadataSetOnAggregationResult() throws Exception { - assertAcked(client().admin().indices().prepareCreate("idx") - .setMapping("name", "type=keyword").get()); + assertAcked(client().admin().indices().prepareCreate("idx").setMapping("name", "type=keyword").get()); IndexRequestBuilder[] builders = new IndexRequestBuilder[randomInt(30)]; for (int i = 0; i < builders.length; i++) { String name = "name_" + randomIntBetween(1, 10); - builders[i] = client().prepareIndex("idx").setSource(jsonBuilder() - .startObject() - .field("name", name) - .field("value", randomInt()) - .endObject()); + builders[i] = client().prepareIndex("idx") + .setSource(jsonBuilder().startObject().field("name", name).field("value", randomInt()).endObject()); } indexRandom(true, builders); ensureSearchable(); - final Map nestedMetadata = new HashMap() {{ - put("nested", "value"); - }}; - - Map metadata = new HashMap() {{ - put("key", "value"); - put("numeric", 1.2); - put("bool", true); - put("complex", nestedMetadata); - }}; + final Map nestedMetadata = new HashMap() { + { + put("nested", "value"); + } + }; + + Map metadata = new HashMap() { + { + put("key", "value"); + put("numeric", 1.2); + put("bool", true); + put("complex", nestedMetadata); + } + }; SearchResponse response = client().prepareSearch("idx") - .addAggregation( - terms("the_terms") - .setMetadata(metadata) - .field("name") - .subAggregation( - sum("the_sum") - .setMetadata(metadata) - .field("value") - ) - ) - .addAggregation(maxBucket("the_max_bucket", "the_terms>the_sum").setMetadata(metadata)) - .get(); + .addAggregation( + terms("the_terms").setMetadata(metadata).field("name").subAggregation(sum("the_sum").setMetadata(metadata).field("value")) + ) + .addAggregation(maxBucket("the_max_bucket", "the_terms>the_sum").setMetadata(metadata)) + .get(); assertSearchResponse(response); @@ -104,7 +96,7 @@ private void assertMetadata(Map returnedMetadata) { assertNotNull(nestedObject); @SuppressWarnings("unchecked") - Map nestedMap = (Map)nestedObject; + Map nestedMap = (Map) nestedObject; assertEquals("value", nestedMap.get("nested")); } } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/MissingValueIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/MissingValueIT.java index 97683a2f4f4e0..b044353ebbf9e 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/MissingValueIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/MissingValueIT.java @@ -43,17 +43,20 
@@ protected int maximumNumberOfShards() { @Override protected void setupSuiteScopeCluster() throws Exception { - assertAcked(prepareCreate("idx") - .setMapping("date", "type=date", "location", "type=geo_point", "str", "type=keyword").get()); - indexRandom(true, - client().prepareIndex("idx").setId("1").setSource(), - client().prepareIndex("idx").setId("2") - .setSource("str", "foo", "long", 3L, "double", 5.5, "date", "2015-05-07", "location", "1,2")); + assertAcked(prepareCreate("idx").setMapping("date", "type=date", "location", "type=geo_point", "str", "type=keyword").get()); + indexRandom( + true, + client().prepareIndex("idx").setId("1").setSource(), + client().prepareIndex("idx") + .setId("2") + .setSource("str", "foo", "long", 3L, "double", 5.5, "date", "2015-05-07", "location", "1,2") + ); } public void testUnmappedTerms() { SearchResponse response = client().prepareSearch("idx") - .addAggregation(terms("my_terms").field("non_existing_field").missing("bar")).get(); + .addAggregation(terms("my_terms").field("non_existing_field").missing("bar")) + .get(); assertSearchResponse(response); Terms terms = response.getAggregations().get("my_terms"); assertEquals(1, terms.getBuckets().size()); @@ -62,11 +65,9 @@ public void testUnmappedTerms() { public void testStringTerms() { for (ExecutionMode mode : ExecutionMode.values()) { - SearchResponse response = client().prepareSearch("idx").addAggregation( - terms("my_terms") - .field("str") - .executionHint(mode.toString()) - .missing("bar")).get(); + SearchResponse response = client().prepareSearch("idx") + .addAggregation(terms("my_terms").field("str").executionHint(mode.toString()).missing("bar")) + .get(); assertSearchResponse(response); Terms terms = response.getAggregations().get("my_terms"); assertEquals(2, terms.getBuckets().size()); @@ -82,16 +83,14 @@ public void testStringTerms() { } public void testLongTerms() { - SearchResponse response = client().prepareSearch("idx") - .addAggregation(terms("my_terms").field("long").missing(4)).get(); + SearchResponse response = client().prepareSearch("idx").addAggregation(terms("my_terms").field("long").missing(4)).get(); assertSearchResponse(response); Terms terms = response.getAggregations().get("my_terms"); assertEquals(2, terms.getBuckets().size()); assertEquals(1, terms.getBucketByKey("3").getDocCount()); assertEquals(1, terms.getBucketByKey("4").getDocCount()); - response = client().prepareSearch("idx") - .addAggregation(terms("my_terms").field("long").missing(3)).get(); + response = client().prepareSearch("idx").addAggregation(terms("my_terms").field("long").missing(3)).get(); assertSearchResponse(response); terms = response.getAggregations().get("my_terms"); assertEquals(1, terms.getBuckets().size()); @@ -99,8 +98,7 @@ public void testLongTerms() { } public void testDoubleTerms() { - SearchResponse response = client().prepareSearch("idx") - .addAggregation(terms("my_terms").field("double").missing(4.5)).get(); + SearchResponse response = client().prepareSearch("idx").addAggregation(terms("my_terms").field("double").missing(4.5)).get(); assertSearchResponse(response); Terms terms = response.getAggregations().get("my_terms"); assertEquals(2, terms.getBuckets().size()); @@ -116,7 +114,8 @@ public void testDoubleTerms() { public void testUnmappedHistogram() { SearchResponse response = client().prepareSearch("idx") - .addAggregation(histogram("my_histogram").field("non-existing_field").interval(5).missing(12)).get(); + 
.addAggregation(histogram("my_histogram").field("non-existing_field").interval(5).missing(12)) + .get(); assertSearchResponse(response); Histogram histogram = response.getAggregations().get("my_histogram"); assertEquals(1, histogram.getBuckets().size()); @@ -126,7 +125,8 @@ public void testUnmappedHistogram() { public void testHistogram() { SearchResponse response = client().prepareSearch("idx") - .addAggregation(histogram("my_histogram").field("long").interval(5).missing(7)).get(); + .addAggregation(histogram("my_histogram").field("long").interval(5).missing(7)) + .get(); assertSearchResponse(response); Histogram histogram = response.getAggregations().get("my_histogram"); assertEquals(2, histogram.getBuckets().size()); @@ -135,8 +135,7 @@ public void testHistogram() { assertEquals(5d, histogram.getBuckets().get(1).getKey()); assertEquals(1, histogram.getBuckets().get(1).getDocCount()); - response = client().prepareSearch("idx") - .addAggregation(histogram("my_histogram").field("long").interval(5).missing(3)).get(); + response = client().prepareSearch("idx").addAggregation(histogram("my_histogram").field("long").interval(5).missing(3)).get(); assertSearchResponse(response); histogram = response.getAggregations().get("my_histogram"); assertEquals(1, histogram.getBuckets().size()); @@ -146,9 +145,8 @@ public void testHistogram() { public void testDateHistogram() { SearchResponse response = client().prepareSearch("idx") - .addAggregation( - dateHistogram("my_histogram").field("date").calendarInterval(DateHistogramInterval.YEAR).missing("2014-05-07")) - .get(); + .addAggregation(dateHistogram("my_histogram").field("date").calendarInterval(DateHistogramInterval.YEAR).missing("2014-05-07")) + .get(); assertSearchResponse(response); Histogram histogram = response.getAggregations().get("my_histogram"); assertEquals(2, histogram.getBuckets().size()); @@ -158,9 +156,8 @@ public void testDateHistogram() { assertEquals(1, histogram.getBuckets().get(1).getDocCount()); response = client().prepareSearch("idx") - .addAggregation( - dateHistogram("my_histogram").field("date").calendarInterval(DateHistogramInterval.YEAR).missing("2015-05-07")) - .get(); + .addAggregation(dateHistogram("my_histogram").field("date").calendarInterval(DateHistogramInterval.YEAR).missing("2015-05-07")) + .get(); assertSearchResponse(response); histogram = response.getAggregations().get("my_histogram"); assertEquals(1, histogram.getBuckets().size()); @@ -169,8 +166,7 @@ public void testDateHistogram() { } public void testCardinality() { - SearchResponse response = client().prepareSearch("idx") - .addAggregation(cardinality("card").field("long").missing(2)).get(); + SearchResponse response = client().prepareSearch("idx").addAggregation(cardinality("card").field("long").missing(2)).get(); assertSearchResponse(response); Cardinality cardinality = response.getAggregations().get("card"); assertEquals(2, cardinality.getValue()); @@ -178,15 +174,15 @@ public void testCardinality() { public void testPercentiles() { SearchResponse response = client().prepareSearch("idx") - .addAggregation(percentiles("percentiles").field("long").missing(1000)).get(); + .addAggregation(percentiles("percentiles").field("long").missing(1000)) + .get(); assertSearchResponse(response); Percentiles percentiles = response.getAggregations().get("percentiles"); assertEquals(1000, percentiles.percentile(100), 0); } public void testStats() { - SearchResponse response = client().prepareSearch("idx") - .addAggregation(stats("stats").field("long").missing(5)).get(); + 
SearchResponse response = client().prepareSearch("idx").addAggregation(stats("stats").field("long").missing(5)).get(); assertSearchResponse(response); Stats stats = response.getAggregations().get("stats"); assertEquals(2, stats.getCount()); @@ -195,7 +191,8 @@ public void testStats() { public void testUnmappedGeoBounds() { SearchResponse response = client().prepareSearch("idx") - .addAggregation(geoBounds("bounds").field("non_existing_field").missing("2,1")).get(); + .addAggregation(geoBounds("bounds").field("non_existing_field").missing("2,1")) + .get(); assertSearchResponse(response); GeoBounds bounds = response.getAggregations().get("bounds"); assertThat(bounds.bottomRight().lat(), closeTo(2.0, 1E-5)); @@ -205,8 +202,7 @@ public void testUnmappedGeoBounds() { } public void testGeoBounds() { - SearchResponse response = client().prepareSearch("idx") - .addAggregation(geoBounds("bounds").field("location").missing("2,1")).get(); + SearchResponse response = client().prepareSearch("idx").addAggregation(geoBounds("bounds").field("location").missing("2,1")).get(); assertSearchResponse(response); GeoBounds bounds = response.getAggregations().get("bounds"); assertThat(bounds.bottomRight().lat(), closeTo(1.0, 1E-5)); @@ -217,7 +213,8 @@ public void testGeoBounds() { public void testGeoCentroid() { SearchResponse response = client().prepareSearch("idx") - .addAggregation(geoCentroid("centroid").field("location").missing("2,1")).get(); + .addAggregation(geoCentroid("centroid").field("location").missing("2,1")) + .get(); assertSearchResponse(response); GeoCentroid centroid = response.getAggregations().get("centroid"); GeoPoint point = new GeoPoint(1.5, 1.5); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/AdjacencyMatrixIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/AdjacencyMatrixIT.java index e03a93488b7e5..70d90c48d3bd3 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/AdjacencyMatrixIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/AdjacencyMatrixIT.java @@ -86,8 +86,11 @@ public void setupSuiteScopeCluster() throws Exception { } prepareCreate("empty_bucket_idx").setMapping("value", "type=integer").get(); for (int i = 0; i < 2; i++) { - builders.add(client().prepareIndex("empty_bucket_idx").setId("" + i) - .setSource(jsonBuilder().startObject().field("value", i * 2).endObject())); + builders.add( + client().prepareIndex("empty_bucket_idx") + .setId("" + i) + .setSource(jsonBuilder().startObject().field("value", i * 2).endObject()) + ); } indexRandom(true, builders); ensureSearchable(); @@ -95,9 +98,8 @@ public void setupSuiteScopeCluster() throws Exception { public void testSimple() throws Exception { SearchResponse response = client().prepareSearch("idx") - .addAggregation(adjacencyMatrix("tags", - newMap("tag1", termQuery("tag", "tag1")).add("tag2", termQuery("tag", "tag2")))) - .get(); + .addAggregation(adjacencyMatrix("tags", newMap("tag1", termQuery("tag", "tag1")).add("tag2", termQuery("tag", "tag2")))) + .get(); assertSearchResponse(response); @@ -128,9 +130,8 @@ public void testSimple() throws Exception { public void testCustomSeparator() throws Exception { SearchResponse response = client().prepareSearch("idx") - .addAggregation(adjacencyMatrix("tags", "\t", - newMap("tag1", termQuery("tag", "tag1")).add("tag2", termQuery("tag", "tag2")))) - .get(); + .addAggregation(adjacencyMatrix("tags", "\t", newMap("tag1", 
termQuery("tag", "tag1")).add("tag2", termQuery("tag", "tag2")))) + .get(); assertSearchResponse(response); @@ -147,15 +148,13 @@ public void testCustomSeparator() throws Exception { } - // See NullPointer issue when filters are empty: // https://github.com/elastic/elasticsearch/issues/8438 public void testEmptyFilterDeclarations() throws Exception { QueryBuilder emptyFilter = new BoolQueryBuilder(); SearchResponse response = client().prepareSearch("idx") - .addAggregation(adjacencyMatrix("tags", - newMap("all", emptyFilter).add("tag1", termQuery("tag", "tag1")))) - .get(); + .addAggregation(adjacencyMatrix("tags", newMap("all", emptyFilter).add("tag1", termQuery("tag", "tag1")))) + .get(); assertSearchResponse(response); @@ -174,13 +173,11 @@ public void testWithSubAggregation() throws Exception { boolQ.must(termQuery("tag", "tag1")); boolQ.must(termQuery("tag", "tag2")); SearchResponse response = client().prepareSearch("idx") - .addAggregation( - adjacencyMatrix("tags", - newMap("tag1", termQuery("tag", "tag1")) - .add("tag2", termQuery("tag", "tag2")) - .add("both", boolQ)) - .subAggregation(avg("avg_value").field("value"))) - .get(); + .addAggregation( + adjacencyMatrix("tags", newMap("tag1", termQuery("tag", "tag1")).add("tag2", termQuery("tag", "tag2")).add("both", boolQ)) + .subAggregation(avg("avg_value").field("value")) + ) + .get(); assertSearchResponse(response); @@ -190,10 +187,10 @@ public void testWithSubAggregation() throws Exception { int expectedBuckets = 0; if (numTag1Docs > 0) { - expectedBuckets ++; + expectedBuckets++; } if (numTag2Docs > 0) { - expectedBuckets ++; + expectedBuckets++; } if (numMultiTagDocs > 0) { // both, both&tag1, both&tag2, tag1&tag2 @@ -201,11 +198,11 @@ public void testWithSubAggregation() throws Exception { } assertThat(matrix.getBuckets().size(), equalTo(expectedBuckets)); - assertThat(((InternalAggregation)matrix).getProperty("_bucket_count"), equalTo(expectedBuckets)); + assertThat(((InternalAggregation) matrix).getProperty("_bucket_count"), equalTo(expectedBuckets)); - Object[] propertiesKeys = (Object[]) ((InternalAggregation)matrix).getProperty("_key"); - Object[] propertiesDocCounts = (Object[]) ((InternalAggregation)matrix).getProperty("_count"); - Object[] propertiesCounts = (Object[]) ((InternalAggregation)matrix).getProperty("avg_value.value"); + Object[] propertiesKeys = (Object[]) ((InternalAggregation) matrix).getProperty("_key"); + Object[] propertiesDocCounts = (Object[]) ((InternalAggregation) matrix).getProperty("_count"); + Object[] propertiesCounts = (Object[]) ((InternalAggregation) matrix).getProperty("avg_value.value"); assertEquals(expectedBuckets, propertiesKeys.length); assertEquals(propertiesKeys.length, propertiesDocCounts.length); @@ -259,8 +256,7 @@ public void testWithSubAggregation() throws Exception { assertThat(bucketIntersectQ, Matchers.nullValue()); Bucket tag1Both = matrix.getBucketByKey("both&tag1"); assertThat(tag1Both, Matchers.nullValue()); - } else - { + } else { assertThat(bucketBothQ, Matchers.notNullValue()); assertThat(bucketBothQ.getDocCount(), equalTo((long) numMultiTagDocs)); Avg avgValueBothQ = bucketBothQ.getAggregations().get("avg_value"); @@ -278,10 +274,9 @@ public void testWithSubAggregation() throws Exception { assertThat(avgValueTag1BothIntersectQ.getValue(), equalTo(avgValueBothQ.getValue())); } - } - public void testTooLargeMatrix() throws Exception{ + public void testTooLargeMatrix() throws Exception { // Create more filters than is permitted by Lucene Bool clause settings. 
MapBuilder filtersMap = new MapBuilder(); @@ -291,9 +286,7 @@ public void testTooLargeMatrix() throws Exception{ } try { - client().prepareSearch("idx") - .addAggregation(adjacencyMatrix("tags", "\t", filtersMap)) - .get(); + client().prepareSearch("idx").addAggregation(adjacencyMatrix("tags", "\t", filtersMap)).get(); fail("SearchPhaseExecutionException should have been thrown"); } catch (SearchPhaseExecutionException ex) { assertThat(ex.getCause().getMessage(), containsString("Number of filters is too large")); @@ -301,8 +294,11 @@ public void testTooLargeMatrix() throws Exception{ } public void testAsSubAggregation() { - SearchResponse response = client().prepareSearch("idx").addAggregation(histogram("histo").field("value").interval(2L) - .subAggregation(adjacencyMatrix("matrix", newMap("all", matchAllQuery())))).get(); + SearchResponse response = client().prepareSearch("idx") + .addAggregation( + histogram("histo").field("value").interval(2L).subAggregation(adjacencyMatrix("matrix", newMap("all", matchAllQuery()))) + ) + .get(); assertSearchResponse(response); @@ -323,13 +319,17 @@ public void testWithContextBasedSubAggregation() throws Exception { try { client().prepareSearch("idx") - .addAggregation(adjacencyMatrix("tags", - newMap("tag1", termQuery("tag", "tag1")).add("tag2", termQuery("tag", "tag2"))) - .subAggregation(avg("avg_value"))) - .get(); + .addAggregation( + adjacencyMatrix("tags", newMap("tag1", termQuery("tag", "tag1")).add("tag2", termQuery("tag", "tag2"))).subAggregation( + avg("avg_value") + ) + ) + .get(); - fail("expected execution to fail - an attempt to have a context based numeric sub-aggregation, but there is not value source" - + "context which the sub-aggregation can inherit"); + fail( + "expected execution to fail - an attempt to have a context based numeric sub-aggregation, but there is not value source" + + "context which the sub-aggregation can inherit" + ); } catch (ElasticsearchException e) { assertThat(e.getMessage(), is("all shards failed")); @@ -337,10 +337,15 @@ public void testWithContextBasedSubAggregation() throws Exception { } public void testEmptyAggregation() throws Exception { - SearchResponse searchResponse = client() - .prepareSearch("empty_bucket_idx").setQuery(matchAllQuery()).addAggregation(histogram("histo").field("value").interval(1L) - .minDocCount(0).subAggregation(adjacencyMatrix("matrix", newMap("all", matchAllQuery())))) - .get(); + SearchResponse searchResponse = client().prepareSearch("empty_bucket_idx") + .setQuery(matchAllQuery()) + .addAggregation( + histogram("histo").field("value") + .interval(1L) + .minDocCount(0) + .subAggregation(adjacencyMatrix("matrix", newMap("all", matchAllQuery()))) + ) + .get(); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(2L)); Histogram histo = searchResponse.getAggregations().get("histo"); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/BooleanTermsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/BooleanTermsIT.java index c963315cd71e0..ad9574b1d7c53 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/BooleanTermsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/BooleanTermsIT.java @@ -48,35 +48,35 @@ public void setupSuiteScopeCluster() throws Exception { break; case 1: numMultiFalses++; - multiValue = new boolean[] {false}; + multiValue = new boolean[] { false }; break; case 2: numMultiTrues++; - 
multiValue = new boolean[] {true}; + multiValue = new boolean[] { true }; break; case 3: numMultiFalses++; numMultiTrues++; - multiValue = new boolean[] {false, true}; + multiValue = new boolean[] { false, true }; break; default: throw new AssertionError(); } - builders[i] = client().prepareIndex("idx").setSource(jsonBuilder() - .startObject() - .field(SINGLE_VALUED_FIELD_NAME, singleValue) - .array(MULTI_VALUED_FIELD_NAME, multiValue) - .endObject()); + builders[i] = client().prepareIndex("idx") + .setSource( + jsonBuilder().startObject() + .field(SINGLE_VALUED_FIELD_NAME, singleValue) + .array(MULTI_VALUED_FIELD_NAME, multiValue) + .endObject() + ); } indexRandom(true, builders); } public void testSingleValueField() throws Exception { SearchResponse response = client().prepareSearch("idx") - .addAggregation(terms("terms") - .field(SINGLE_VALUED_FIELD_NAME) - .collectMode(randomFrom(SubAggCollectionMode.values()))) - .get(); + .addAggregation(terms("terms").field(SINGLE_VALUED_FIELD_NAME).collectMode(randomFrom(SubAggCollectionMode.values()))) + .get(); assertSearchResponse(response); @@ -107,10 +107,8 @@ public void testSingleValueField() throws Exception { public void testMultiValueField() throws Exception { SearchResponse response = client().prepareSearch("idx") - .addAggregation(terms("terms") - .field(MULTI_VALUED_FIELD_NAME) - .collectMode(randomFrom(SubAggCollectionMode.values()))) - .get(); + .addAggregation(terms("terms").field(MULTI_VALUED_FIELD_NAME).collectMode(randomFrom(SubAggCollectionMode.values()))) + .get(); assertSearchResponse(response); @@ -141,11 +139,10 @@ public void testMultiValueField() throws Exception { public void testUnmapped() throws Exception { SearchResponse response = client().prepareSearch("idx_unmapped") - .addAggregation(terms("terms") - .field(SINGLE_VALUED_FIELD_NAME) - .size(between(1, 5)) - .collectMode(randomFrom(SubAggCollectionMode.values()))) - .get(); + .addAggregation( + terms("terms").field(SINGLE_VALUED_FIELD_NAME).size(between(1, 5)).collectMode(randomFrom(SubAggCollectionMode.values())) + ) + .get(); assertSearchResponse(response); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java index 46fc3cd772847..17802313e0af8 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java @@ -86,22 +86,32 @@ private static String format(ZonedDateTime date, String pattern) { } private IndexRequestBuilder indexDoc(String idx, ZonedDateTime date, int value) throws Exception { - return client().prepareIndex(idx).setSource(jsonBuilder() - .startObject() - .timeField("date", date) - .field("value", value) - .startArray("dates").timeValue(date).timeValue(date.plusMonths(1).plusDays(1)).endArray() - .endObject()); + return client().prepareIndex(idx) + .setSource( + jsonBuilder().startObject() + .timeField("date", date) + .field("value", value) + .startArray("dates") + .timeValue(date) + .timeValue(date.plusMonths(1).plusDays(1)) + .endArray() + .endObject() + ); } private IndexRequestBuilder indexDoc(int month, int day, int value) throws Exception { - return client().prepareIndex("idx").setSource(jsonBuilder() - .startObject() - .field("value", value) - .field("constant", 1) - .timeField("date", date(month, day)) - 
.startArray("dates").timeValue(date(month, day)).timeValue(date(month + 1, day + 1)).endArray() - .endObject()); + return client().prepareIndex("idx") + .setSource( + jsonBuilder().startObject() + .field("value", value) + .field("constant", 1) + .timeField("date", date(month, day)) + .startArray("dates") + .timeValue(date(month, day)) + .timeValue(date(month + 1, day + 1)) + .endArray() + .endObject() + ); } @Override @@ -111,21 +121,25 @@ public void setupSuiteScopeCluster() throws Exception { assertAcked(prepareCreate("empty_bucket_idx").setMapping("value", "type=integer")); List builders = new ArrayList<>(); for (int i = 0; i < 2; i++) { - builders.add(client().prepareIndex("empty_bucket_idx").setId("" + i).setSource(jsonBuilder() - .startObject() - .field("value", i * 2) - .endObject())); + builders.add( + client().prepareIndex("empty_bucket_idx") + .setId("" + i) + .setSource(jsonBuilder().startObject().field("value", i * 2).endObject()) + ); } getMultiSortDocs(builders); - builders.addAll(Arrays.asList( + builders.addAll( + Arrays.asList( indexDoc(1, 2, 1), // date: Jan 2, dates: Jan 2, Feb 3 indexDoc(2, 2, 2), // date: Feb 2, dates: Feb 2, Mar 3 indexDoc(2, 15, 3), // date: Feb 15, dates: Feb 15, Mar 16 indexDoc(3, 2, 4), // date: Mar 2, dates: Mar 2, Apr 3 indexDoc(3, 15, 5), // date: Mar 15, dates: Mar 15, Apr 16 - indexDoc(3, 23, 6))); // date: Mar 23, dates: Mar 23, Apr 24 + indexDoc(3, 23, 6) + ) + ); // date: Mar 23, dates: Mar 23, Apr 24 indexRandom(true, builders); ensureSearchable(); } @@ -148,30 +162,49 @@ private void getMultiSortDocs(List builders) throws IOExcep addExpectedBucket(date(1, 6), 1, 5, 1); addExpectedBucket(date(1, 7), 1, 5, 1); - assertAcked(client().admin().indices().prepareCreate("sort_idx") - .setMapping("date", "type=date").get()); + assertAcked(client().admin().indices().prepareCreate("sort_idx").setMapping("date", "type=date").get()); for (int i = 1; i <= 3; i++) { - builders.add(client().prepareIndex("sort_idx").setSource( - jsonBuilder().startObject().timeField("date", date(1, 1)).field("l", 1).field("d", i).endObject())); - builders.add(client().prepareIndex("sort_idx").setSource( - jsonBuilder().startObject().timeField("date", date(1, 2)).field("l", 2).field("d", i).endObject())); + builders.add( + client().prepareIndex("sort_idx") + .setSource(jsonBuilder().startObject().timeField("date", date(1, 1)).field("l", 1).field("d", i).endObject()) + ); + builders.add( + client().prepareIndex("sort_idx") + .setSource(jsonBuilder().startObject().timeField("date", date(1, 2)).field("l", 2).field("d", i).endObject()) + ); } - builders.add(client().prepareIndex("sort_idx").setSource( - jsonBuilder().startObject().timeField("date", date(1, 3)).field("l", 3).field("d", 1).endObject())); - builders.add(client().prepareIndex("sort_idx").setSource( - jsonBuilder().startObject().timeField("date", date(1, 3).plusHours(1)).field("l", 3).field("d", 2).endObject())); - builders.add(client().prepareIndex("sort_idx").setSource( - jsonBuilder().startObject().timeField("date", date(1, 4)).field("l", 3).field("d", 1).endObject())); - builders.add(client().prepareIndex("sort_idx").setSource( - jsonBuilder().startObject().timeField("date", date(1, 4).plusHours(2)).field("l", 3).field("d", 3).endObject())); - builders.add(client().prepareIndex("sort_idx").setSource( - jsonBuilder().startObject().timeField("date", date(1, 5)).field("l", 5).field("d", 1).endObject())); - builders.add(client().prepareIndex("sort_idx").setSource( - jsonBuilder().startObject().timeField("date", 
date(1, 5).plusHours(12)).field("l", 5).field("d", 2).endObject())); - builders.add(client().prepareIndex("sort_idx").setSource( - jsonBuilder().startObject().timeField("date", date(1, 6)).field("l", 5).field("d", 1).endObject())); - builders.add(client().prepareIndex("sort_idx").setSource( - jsonBuilder().startObject().timeField("date", date(1, 7)).field("l", 5).field("d", 1).endObject())); + builders.add( + client().prepareIndex("sort_idx") + .setSource(jsonBuilder().startObject().timeField("date", date(1, 3)).field("l", 3).field("d", 1).endObject()) + ); + builders.add( + client().prepareIndex("sort_idx") + .setSource(jsonBuilder().startObject().timeField("date", date(1, 3).plusHours(1)).field("l", 3).field("d", 2).endObject()) + ); + builders.add( + client().prepareIndex("sort_idx") + .setSource(jsonBuilder().startObject().timeField("date", date(1, 4)).field("l", 3).field("d", 1).endObject()) + ); + builders.add( + client().prepareIndex("sort_idx") + .setSource(jsonBuilder().startObject().timeField("date", date(1, 4).plusHours(2)).field("l", 3).field("d", 3).endObject()) + ); + builders.add( + client().prepareIndex("sort_idx") + .setSource(jsonBuilder().startObject().timeField("date", date(1, 5)).field("l", 5).field("d", 1).endObject()) + ); + builders.add( + client().prepareIndex("sort_idx") + .setSource(jsonBuilder().startObject().timeField("date", date(1, 5).plusHours(12)).field("l", 5).field("d", 2).endObject()) + ); + builders.add( + client().prepareIndex("sort_idx") + .setSource(jsonBuilder().startObject().timeField("date", date(1, 6)).field("l", 5).field("d", 1).endObject()) + ); + builders.add( + client().prepareIndex("sort_idx") + .setSource(jsonBuilder().startObject().timeField("date", date(1, 7)).field("l", 5).field("d", 1).endObject()) + ); } @Override @@ -194,8 +227,8 @@ private static String getBucketKeyAsString(ZonedDateTime key, ZoneId tz) { public void testSingleValuedField() throws Exception { SearchResponse response = client().prepareSearch("idx") - .addAggregation(dateHistogram("histo").field("date").calendarInterval(DateHistogramInterval.MONTH)) - .get(); + .addAggregation(dateHistogram("histo").field("date").calendarInterval(DateHistogramInterval.MONTH)) + .get(); assertSearchResponse(response); @@ -229,9 +262,14 @@ public void testSingleValuedField() throws Exception { public void testSingleValuedFieldWithTimeZone() throws Exception { SearchResponse response = client().prepareSearch("idx") - .addAggregation(dateHistogram("histo").field("date").calendarInterval(DateHistogramInterval.DAY).minDocCount(1) - .timeZone(ZoneId.of("+01:00"))).execute() - .actionGet(); + .addAggregation( + dateHistogram("histo").field("date") + .calendarInterval(DateHistogramInterval.DAY) + .minDocCount(1) + .timeZone(ZoneId.of("+01:00")) + ) + .execute() + .actionGet(); ZoneId tz = ZoneId.of("+01:00"); assertSearchResponse(response); @@ -292,10 +330,10 @@ public void testSingleValued_timeZone_epoch() throws Exception { } ZoneId tz = ZoneId.of("+01:00"); SearchResponse response = client().prepareSearch("idx") - .addAggregation(dateHistogram("histo").field("date") - .calendarInterval(DateHistogramInterval.DAY).minDocCount(1) - .timeZone(tz).format(format)) - .get(); + .addAggregation( + dateHistogram("histo").field("date").calendarInterval(DateHistogramInterval.DAY).minDocCount(1).timeZone(tz).format(format) + ) + .get(); assertSearchResponse(response); Histogram histo = response.getAggregations().get("histo"); @@ -329,11 +367,8 @@ public void testSingleValued_timeZone_epoch() throws 
Exception { public void testSingleValuedFieldOrderedByKeyAsc() throws Exception { SearchResponse response = client().prepareSearch("idx") - .addAggregation(dateHistogram("histo") - .field("date") - .calendarInterval(DateHistogramInterval.MONTH) - .order(BucketOrder.key(true))) - .get(); + .addAggregation(dateHistogram("histo").field("date").calendarInterval(DateHistogramInterval.MONTH).order(BucketOrder.key(true))) + .get(); assertSearchResponse(response); @@ -352,11 +387,10 @@ public void testSingleValuedFieldOrderedByKeyAsc() throws Exception { public void testSingleValuedFieldOrderedByKeyDesc() throws Exception { SearchResponse response = client().prepareSearch("idx") - .addAggregation(dateHistogram("histo") - .field("date") - .calendarInterval(DateHistogramInterval.MONTH) - .order(BucketOrder.key(false))) - .get(); + .addAggregation( + dateHistogram("histo").field("date").calendarInterval(DateHistogramInterval.MONTH).order(BucketOrder.key(false)) + ) + .get(); assertSearchResponse(response); @@ -374,11 +408,10 @@ public void testSingleValuedFieldOrderedByKeyDesc() throws Exception { public void testSingleValuedFieldOrderedByCountAsc() throws Exception { SearchResponse response = client().prepareSearch("idx") - .addAggregation(dateHistogram("histo") - .field("date") - .calendarInterval(DateHistogramInterval.MONTH) - .order(BucketOrder.count(true))) - .get(); + .addAggregation( + dateHistogram("histo").field("date").calendarInterval(DateHistogramInterval.MONTH).order(BucketOrder.count(true)) + ) + .get(); assertSearchResponse(response); @@ -396,11 +429,10 @@ public void testSingleValuedFieldOrderedByCountAsc() throws Exception { public void testSingleValuedFieldOrderedByCountDesc() throws Exception { SearchResponse response = client().prepareSearch("idx") - .addAggregation(dateHistogram("histo") - .field("date") - .calendarInterval(DateHistogramInterval.MONTH) - .order(BucketOrder.count(false))) - .get(); + .addAggregation( + dateHistogram("histo").field("date").calendarInterval(DateHistogramInterval.MONTH).order(BucketOrder.count(false)) + ) + .get(); assertSearchResponse(response); @@ -418,9 +450,10 @@ public void testSingleValuedFieldOrderedByCountDesc() throws Exception { public void testSingleValuedFieldWithSubAggregation() throws Exception { SearchResponse response = client().prepareSearch("idx") - .addAggregation(dateHistogram("histo").field("date").calendarInterval(DateHistogramInterval.MONTH) - .subAggregation(sum("sum").field("value"))) - .get(); + .addAggregation( + dateHistogram("histo").field("date").calendarInterval(DateHistogramInterval.MONTH).subAggregation(sum("sum").field("value")) + ) + .get(); assertSearchResponse(response); @@ -429,10 +462,10 @@ public void testSingleValuedFieldWithSubAggregation() throws Exception { assertThat(histo.getName(), equalTo("histo")); List buckets = histo.getBuckets(); assertThat(buckets.size(), equalTo(3)); - assertThat(((InternalAggregation)histo).getProperty("_bucket_count"), equalTo(3)); - Object[] propertiesKeys = (Object[]) ((InternalAggregation)histo).getProperty("_key"); - Object[] propertiesDocCounts = (Object[]) ((InternalAggregation)histo).getProperty("_count"); - Object[] propertiesCounts = (Object[]) ((InternalAggregation)histo).getProperty("sum.value"); + assertThat(((InternalAggregation) histo).getProperty("_bucket_count"), equalTo(3)); + Object[] propertiesKeys = (Object[]) ((InternalAggregation) histo).getProperty("_key"); + Object[] propertiesDocCounts = (Object[]) ((InternalAggregation) histo).getProperty("_count"); + 
Object[] propertiesCounts = (Object[]) ((InternalAggregation) histo).getProperty("sum.value"); ZonedDateTime key = ZonedDateTime.of(2012, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC); Histogram.Bucket bucket = buckets.get(0); @@ -476,12 +509,13 @@ public void testSingleValuedFieldWithSubAggregation() throws Exception { public void testSingleValuedFieldOrderedBySubAggregationAsc() throws Exception { SearchResponse response = client().prepareSearch("idx") - .addAggregation(dateHistogram("histo") - .field("date") - .calendarInterval(DateHistogramInterval.MONTH) - .order(BucketOrder.aggregation("sum", true)) - .subAggregation(max("sum").field("value"))) - .get(); + .addAggregation( + dateHistogram("histo").field("date") + .calendarInterval(DateHistogramInterval.MONTH) + .order(BucketOrder.aggregation("sum", true)) + .subAggregation(max("sum").field("value")) + ) + .get(); assertSearchResponse(response); @@ -499,12 +533,13 @@ public void testSingleValuedFieldOrderedBySubAggregationAsc() throws Exception { public void testSingleValuedFieldOrderedBySubAggregationDesc() throws Exception { SearchResponse response = client().prepareSearch("idx") - .addAggregation(dateHistogram("histo") - .field("date") - .calendarInterval(DateHistogramInterval.MONTH) - .order(BucketOrder.aggregation("sum", false)) - .subAggregation(max("sum").field("value"))) - .get(); + .addAggregation( + dateHistogram("histo").field("date") + .calendarInterval(DateHistogramInterval.MONTH) + .order(BucketOrder.aggregation("sum", false)) + .subAggregation(max("sum").field("value")) + ) + .get(); assertSearchResponse(response); @@ -522,12 +557,13 @@ public void testSingleValuedFieldOrderedBySubAggregationDesc() throws Exception public void testSingleValuedFieldOrderedByMultiValuedSubAggregationDesc() throws Exception { SearchResponse response = client().prepareSearch("idx") - .addAggregation(dateHistogram("histo") - .field("date") - .calendarInterval(DateHistogramInterval.MONTH) - .order(BucketOrder.aggregation("stats", "sum", false)) - .subAggregation(stats("stats").field("value"))) - .get(); + .addAggregation( + dateHistogram("histo").field("date") + .calendarInterval(DateHistogramInterval.MONTH) + .order(BucketOrder.aggregation("stats", "sum", false)) + .subAggregation(stats("stats").field("value")) + ) + .get(); assertSearchResponse(response); @@ -545,11 +581,12 @@ public void testSingleValuedFieldOrderedByMultiValuedSubAggregationDesc() throws public void testSingleValuedFieldOrderedByTieBreaker() throws Exception { SearchResponse response = client().prepareSearch("idx") - .addAggregation(dateHistogram("histo") - .field("date") - .calendarInterval(DateHistogramInterval.MONTH) - .order(BucketOrder.aggregation("max_constant", randomBoolean())) - .subAggregation(max("max_constant").field("constant"))) + .addAggregation( + dateHistogram("histo").field("date") + .calendarInterval(DateHistogramInterval.MONTH) + .order(BucketOrder.aggregation("max_constant", randomBoolean())) + .subAggregation(max("max_constant").field("constant")) + ) .get(); assertSearchResponse(response); @@ -569,16 +606,17 @@ public void testSingleValuedFieldOrderedByTieBreaker() throws Exception { public void testSingleValuedFieldOrderedByIllegalAgg() throws Exception { boolean asc = true; try { - client() - .prepareSearch("idx") + client().prepareSearch("idx") .addAggregation( dateHistogram("histo").field("date") .calendarInterval(DateHistogramInterval.MONTH) .order(BucketOrder.aggregation("inner_histo>avg", asc)) - .subAggregation(dateHistogram("inner_histo") - 
.calendarInterval(DateHistogramInterval.MONTH) - .field("dates") - .subAggregation(avg("avg").field("value")))) + .subAggregation( + dateHistogram("inner_histo").calendarInterval(DateHistogramInterval.MONTH) + .field("dates") + .subAggregation(avg("avg").field("value")) + ) + ) .get(); fail("Expected an exception"); } catch (SearchPhaseExecutionException e) { @@ -601,10 +639,12 @@ public void testSingleValuedFieldWithValueScript() throws Exception { Map params = new HashMap<>(); params.put("fieldname", "date"); SearchResponse response = client().prepareSearch("idx") - .addAggregation(dateHistogram("histo") - .field("date") - .script(new Script(ScriptType.INLINE, "mockscript", DateScriptMocksPlugin.LONG_PLUS_ONE_MONTH, params)) - .calendarInterval(DateHistogramInterval.MONTH)).get(); + .addAggregation( + dateHistogram("histo").field("date") + .script(new Script(ScriptType.INLINE, "mockscript", DateScriptMocksPlugin.LONG_PLUS_ONE_MONTH, params)) + .calendarInterval(DateHistogramInterval.MONTH) + ) + .get(); assertSearchResponse(response); @@ -648,8 +688,8 @@ public void testSingleValuedFieldWithValueScript() throws Exception { public void testMultiValuedField() throws Exception { SearchResponse response = client().prepareSearch("idx") - .addAggregation(dateHistogram("histo").field("dates").calendarInterval(DateHistogramInterval.MONTH)) - .get(); + .addAggregation(dateHistogram("histo").field("dates").calendarInterval(DateHistogramInterval.MONTH)) + .get(); assertSearchResponse(response); @@ -690,11 +730,10 @@ public void testMultiValuedField() throws Exception { public void testMultiValuedFieldOrderedByCountDesc() throws Exception { SearchResponse response = client().prepareSearch("idx") - .addAggregation(dateHistogram("histo") - .field("dates") - .calendarInterval(DateHistogramInterval.MONTH) - .order(BucketOrder.count(false))) - .get(); + .addAggregation( + dateHistogram("histo").field("dates").calendarInterval(DateHistogramInterval.MONTH).order(BucketOrder.count(false)) + ) + .get(); assertSearchResponse(response); @@ -740,10 +779,12 @@ public void testMultiValuedFieldWithValueScript() throws Exception { Map params = new HashMap<>(); params.put("fieldname", "dates"); SearchResponse response = client().prepareSearch("idx") - .addAggregation(dateHistogram("histo") - .field("dates") - .script(new Script(ScriptType.INLINE, "mockscript", DateScriptMocksPlugin.LONG_PLUS_ONE_MONTH, params)) - .calendarInterval(DateHistogramInterval.MONTH)).get(); + .addAggregation( + dateHistogram("histo").field("dates") + .script(new Script(ScriptType.INLINE, "mockscript", DateScriptMocksPlugin.LONG_PLUS_ONE_MONTH, params)) + .calendarInterval(DateHistogramInterval.MONTH) + ) + .get(); assertSearchResponse(response); @@ -794,10 +835,11 @@ public void testScriptSingleValue() throws Exception { Map params = new HashMap<>(); params.put("fieldname", "date"); SearchResponse response = client().prepareSearch("idx") - .addAggregation(dateHistogram("histo").script( - new Script(ScriptType.INLINE, "mockscript", DateScriptMocksPlugin.EXTRACT_FIELD, params)) - .calendarInterval(DateHistogramInterval.MONTH)) - .get(); + .addAggregation( + dateHistogram("histo").script(new Script(ScriptType.INLINE, "mockscript", DateScriptMocksPlugin.EXTRACT_FIELD, params)) + .calendarInterval(DateHistogramInterval.MONTH) + ) + .get(); assertSearchResponse(response); @@ -833,10 +875,11 @@ public void testScriptMultiValued() throws Exception { Map params = new HashMap<>(); params.put("fieldname", "dates"); SearchResponse response = 
client().prepareSearch("idx") - .addAggregation(dateHistogram("histo").script( - new Script(ScriptType.INLINE, "mockscript", DateScriptMocksPlugin.EXTRACT_FIELD, params)) - .calendarInterval(DateHistogramInterval.MONTH)) - .get(); + .addAggregation( + dateHistogram("histo").script(new Script(ScriptType.INLINE, "mockscript", DateScriptMocksPlugin.EXTRACT_FIELD, params)) + .calendarInterval(DateHistogramInterval.MONTH) + ) + .get(); assertSearchResponse(response); @@ -875,21 +918,19 @@ public void testScriptMultiValued() throws Exception { assertThat(bucket.getDocCount(), equalTo(3L)); } - - - /* + /* [ Jan 2, Feb 3] [ Feb 2, Mar 3] [ Feb 15, Mar 16] [ Mar 2, Apr 3] [ Mar 15, Apr 16] [ Mar 23, Apr 24] - */ + */ public void testUnmapped() throws Exception { SearchResponse response = client().prepareSearch("idx_unmapped") - .addAggregation(dateHistogram("histo").field("date").calendarInterval(DateHistogramInterval.MONTH)) - .get(); + .addAggregation(dateHistogram("histo").field("date").calendarInterval(DateHistogramInterval.MONTH)) + .get(); assertSearchResponse(response); @@ -901,8 +942,8 @@ public void testUnmapped() throws Exception { public void testPartiallyUnmapped() throws Exception { SearchResponse response = client().prepareSearch("idx", "idx_unmapped") - .addAggregation(dateHistogram("histo").field("date").calendarInterval(DateHistogramInterval.MONTH)) - .get(); + .addAggregation(dateHistogram("histo").field("date").calendarInterval(DateHistogramInterval.MONTH)) + .get(); assertSearchResponse(response); @@ -936,10 +977,14 @@ public void testPartiallyUnmapped() throws Exception { public void testEmptyAggregation() throws Exception { SearchResponse searchResponse = client().prepareSearch("empty_bucket_idx") - .setQuery(matchAllQuery()) - .addAggregation(histogram("histo").field("value").interval(1L).minDocCount(0) - .subAggregation(dateHistogram("date_histo").field("value").fixedInterval(DateHistogramInterval.HOUR))) - .get(); + .setQuery(matchAllQuery()) + .addAggregation( + histogram("histo").field("value") + .interval(1L) + .minDocCount(0) + .subAggregation(dateHistogram("date_histo").field("value").fixedInterval(DateHistogramInterval.HOUR)) + ) + .get(); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(2L)); Histogram histo = searchResponse.getAggregations().get("histo"); @@ -963,20 +1008,22 @@ public void testSingleValueWithTimeZone() throws Exception { IndexRequestBuilder[] reqs = new IndexRequestBuilder[5]; ZonedDateTime date = date("2014-03-11T00:00:00+00:00"); for (int i = 0; i < reqs.length; i++) { - reqs[i] = client().prepareIndex("idx2").setId("" + i) - .setSource(jsonBuilder().startObject().timeField("date", date).endObject()); + reqs[i] = client().prepareIndex("idx2") + .setId("" + i) + .setSource(jsonBuilder().startObject().timeField("date", date).endObject()); date = date.plusHours(1); } indexRandom(true, reqs); SearchResponse response = client().prepareSearch("idx2") - .setQuery(matchAllQuery()) - .addAggregation(dateHistogram("date_histo") - .field("date") - .timeZone(ZoneId.of("-02:00")) - .calendarInterval(DateHistogramInterval.DAY) - .format("yyyy-MM-dd:HH-mm-ssZZZZZ")) - .get(); + .setQuery(matchAllQuery()) + .addAggregation( + dateHistogram("date_histo").field("date") + .timeZone(ZoneId.of("-02:00")) + .calendarInterval(DateHistogramInterval.DAY) + .format("yyyy-MM-dd:HH-mm-ssZZZZZ") + ) + .get(); assertThat(response.getHits().getTotalHits().value, equalTo(5L)); @@ -1004,10 +1051,9 @@ public void testSingleValueFieldWithExtendedBounds() throws 
Exception { ZonedDateTime baseKey = Instant.ofEpochMilli(intervalMillis * (base.toInstant().toEpochMilli() / intervalMillis)) .atZone(ZoneOffset.UTC); - prepareCreate("idx2") - .setSettings( - Settings.builder().put(indexSettings()).put("index.number_of_shards", 1) - .put("index.number_of_replicas", 0)).get(); + prepareCreate("idx2").setSettings( + Settings.builder().put(indexSettings()).put("index.number_of_shards", 1).put("index.number_of_replicas", 0) + ).get(); int numOfBuckets = randomIntBetween(3, 6); int emptyBucketIndex = randomIntBetween(1, numOfBuckets - 2); // should be in the middle @@ -1066,14 +1112,15 @@ public void testSingleValueFieldWithExtendedBounds() throws Exception { SearchResponse response = null; try { response = client().prepareSearch("idx2") - .addAggregation(dateHistogram("histo") - .field("date") - .fixedInterval(DateHistogramInterval.days(interval)) - .minDocCount(0) - // when explicitly specifying a format, the extended bounds should be defined by the same format - .extendedBounds(new LongBounds(format(boundsMin, pattern), format(boundsMax, pattern))) - .format(pattern)) - .get(); + .addAggregation( + dateHistogram("histo").field("date") + .fixedInterval(DateHistogramInterval.days(interval)) + .minDocCount(0) + // when explicitly specifying a format, the extended bounds should be defined by the same format + .extendedBounds(new LongBounds(format(boundsMin, pattern), format(boundsMax, pattern))) + .format(pattern) + ) + .get(); if (invalidBoundsError) { fail("Expected an exception to be thrown when bounds.min is greater than bounds.max"); @@ -1113,9 +1160,9 @@ public void testSingleValueFieldWithExtendedBounds() throws Exception { */ public void testSingleValueFieldWithExtendedBoundsTimezone() throws Exception { String index = "test12278"; - prepareCreate(index) - .setSettings(Settings.builder().put(indexSettings()).put("index.number_of_shards", 1).put("index.number_of_replicas", 0)) - .get(); + prepareCreate(index).setSettings( + Settings.builder().put(indexSettings()).put("index.number_of_shards", 1).put("index.number_of_replicas", 0) + ).get(); DateMathParser parser = DateFormatter.forPattern("yyyy/MM/dd HH:mm:ss||yyyy/MM/dd||epoch_millis").toDateMathParser(); @@ -1133,18 +1180,25 @@ public void testSingleValueFieldWithExtendedBoundsTimezone() throws Exception { SearchResponse response = null; // retrieve those docs with the same time zone and extended bounds - response = client() - .prepareSearch(index) - .setQuery(QueryBuilders.rangeQuery("date") - .from("now/d").to("now/d").includeLower(true).includeUpper(true).timeZone(timezone.getId())) - .addAggregation( - dateHistogram("histo").field("date").calendarInterval(DateHistogramInterval.hours(1)) - .timeZone(timezone).minDocCount(0).extendedBounds(new LongBounds("now/d", "now/d+23h")) - ).get(); + response = client().prepareSearch(index) + .setQuery( + QueryBuilders.rangeQuery("date").from("now/d").to("now/d").includeLower(true).includeUpper(true).timeZone(timezone.getId()) + ) + .addAggregation( + dateHistogram("histo").field("date") + .calendarInterval(DateHistogramInterval.hours(1)) + .timeZone(timezone) + .minDocCount(0) + .extendedBounds(new LongBounds("now/d", "now/d+23h")) + ) + .get(); assertSearchResponse(response); - assertThat("Expected 24 buckets for one day aggregation with hourly interval", response.getHits().getTotalHits().value, - equalTo(2L)); + assertThat( + "Expected 24 buckets for one day aggregation with hourly interval", + response.getHits().getTotalHits().value, + equalTo(2L) + ); 
Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); @@ -1172,9 +1226,9 @@ public void testSingleValueFieldWithExtendedBoundsTimezone() throws Exception { */ public void testSingleValueFieldWithExtendedBoundsOffset() throws Exception { String index = "test23776"; - prepareCreate(index) - .setSettings(Settings.builder().put(indexSettings()).put("index.number_of_shards", 1).put("index.number_of_replicas", 0)) - .get(); + prepareCreate(index).setSettings( + Settings.builder().put(indexSettings()).put("index.number_of_shards", 1).put("index.number_of_replicas", 0) + ).get(); List builders = new ArrayList<>(); DateFormatter formatter = DateFormatter.forPattern("date_optional_time"); @@ -1187,13 +1241,15 @@ public void testSingleValueFieldWithExtendedBoundsOffset() throws Exception { SearchResponse response = null; // retrieve those docs with the same time zone and extended bounds - response = client() - .prepareSearch(index) - .addAggregation( - dateHistogram("histo").field("date").calendarInterval(DateHistogramInterval.days(1)) - .offset("+6h").minDocCount(0) - .extendedBounds(new LongBounds("2016-01-01T06:00:00Z", "2016-01-08T08:00:00Z")) - ).get(); + response = client().prepareSearch(index) + .addAggregation( + dateHistogram("histo").field("date") + .calendarInterval(DateHistogramInterval.days(1)) + .offset("+6h") + .minDocCount(0) + .extendedBounds(new LongBounds("2016-01-01T06:00:00Z", "2016-01-08T08:00:00Z")) + ) + .get(); assertSearchResponse(response); Histogram histo = response.getAggregations().get("histo"); @@ -1223,24 +1279,29 @@ public void testSingleValueFieldWithExtendedBoundsOffset() throws Exception { } public void testSingleValueWithMultipleDateFormatsFromMapping() throws Exception { - String mappingJson = Strings.toString(jsonBuilder().startObject() + String mappingJson = Strings.toString( + jsonBuilder().startObject() .startObject("properties") - .startObject("date").field("type", "date").field("format", "strict_date_optional_time||dd-MM-yyyy") - .endObject().endObject().endObject()); + .startObject("date") + .field("type", "date") + .field("format", "strict_date_optional_time||dd-MM-yyyy") + .endObject() + .endObject() + .endObject() + ); prepareCreate("idx2").setMapping(mappingJson).get(); IndexRequestBuilder[] reqs = new IndexRequestBuilder[5]; for (int i = 0; i < reqs.length; i++) { - reqs[i] = client().prepareIndex("idx2").setId("" + i) - .setSource(jsonBuilder().startObject().field("date", "10-03-2014").endObject()); + reqs[i] = client().prepareIndex("idx2") + .setId("" + i) + .setSource(jsonBuilder().startObject().field("date", "10-03-2014").endObject()); } indexRandom(true, reqs); SearchResponse response = client().prepareSearch("idx2") - .setQuery(matchAllQuery()) - .addAggregation(dateHistogram("date_histo") - .field("date") - .calendarInterval(DateHistogramInterval.DAY)) - .get(); + .setQuery(matchAllQuery()) + .addAggregation(dateHistogram("date_histo").field("date").calendarInterval(DateHistogramInterval.DAY)) + .get(); assertSearchHits(response, "0", "1", "2", "3", "4"); @@ -1258,9 +1319,13 @@ public void testSingleValueWithMultipleDateFormatsFromMapping() throws Exception public void testIssue6965() { SearchResponse response = client().prepareSearch("idx") - .addAggregation(dateHistogram("histo").field("date").timeZone(ZoneId.of("+01:00")) - .calendarInterval(DateHistogramInterval.MONTH).minDocCount(0)) - .get(); + .addAggregation( + dateHistogram("histo").field("date") + .timeZone(ZoneId.of("+01:00")) + 
.calendarInterval(DateHistogramInterval.MONTH) + .minDocCount(0) + ) + .get(); assertSearchResponse(response); @@ -1296,13 +1361,20 @@ public void testIssue6965() { public void testDSTBoundaryIssue9491() throws InterruptedException, ExecutionException { assertAcked(client().admin().indices().prepareCreate("test9491").setMapping("d", "type=date").get()); - indexRandom(true, client().prepareIndex("test9491").setSource("d", "2014-10-08T13:00:00Z"), - client().prepareIndex("test9491").setSource("d", "2014-11-08T13:00:00Z")); + indexRandom( + true, + client().prepareIndex("test9491").setSource("d", "2014-10-08T13:00:00Z"), + client().prepareIndex("test9491").setSource("d", "2014-11-08T13:00:00Z") + ); ensureSearchable("test9491"); SearchResponse response = client().prepareSearch("test9491") - .addAggregation(dateHistogram("histo").field("d").calendarInterval(DateHistogramInterval.YEAR) - .timeZone(ZoneId.of("Asia/Jerusalem")).format("yyyy-MM-dd'T'HH:mm:ss.SSSXXXXX")) - .get(); + .addAggregation( + dateHistogram("histo").field("d") + .calendarInterval(DateHistogramInterval.YEAR) + .timeZone(ZoneId.of("Asia/Jerusalem")) + .format("yyyy-MM-dd'T'HH:mm:ss.SSSXXXXX") + ) + .get(); assertSearchResponse(response); Histogram histo = response.getAggregations().get("histo"); assertThat(histo.getBuckets().size(), equalTo(1)); @@ -1312,16 +1384,22 @@ public void testDSTBoundaryIssue9491() throws InterruptedException, ExecutionExc public void testIssue8209() throws InterruptedException, ExecutionException { assertAcked(client().admin().indices().prepareCreate("test8209").setMapping("d", "type=date").get()); - indexRandom(true, - client().prepareIndex("test8209").setSource("d", "2014-01-01T00:00:00Z"), - client().prepareIndex("test8209").setSource("d", "2014-04-01T00:00:00Z"), - client().prepareIndex("test8209").setSource("d", "2014-04-30T00:00:00Z")); + indexRandom( + true, + client().prepareIndex("test8209").setSource("d", "2014-01-01T00:00:00Z"), + client().prepareIndex("test8209").setSource("d", "2014-04-01T00:00:00Z"), + client().prepareIndex("test8209").setSource("d", "2014-04-30T00:00:00Z") + ); ensureSearchable("test8209"); SearchResponse response = client().prepareSearch("test8209") - .addAggregation(dateHistogram("histo").field("d").calendarInterval(DateHistogramInterval.MONTH) + .addAggregation( + dateHistogram("histo").field("d") + .calendarInterval(DateHistogramInterval.MONTH) .format("yyyy-MM-dd'T'HH:mm:ss.SSSXXXXX") - .timeZone(ZoneId.of("CET")).minDocCount(0)) - .get(); + .timeZone(ZoneId.of("CET")) + .minDocCount(0) + ) + .get(); assertSearchResponse(response); Histogram histo = response.getAggregations().get("histo"); assertThat(histo.getBuckets().size(), equalTo(4)); @@ -1351,10 +1429,14 @@ public void testFormatIndexUnmapped() throws InterruptedException, ExecutionExce ensureSearchable(indexDateUnmapped); SearchResponse response = client().prepareSearch(indexDateUnmapped) - .addAggregation( - dateHistogram("histo").field("dateField").calendarInterval(DateHistogramInterval.MONTH).format("yyyy-MM") - .minDocCount(0).extendedBounds(new LongBounds("2018-01", "2018-01"))) - .get(); + .addAggregation( + dateHistogram("histo").field("dateField") + .calendarInterval(DateHistogramInterval.MONTH) + .format("yyyy-MM") + .minDocCount(0) + .extendedBounds(new LongBounds("2018-01", "2018-01")) + ) + .get(); assertSearchResponse(response); Histogram histo = response.getAggregations().get("histo"); assertThat(histo.getBuckets().size(), equalTo(1)); @@ -1373,8 +1455,11 @@ public void 
testRewriteTimeZone_EpochMillisFormat() throws InterruptedException, assertAcked(client().admin().indices().prepareCreate(index).setMapping("d", "type=date,format=epoch_millis").get()); indexRandom(true, client().prepareIndex(index).setSource("d", "1477954800000")); ensureSearchable(index); - SearchResponse response = client().prepareSearch(index).addAggregation(dateHistogram("histo").field("d") - .calendarInterval(DateHistogramInterval.MONTH).timeZone(ZoneId.of("Europe/Berlin"))).get(); + SearchResponse response = client().prepareSearch(index) + .addAggregation( + dateHistogram("histo").field("d").calendarInterval(DateHistogramInterval.MONTH).timeZone(ZoneId.of("Europe/Berlin")) + ) + .get(); assertSearchResponse(response); Histogram histo = response.getAggregations().get("histo"); assertThat(histo.getBuckets().size(), equalTo(1)); @@ -1385,9 +1470,14 @@ public void testRewriteTimeZone_EpochMillisFormat() throws InterruptedException, } assertThat(histo.getBuckets().get(0).getDocCount(), equalTo(1L)); - response = client().prepareSearch(index).addAggregation(dateHistogram("histo").field("d") - .calendarInterval(DateHistogramInterval.MONTH).timeZone(ZoneId.of("Europe/Berlin")).format("yyyy-MM-dd")) - .get(); + response = client().prepareSearch(index) + .addAggregation( + dateHistogram("histo").field("d") + .calendarInterval(DateHistogramInterval.MONTH) + .timeZone(ZoneId.of("Europe/Berlin")) + .format("yyyy-MM-dd") + ) + .get(); assertSearchResponse(response); histo = response.getAggregations().get("histo"); assertThat(histo.getBuckets().size(), equalTo(1)); @@ -1406,38 +1496,64 @@ public void testRewriteTimeZone_EpochMillisFormat() throws InterruptedException, */ public void testDSTEndTransition() throws Exception { SearchResponse response = client().prepareSearch("idx") - .setQuery(new MatchNoneQueryBuilder()) - .addAggregation(dateHistogram("histo").field("date").timeZone(ZoneId.of("Europe/Oslo")) - .calendarInterval(DateHistogramInterval.HOUR).minDocCount(0).extendedBounds( - new LongBounds("2015-10-25T02:00:00.000+02:00", "2015-10-25T04:00:00.000+01:00"))) - .get(); + .setQuery(new MatchNoneQueryBuilder()) + .addAggregation( + dateHistogram("histo").field("date") + .timeZone(ZoneId.of("Europe/Oslo")) + .calendarInterval(DateHistogramInterval.HOUR) + .minDocCount(0) + .extendedBounds(new LongBounds("2015-10-25T02:00:00.000+02:00", "2015-10-25T04:00:00.000+01:00")) + ) + .get(); Histogram histo = response.getAggregations().get("histo"); List buckets = histo.getBuckets(); assertThat(buckets.size(), equalTo(4)); - assertThat(((ZonedDateTime) buckets.get(1).getKey()).toInstant().toEpochMilli() - - ((ZonedDateTime) buckets.get(0).getKey()).toInstant().toEpochMilli(), equalTo(3600000L)); - assertThat(((ZonedDateTime) buckets.get(2).getKey()).toInstant().toEpochMilli() - - ((ZonedDateTime) buckets.get(1).getKey()).toInstant().toEpochMilli(), equalTo(3600000L)); - assertThat(((ZonedDateTime) buckets.get(3).getKey()).toInstant().toEpochMilli() - - ((ZonedDateTime) buckets.get(2).getKey()).toInstant().toEpochMilli(), equalTo(3600000L)); + assertThat( + ((ZonedDateTime) buckets.get(1).getKey()).toInstant().toEpochMilli() - ((ZonedDateTime) buckets.get(0).getKey()).toInstant() + .toEpochMilli(), + equalTo(3600000L) + ); + assertThat( + ((ZonedDateTime) buckets.get(2).getKey()).toInstant().toEpochMilli() - ((ZonedDateTime) buckets.get(1).getKey()).toInstant() + .toEpochMilli(), + equalTo(3600000L) + ); + assertThat( + ((ZonedDateTime) buckets.get(3).getKey()).toInstant().toEpochMilli() - 
((ZonedDateTime) buckets.get(2).getKey()).toInstant() + .toEpochMilli(), + equalTo(3600000L) + ); response = client().prepareSearch("idx") .setQuery(new MatchNoneQueryBuilder()) - .addAggregation(dateHistogram("histo").field("date").timeZone(ZoneId.of("Europe/Oslo")) - .calendarInterval(DateHistogramInterval.HOUR).minDocCount(0).extendedBounds( - new LongBounds("2015-10-25T02:00:00.000+02:00", "2015-10-25T04:00:00.000+01:00"))) + .addAggregation( + dateHistogram("histo").field("date") + .timeZone(ZoneId.of("Europe/Oslo")) + .calendarInterval(DateHistogramInterval.HOUR) + .minDocCount(0) + .extendedBounds(new LongBounds("2015-10-25T02:00:00.000+02:00", "2015-10-25T04:00:00.000+01:00")) + ) .get(); histo = response.getAggregations().get("histo"); buckets = histo.getBuckets(); assertThat(buckets.size(), equalTo(4)); - assertThat(((ZonedDateTime) buckets.get(1).getKey()).toInstant().toEpochMilli() - - ((ZonedDateTime) buckets.get(0).getKey()).toInstant().toEpochMilli(), equalTo(3600000L)); - assertThat(((ZonedDateTime) buckets.get(2).getKey()).toInstant().toEpochMilli() - - ((ZonedDateTime) buckets.get(1).getKey()).toInstant().toEpochMilli(), equalTo(3600000L)); - assertThat(((ZonedDateTime) buckets.get(3).getKey()).toInstant().toEpochMilli() - - ((ZonedDateTime) buckets.get(2).getKey()).toInstant().toEpochMilli(), equalTo(3600000L)); + assertThat( + ((ZonedDateTime) buckets.get(1).getKey()).toInstant().toEpochMilli() - ((ZonedDateTime) buckets.get(0).getKey()).toInstant() + .toEpochMilli(), + equalTo(3600000L) + ); + assertThat( + ((ZonedDateTime) buckets.get(2).getKey()).toInstant().toEpochMilli() - ((ZonedDateTime) buckets.get(1).getKey()).toInstant() + .toEpochMilli(), + equalTo(3600000L) + ); + assertThat( + ((ZonedDateTime) buckets.get(3).getKey()).toInstant().toEpochMilli() - ((ZonedDateTime) buckets.get(2).getKey()).toInstant() + .toEpochMilli(), + equalTo(3600000L) + ); } /** @@ -1445,53 +1561,142 @@ public void testDSTEndTransition() throws Exception { * Ensure requests using nondeterministic scripts do not get cached. 
*/ public void testScriptCaching() throws Exception { - assertAcked(prepareCreate("cache_test_idx").setMapping("d", "type=date") + assertAcked( + prepareCreate("cache_test_idx").setMapping("d", "type=date") .setSettings(Settings.builder().put("requests.cache.enable", true).put("number_of_shards", 1).put("number_of_replicas", 1)) - .get()); + .get() + ); String date = DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.format(date(1, 1)); String date2 = DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.format(date(2, 1)); - indexRandom(true, client().prepareIndex("cache_test_idx").setId("1").setSource("d", date), - client().prepareIndex("cache_test_idx").setId("2").setSource("d", date2)); + indexRandom( + true, + client().prepareIndex("cache_test_idx").setId("1").setSource("d", date), + client().prepareIndex("cache_test_idx").setId("2").setSource("d", date2) + ); // Make sure we are starting with a clear cache - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getHitCount(), equalTo(0L)); - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getMissCount(), equalTo(0L)); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getHitCount(), + equalTo(0L) + ); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getMissCount(), + equalTo(0L) + ); // Test that a request using a nondeterministic script does not get cached Map params = new HashMap<>(); params.put("fieldname", "d"); - SearchResponse r = client().prepareSearch("cache_test_idx").setSize(0).addAggregation(dateHistogram("histo").field("d") - .script(new Script(ScriptType.INLINE, "mockscript", DateScriptMocksPlugin.CURRENT_DATE, params)) - .calendarInterval(DateHistogramInterval.MONTH)).get(); + SearchResponse r = client().prepareSearch("cache_test_idx") + .setSize(0) + .addAggregation( + dateHistogram("histo").field("d") + .script(new Script(ScriptType.INLINE, "mockscript", DateScriptMocksPlugin.CURRENT_DATE, params)) + .calendarInterval(DateHistogramInterval.MONTH) + ) + .get(); assertSearchResponse(r); - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getHitCount(), equalTo(0L)); - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getMissCount(), equalTo(0L)); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getHitCount(), + equalTo(0L) + ); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getMissCount(), + equalTo(0L) + ); // Test that a request using a deterministic script gets cached - r = client().prepareSearch("cache_test_idx").setSize(0).addAggregation(dateHistogram("histo").field("d") - .script(new Script(ScriptType.INLINE, "mockscript", DateScriptMocksPlugin.LONG_PLUS_ONE_MONTH, params)) - .calendarInterval(DateHistogramInterval.MONTH)).get(); + r = client().prepareSearch("cache_test_idx") + .setSize(0) + .addAggregation( + dateHistogram("histo").field("d") + .script(new Script(ScriptType.INLINE, "mockscript", 
DateScriptMocksPlugin.LONG_PLUS_ONE_MONTH, params)) + .calendarInterval(DateHistogramInterval.MONTH) + ) + .get(); assertSearchResponse(r); - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getHitCount(), equalTo(0L)); - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getMissCount(), equalTo(1L)); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getHitCount(), + equalTo(0L) + ); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getMissCount(), + equalTo(1L) + ); // Ensure that non-scripted requests are cached as normal - r = client().prepareSearch("cache_test_idx").setSize(0) - .addAggregation(dateHistogram("histo").field("d").calendarInterval(DateHistogramInterval.MONTH)).get(); + r = client().prepareSearch("cache_test_idx") + .setSize(0) + .addAggregation(dateHistogram("histo").field("d").calendarInterval(DateHistogramInterval.MONTH)) + .get(); assertSearchResponse(r); - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getHitCount(), equalTo(0L)); - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getMissCount(), equalTo(2L)); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getHitCount(), + equalTo(0L) + ); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getMissCount(), + equalTo(2L) + ); } public void testSingleValuedFieldOrderedBySingleValueSubAggregationAscAndKeyDesc() throws Exception { @@ -1500,43 +1705,51 @@ public void testSingleValuedFieldOrderedBySingleValueSubAggregationAscAndKeyDesc } public void testSingleValuedFieldOrderedBySingleValueSubAggregationAscAndKeyAsc() throws Exception { - int[] expectedDays = new int[] { 1, 2, 3, 4, 5, 6, 7 }; + int[] expectedDays = new int[] { 1, 2, 3, 4, 5, 6, 7 }; assertMultiSortResponse(expectedDays, BucketOrder.aggregation("avg_l", true), BucketOrder.key(true)); } public void testSingleValuedFieldOrderedBySingleValueSubAggregationDescAndKeyAsc() throws Exception { - int[] expectedDays = new int[] { 5, 6, 7, 3, 4, 2, 1 }; + int[] expectedDays = new int[] { 5, 6, 7, 3, 4, 2, 1 }; assertMultiSortResponse(expectedDays, BucketOrder.aggregation("avg_l", false), BucketOrder.key(true)); } public void testSingleValuedFieldOrderedByCountAscAndSingleValueSubAggregationAsc() throws Exception { - int[] expectedDays = new int[] { 6, 7, 3, 4, 5, 1, 2 }; + int[] expectedDays = new int[] { 6, 7, 3, 4, 5, 1, 2 }; assertMultiSortResponse(expectedDays, BucketOrder.count(true), BucketOrder.aggregation("avg_l", true)); } public void testSingleValuedFieldOrderedBySingleValueSubAggregationAscSingleValueSubAggregationAsc() throws Exception { - int[] expectedDays = new int[] { 6, 7, 3, 5, 4, 1, 2 }; + int[] expectedDays = new int[] { 6, 7, 3, 5, 4, 1, 2 }; assertMultiSortResponse(expectedDays, BucketOrder.aggregation("sum_d", true), BucketOrder.aggregation("avg_l", true)); } public void testSingleValuedFieldOrderedByThreeCriteria() throws 
Exception { - int[] expectedDays = new int[] { 2, 1, 4, 5, 3, 6, 7 }; - assertMultiSortResponse(expectedDays, BucketOrder.count(false), BucketOrder.aggregation("sum_d", false), - BucketOrder.aggregation("avg_l", false)); + int[] expectedDays = new int[] { 2, 1, 4, 5, 3, 6, 7 }; + assertMultiSortResponse( + expectedDays, + BucketOrder.count(false), + BucketOrder.aggregation("sum_d", false), + BucketOrder.aggregation("avg_l", false) + ); } public void testSingleValuedFieldOrderedBySingleValueSubAggregationAscAsCompound() throws Exception { - int[] expectedDays = new int[] { 1, 2, 3, 4, 5, 6, 7 }; + int[] expectedDays = new int[] { 1, 2, 3, 4, 5, 6, 7 }; assertMultiSortResponse(expectedDays, BucketOrder.aggregation("avg_l", true)); } private void assertMultiSortResponse(int[] expectedDays, BucketOrder... order) { ZonedDateTime[] expectedKeys = Arrays.stream(expectedDays).mapToObj(d -> date(1, d)).toArray(ZonedDateTime[]::new); - SearchResponse response = client() - .prepareSearch("sort_idx") + SearchResponse response = client().prepareSearch("sort_idx") .addAggregation( - dateHistogram("histo").field("date").calendarInterval(DateHistogramInterval.DAY).order(BucketOrder.compound(order)) - .subAggregation(avg("avg_l").field("l")).subAggregation(sum("sum_d").field("d"))).get(); + dateHistogram("histo").field("date") + .calendarInterval(DateHistogramInterval.DAY) + .order(BucketOrder.compound(order)) + .subAggregation(avg("avg_l").field("l")) + .subAggregation(sum("sum_d").field("d")) + ) + .get(); assertSearchResponse(response); @@ -1570,15 +1783,16 @@ private ZonedDateTime key(Histogram.Bucket bucket) { */ public void testDateNanosHistogram() throws Exception { assertAcked(prepareCreate("nanos").setMapping("date", "type=date_nanos").get()); - indexRandom(true, - client().prepareIndex("nanos").setId("1").setSource("date", "2000-01-01")); - indexRandom(true, - client().prepareIndex("nanos").setId("2").setSource("date", "2000-01-02")); + indexRandom(true, client().prepareIndex("nanos").setId("1").setSource("date", "2000-01-01")); + indexRandom(true, client().prepareIndex("nanos").setId("2").setSource("date", "2000-01-02")); - //Search interval 24 hours + // Search interval 24 hours SearchResponse r = client().prepareSearch("nanos") - .addAggregation(dateHistogram("histo").field("date"). 
- fixedInterval(DateHistogramInterval.seconds(60 * 60 * 24)).timeZone(ZoneId.of("Europe/Berlin"))) + .addAggregation( + dateHistogram("histo").field("date") + .fixedInterval(DateHistogramInterval.seconds(60 * 60 * 24)) + .timeZone(ZoneId.of("Europe/Berlin")) + ) .addDocValueField("date") .get(); assertSearchResponse(r); @@ -1586,14 +1800,15 @@ public void testDateNanosHistogram() throws Exception { Histogram histogram = r.getAggregations().get("histo"); List buckets = histogram.getBuckets(); assertEquals(2, buckets.size()); - assertEquals(946681200000L, ((ZonedDateTime)buckets.get(0).getKey()).toEpochSecond() * 1000); + assertEquals(946681200000L, ((ZonedDateTime) buckets.get(0).getKey()).toEpochSecond() * 1000); assertEquals(1, buckets.get(0).getDocCount()); - assertEquals(946767600000L, ((ZonedDateTime)buckets.get(1).getKey()).toEpochSecond() * 1000); + assertEquals(946767600000L, ((ZonedDateTime) buckets.get(1).getKey()).toEpochSecond() * 1000); assertEquals(1, buckets.get(1).getDocCount()); r = client().prepareSearch("nanos") - .addAggregation(dateHistogram("histo").field("date") - .fixedInterval(DateHistogramInterval.seconds(60 * 60 * 24)).timeZone(ZoneId.of("UTC"))) + .addAggregation( + dateHistogram("histo").field("date").fixedInterval(DateHistogramInterval.seconds(60 * 60 * 24)).timeZone(ZoneId.of("UTC")) + ) .addDocValueField("date") .get(); assertSearchResponse(r); @@ -1601,19 +1816,18 @@ public void testDateNanosHistogram() throws Exception { histogram = r.getAggregations().get("histo"); buckets = histogram.getBuckets(); assertEquals(2, buckets.size()); - assertEquals(946684800000L, ((ZonedDateTime)buckets.get(0).getKey()).toEpochSecond() * 1000); + assertEquals(946684800000L, ((ZonedDateTime) buckets.get(0).getKey()).toEpochSecond() * 1000); assertEquals(1, buckets.get(0).getDocCount()); - assertEquals(946771200000L, ((ZonedDateTime)buckets.get(1).getKey()).toEpochSecond() * 1000); + assertEquals(946771200000L, ((ZonedDateTime) buckets.get(1).getKey()).toEpochSecond() * 1000); assertEquals(1, buckets.get(1).getDocCount()); } public void testDateKeyFormatting() { SearchResponse response = client().prepareSearch("idx") - .addAggregation(dateHistogram("histo") - .field("date") - .calendarInterval(DateHistogramInterval.MONTH) - .timeZone(ZoneId.of("America/Edmonton"))) - .get(); + .addAggregation( + dateHistogram("histo").field("date").calendarInterval(DateHistogramInterval.MONTH).timeZone(ZoneId.of("America/Edmonton")) + ) + .get(); assertSearchResponse(response); @@ -1626,10 +1840,10 @@ public void testDateKeyFormatting() { public void testHardBoundsOnDates() { SearchResponse response = client().prepareSearch("idx") - .addAggregation(dateHistogram("histo") - .field("date") - .calendarInterval(DateHistogramInterval.DAY) - .hardBounds(new LongBounds("2012-02-01T00:00:00.000", "2012-03-03T00:00:00.000")) + .addAggregation( + dateHistogram("histo").field("date") + .calendarInterval(DateHistogramInterval.DAY) + .hardBounds(new LongBounds("2012-02-01T00:00:00.000", "2012-03-03T00:00:00.000")) ) .get(); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/DateHistogramOffsetIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/DateHistogramOffsetIT.java index aad4ef7963c4b..7c2c6128717a4 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/DateHistogramOffsetIT.java +++ 
b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/DateHistogramOffsetIT.java @@ -36,7 +36,7 @@ * tests using all versions */ @ESIntegTestCase.SuiteScopeTestCase -@ESIntegTestCase.ClusterScope(scope= ESIntegTestCase.Scope.SUITE) +@ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.SUITE) public class DateHistogramOffsetIT extends ESIntegTestCase { private static final String DATE_FORMAT = "yyyy-MM-dd:hh-mm-ss"; @@ -56,13 +56,14 @@ public void afterEachTest() throws IOException { internalCluster().wipeIndices("idx2"); } - private void prepareIndex(ZonedDateTime date, int numHours, int stepSizeHours, int idxIdStart) - throws IOException, InterruptedException { + private void prepareIndex(ZonedDateTime date, int numHours, int stepSizeHours, int idxIdStart) throws IOException, + InterruptedException { IndexRequestBuilder[] reqs = new IndexRequestBuilder[numHours]; for (int i = idxIdStart; i < idxIdStart + reqs.length; i++) { - reqs[i - idxIdStart] = client().prepareIndex("idx2").setId("" + i) - .setSource(jsonBuilder().startObject().timeField("date", date).endObject()); + reqs[i - idxIdStart] = client().prepareIndex("idx2") + .setId("" + i) + .setSource(jsonBuilder().startObject().timeField("date", date).endObject()); date = date.plusHours(stepSizeHours); } indexRandom(true, reqs); @@ -72,13 +73,11 @@ public void testSingleValueWithPositiveOffset() throws Exception { prepareIndex(date("2014-03-11T00:00:00+00:00"), 5, 1, 0); SearchResponse response = client().prepareSearch("idx2") - .setQuery(matchAllQuery()) - .addAggregation(dateHistogram("date_histo") - .field("date") - .offset("2h") - .format(DATE_FORMAT) - .fixedInterval(DateHistogramInterval.DAY)) - .get(); + .setQuery(matchAllQuery()) + .addAggregation( + dateHistogram("date_histo").field("date").offset("2h").format(DATE_FORMAT).fixedInterval(DateHistogramInterval.DAY) + ) + .get(); assertThat(response.getHits().getTotalHits().value, equalTo(5L)); @@ -94,13 +93,11 @@ public void testSingleValueWithNegativeOffset() throws Exception { prepareIndex(date("2014-03-11T00:00:00+00:00"), 5, -1, 0); SearchResponse response = client().prepareSearch("idx2") - .setQuery(matchAllQuery()) - .addAggregation(dateHistogram("date_histo") - .field("date") - .offset("-2h") - .format(DATE_FORMAT) - .fixedInterval(DateHistogramInterval.DAY)) - .get(); + .setQuery(matchAllQuery()) + .addAggregation( + dateHistogram("date_histo").field("date").offset("-2h").format(DATE_FORMAT).fixedInterval(DateHistogramInterval.DAY) + ) + .get(); assertThat(response.getHits().getTotalHits().value, equalTo(5L)); @@ -120,14 +117,15 @@ public void testSingleValueWithOffsetMinDocCount() throws Exception { prepareIndex(date("2014-03-14T00:00:00+00:00"), 12, 1, 13); SearchResponse response = client().prepareSearch("idx2") - .setQuery(matchAllQuery()) - .addAggregation(dateHistogram("date_histo") - .field("date") - .offset("6h") - .minDocCount(0) - .format(DATE_FORMAT) - .fixedInterval(DateHistogramInterval.DAY)) - .get(); + .setQuery(matchAllQuery()) + .addAggregation( + dateHistogram("date_histo").field("date") + .offset("6h") + .minDocCount(0) + .format(DATE_FORMAT) + .fixedInterval(DateHistogramInterval.DAY) + ) + .get(); assertThat(response.getHits().getTotalHits().value, equalTo(24L)); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/DateRangeIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/DateRangeIT.java index a7340c8bee273..bba29ac405a84 100644 --- 
a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/DateRangeIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/DateRangeIT.java @@ -55,12 +55,17 @@ public class DateRangeIT extends ESIntegTestCase { private static IndexRequestBuilder indexDoc(int month, int day, int value) throws Exception { - return client().prepareIndex("idx").setSource(jsonBuilder() - .startObject() - .field("value", value) - .timeField("date", date(month, day)) - .startArray("dates").timeValue(date(month, day)).timeValue(date(month + 1, day + 1)).endArray() - .endObject()); + return client().prepareIndex("idx") + .setSource( + jsonBuilder().startObject() + .field("value", value) + .timeField("date", date(month, day)) + .startArray("dates") + .timeValue(date(month, day)) + .timeValue(date(month + 1, day + 1)) + .endArray() + .endObject() + ); } private static ZonedDateTime date(int month, int day) { @@ -72,6 +77,7 @@ private static ZonedDateTime date(int month, int day, ZoneId timezone) { } private static int numDocs; + @Override public void setupSuiteScopeCluster() throws Exception { createIndex("idx"); @@ -80,14 +86,16 @@ public void setupSuiteScopeCluster() throws Exception { numDocs = randomIntBetween(7, 20); List docs = new ArrayList<>(); - docs.addAll(Arrays.asList( + docs.addAll( + Arrays.asList( indexDoc(1, 2, 1), // Jan 2 indexDoc(2, 2, 2), // Feb 2 indexDoc(2, 15, 3), // Feb 15 indexDoc(3, 2, 4), // Mar 2 indexDoc(3, 15, 5), // Mar 15 indexDoc(3, 23, 6) // Mar 23 - )); + ) + ); // dummy docs for (int i = docs.size(); i < numDocs; ++i) { @@ -95,10 +103,11 @@ public void setupSuiteScopeCluster() throws Exception { } assertAcked(prepareCreate("empty_bucket_idx").setMapping("value", "type=integer")); for (int i = 0; i < 2; i++) { - docs.add(client().prepareIndex("empty_bucket_idx").setId(""+i).setSource(jsonBuilder() - .startObject() - .field("value", i*2) - .endObject())); + docs.add( + client().prepareIndex("empty_bucket_idx") + .setId("" + i) + .setSource(jsonBuilder().startObject().field("value", i * 2).endObject()) + ); } indexRandom(true, docs); ensureSearchable(); @@ -118,11 +127,14 @@ public void testDateMath() throws Exception { } else { rangeBuilder.script(new Script(ScriptType.INLINE, "mockscript", DateScriptMocksPlugin.EXTRACT_FIELD, params)); } - SearchResponse response = client() - .prepareSearch("idx") - .addAggregation( - rangeBuilder.addUnboundedTo("a long time ago", "now-50y").addRange("recently", "now-50y", "now-1y") - .addUnboundedFrom("last year", "now-1y").timeZone(ZoneId.of("Etc/GMT+5"))).get(); + SearchResponse response = client().prepareSearch("idx") + .addAggregation( + rangeBuilder.addUnboundedTo("a long time ago", "now-50y") + .addRange("recently", "now-50y", "now-1y") + .addUnboundedFrom("last year", "now-1y") + .timeZone(ZoneId.of("Etc/GMT+5")) + ) + .get(); assertSearchResponse(response); @@ -151,16 +163,16 @@ public void testDateMath() throws Exception { public void testSingleValueField() throws Exception { SearchResponse response = client().prepareSearch("idx") - .addAggregation(dateRange("range") - .field("date") - .addUnboundedTo(date(2, 15)) - .addRange(date(2, 15), date(3, 15)) - .addUnboundedFrom(date(3, 15))) - .get(); + .addAggregation( + dateRange("range").field("date") + .addUnboundedTo(date(2, 15)) + .addRange(date(2, 15), date(3, 15)) + .addUnboundedFrom(date(3, 15)) + ) + .get(); assertSearchResponse(response); - Range range = response.getAggregations().get("range"); assertThat(range, 
notNullValue()); assertThat(range.getName(), equalTo("range")); @@ -197,16 +209,16 @@ public void testSingleValueField() throws Exception { public void testSingleValueFieldWithStringDates() throws Exception { SearchResponse response = client().prepareSearch("idx") - .addAggregation(dateRange("range") - .field("date") - .addUnboundedTo("2012-02-15") - .addRange("2012-02-15", "2012-03-15") - .addUnboundedFrom("2012-03-15")) - .get(); + .addAggregation( + dateRange("range").field("date") + .addUnboundedTo("2012-02-15") + .addRange("2012-02-15", "2012-03-15") + .addUnboundedFrom("2012-03-15") + ) + .get(); assertSearchResponse(response); - Range range = response.getAggregations().get("range"); assertThat(range, notNullValue()); assertThat(range.getName(), equalTo("range")); @@ -243,17 +255,17 @@ public void testSingleValueFieldWithStringDates() throws Exception { public void testSingleValueFieldWithStringDatesWithCustomFormat() throws Exception { SearchResponse response = client().prepareSearch("idx") - .addAggregation(dateRange("range") - .field("date") - .format("yyyy-MM-dd") - .addUnboundedTo("2012-02-15") - .addRange("2012-02-15", "2012-03-15") - .addUnboundedFrom("2012-03-15")) - .get(); + .addAggregation( + dateRange("range").field("date") + .format("yyyy-MM-dd") + .addUnboundedTo("2012-02-15") + .addRange("2012-02-15", "2012-03-15") + .addUnboundedFrom("2012-03-15") + ) + .get(); assertSearchResponse(response); - Range range = response.getAggregations().get("range"); assertThat(range, notNullValue()); assertThat(range.getName(), equalTo("range")); @@ -291,19 +303,20 @@ public void testSingleValueFieldWithStringDatesWithCustomFormat() throws Excepti public void testSingleValueFieldWithDateMath() throws Exception { ZoneId timezone = randomZone(); int timeZoneOffset = timezone.getRules().getOffset(date(2, 15).toInstant()).getTotalSeconds(); - //there is a daylight saving time change on 11th March so suffix will be different - String feb15Suffix = timeZoneOffset == 0 ? "Z" : date(2,15, timezone).format(DateTimeFormatter.ofPattern("xxx", Locale.ROOT)); - String mar15Suffix = timeZoneOffset == 0 ? "Z" : date(3,15, timezone).format(DateTimeFormatter.ofPattern("xxx", Locale.ROOT)); + // there is a daylight saving time change on 11th March so suffix will be different + String feb15Suffix = timeZoneOffset == 0 ? "Z" : date(2, 15, timezone).format(DateTimeFormatter.ofPattern("xxx", Locale.ROOT)); + String mar15Suffix = timeZoneOffset == 0 ? "Z" : date(3, 15, timezone).format(DateTimeFormatter.ofPattern("xxx", Locale.ROOT)); long expectedFirstBucketCount = timeZoneOffset < 0 ? 
3L : 2L; SearchResponse response = client().prepareSearch("idx") - .addAggregation(dateRange("range") - .field("date") - .addUnboundedTo("2012-02-15") - .addRange("2012-02-15", "2012-02-15||+1M") - .addUnboundedFrom("2012-02-15||+1M") - .timeZone(timezone)) - .get(); + .addAggregation( + dateRange("range").field("date") + .addUnboundedTo("2012-02-15") + .addRange("2012-02-15", "2012-02-15||+1M") + .addUnboundedFrom("2012-02-15||+1M") + .timeZone(timezone) + ) + .get(); assertSearchResponse(response); @@ -324,8 +337,7 @@ public void testSingleValueFieldWithDateMath() throws Exception { bucket = buckets.get(1); assertThat(bucket, notNullValue()); - assertThat((String) bucket.getKey(), equalTo("2012-02-15T00:00:00.000" + feb15Suffix + - "-2012-03-15T00:00:00.000" + mar15Suffix)); + assertThat((String) bucket.getKey(), equalTo("2012-02-15T00:00:00.000" + feb15Suffix + "-2012-03-15T00:00:00.000" + mar15Suffix)); assertThat(((ZonedDateTime) bucket.getFrom()), equalTo(date(2, 15, timezone).withZoneSameInstant(ZoneOffset.UTC))); assertThat(((ZonedDateTime) bucket.getTo()), equalTo(date(3, 15, timezone).withZoneSameInstant(ZoneOffset.UTC))); assertThat(bucket.getFromAsString(), equalTo("2012-02-15T00:00:00.000" + feb15Suffix)); @@ -344,16 +356,16 @@ public void testSingleValueFieldWithDateMath() throws Exception { public void testSingleValueFieldWithCustomKey() throws Exception { SearchResponse response = client().prepareSearch("idx") - .addAggregation(dateRange("range") - .field("date") - .addUnboundedTo("r1", date(2, 15)) - .addRange("r2", date(2, 15), date(3, 15)) - .addUnboundedFrom("r3", date(3, 15))) - .get(); + .addAggregation( + dateRange("range").field("date") + .addUnboundedTo("r1", date(2, 15)) + .addRange("r2", date(2, 15), date(3, 15)) + .addUnboundedFrom("r3", date(3, 15)) + ) + .get(); assertSearchResponse(response); - Range range = response.getAggregations().get("range"); assertThat(range, notNullValue()); assertThat(range.getName(), equalTo("range")); @@ -399,26 +411,26 @@ public void testSingleValueFieldWithCustomKey() throws Exception { public void testSingleValuedFieldWithSubAggregation() throws Exception { SearchResponse response = client().prepareSearch("idx") - .addAggregation(dateRange("range") - .field("date") - .addUnboundedTo("r1", date(2, 15)) - .addRange("r2", date(2, 15), date(3, 15)) - .addUnboundedFrom("r3", date(3, 15)) - .subAggregation(sum("sum").field("value"))) - .get(); + .addAggregation( + dateRange("range").field("date") + .addUnboundedTo("r1", date(2, 15)) + .addRange("r2", date(2, 15), date(3, 15)) + .addUnboundedFrom("r3", date(3, 15)) + .subAggregation(sum("sum").field("value")) + ) + .get(); assertSearchResponse(response); - Range range = response.getAggregations().get("range"); assertThat(range, notNullValue()); assertThat(range.getName(), equalTo("range")); List buckets = range.getBuckets(); assertThat(buckets.size(), equalTo(3)); - assertThat(((InternalAggregation)range).getProperty("_bucket_count"), equalTo(3)); - Object[] propertiesKeys = (Object[]) ((InternalAggregation)range).getProperty("_key"); - Object[] propertiesDocCounts = (Object[]) ((InternalAggregation)range).getProperty("_count"); - Object[] propertiesCounts = (Object[]) ((InternalAggregation)range).getProperty("sum.value"); + assertThat(((InternalAggregation) range).getProperty("_bucket_count"), equalTo(3)); + Object[] propertiesKeys = (Object[]) ((InternalAggregation) range).getProperty("_key"); + Object[] propertiesDocCounts = (Object[]) ((InternalAggregation) 
range).getProperty("_count"); + Object[] propertiesCounts = (Object[]) ((InternalAggregation) range).getProperty("sum.value"); Range.Bucket bucket = buckets.get(0); assertThat(bucket, notNullValue()); @@ -464,8 +476,6 @@ public void testSingleValuedFieldWithSubAggregation() throws Exception { assertThat((long) propertiesDocCounts[2], equalTo(numDocs - 4L)); } - - /* Jan 2, Feb 3, 1 Feb 2, Mar 3, 2 @@ -477,16 +487,16 @@ public void testSingleValuedFieldWithSubAggregation() throws Exception { public void testMultiValuedField() throws Exception { SearchResponse response = client().prepareSearch("idx") - .addAggregation(dateRange("range") - .field("dates") - .addUnboundedTo(date(2, 15)) - .addRange(date(2, 15), date(3, 15)) - .addUnboundedFrom(date(3, 15))) - .get(); + .addAggregation( + dateRange("range").field("dates") + .addUnboundedTo(date(2, 15)) + .addRange(date(2, 15), date(3, 15)) + .addUnboundedFrom(date(3, 15)) + ) + .get(); assertSearchResponse(response); - Range range = response.getAggregations().get("range"); assertThat(range, notNullValue()); assertThat(range.getName(), equalTo("range")); @@ -530,15 +540,18 @@ public void testMultiValuedField() throws Exception { Apr 23, May 24 6 */ - public void testMultiValuedFieldWithValueScript() throws Exception { Map params = new HashMap<>(); params.put("fieldname", "dates"); SearchResponse response = client().prepareSearch("idx") - .addAggregation(dateRange("range") - .field("dates") - .script(new Script(ScriptType.INLINE, "mockscript", DateScriptMocksPlugin.DOUBLE_PLUS_ONE_MONTH, params)) - .addUnboundedTo(date(2, 15)).addRange(date(2, 15), date(3, 15)).addUnboundedFrom(date(3, 15))).get(); + .addAggregation( + dateRange("range").field("dates") + .script(new Script(ScriptType.INLINE, "mockscript", DateScriptMocksPlugin.DOUBLE_PLUS_ONE_MONTH, params)) + .addUnboundedTo(date(2, 15)) + .addRange(date(2, 15), date(3, 15)) + .addUnboundedFrom(date(3, 15)) + ) + .get(); assertSearchResponse(response); @@ -576,8 +589,6 @@ public void testMultiValuedFieldWithValueScript() throws Exception { assertThat(bucket.getDocCount(), equalTo(numDocs - 1L)); } - - /* Feb 2, Mar 3, 1 Mar 2, Apr 3, 2 @@ -591,16 +602,16 @@ public void testScriptSingleValue() throws Exception { Map params = new HashMap<>(); params.put("fieldname", "date"); SearchResponse response = client().prepareSearch("idx") - .addAggregation(dateRange("range") - .script(new Script(ScriptType.INLINE, "mockscript", DateScriptMocksPlugin.EXTRACT_FIELD, params)) - .addUnboundedTo(date(2, 15)) - .addRange(date(2, 15), date(3, 15)) - .addUnboundedFrom(date(3, 15))) - .get(); + .addAggregation( + dateRange("range").script(new Script(ScriptType.INLINE, "mockscript", DateScriptMocksPlugin.EXTRACT_FIELD, params)) + .addUnboundedTo(date(2, 15)) + .addRange(date(2, 15), date(3, 15)) + .addUnboundedFrom(date(3, 15)) + ) + .get(); assertSearchResponse(response); - Range range = response.getAggregations().get("range"); assertThat(range, notNullValue()); assertThat(range.getName(), equalTo("range")); @@ -635,10 +646,6 @@ public void testScriptSingleValue() throws Exception { assertThat(bucket.getDocCount(), equalTo(numDocs - 4L)); } - - - - /* Jan 2, Feb 3, 1 Feb 2, Mar 3, 2 @@ -651,12 +658,14 @@ public void testScriptSingleValue() throws Exception { public void testScriptMultiValued() throws Exception { Map params = new HashMap<>(); params.put("fieldname", "dates"); - SearchResponse response = client() - .prepareSearch("idx") - .addAggregation( - dateRange("range").script(new Script(ScriptType.INLINE, 
"mockscript", DateScriptMocksPlugin.EXTRACT_FIELD, params)) - .addUnboundedTo(date(2, 15)).addRange(date(2, 15), date(3, 15)) - .addUnboundedFrom(date(3, 15))).get(); + SearchResponse response = client().prepareSearch("idx") + .addAggregation( + dateRange("range").script(new Script(ScriptType.INLINE, "mockscript", DateScriptMocksPlugin.EXTRACT_FIELD, params)) + .addUnboundedTo(date(2, 15)) + .addRange(date(2, 15), date(3, 15)) + .addUnboundedFrom(date(3, 15)) + ) + .get(); assertSearchResponse(response); @@ -698,16 +707,16 @@ public void testUnmapped() throws Exception { client().admin().cluster().prepareHealth("idx_unmapped").setWaitForYellowStatus().get(); SearchResponse response = client().prepareSearch("idx_unmapped") - .addAggregation(dateRange("range") - .field("date") - .addUnboundedTo(date(2, 15)) - .addRange(date(2, 15), date(3, 15)) - .addUnboundedFrom(date(3, 15))) - .get(); + .addAggregation( + dateRange("range").field("date") + .addUnboundedTo(date(2, 15)) + .addRange(date(2, 15), date(3, 15)) + .addUnboundedFrom(date(3, 15)) + ) + .get(); assertSearchResponse(response); - Range range = response.getAggregations().get("range"); assertThat(range, notNullValue()); assertThat(range.getName(), equalTo("range")); @@ -744,16 +753,16 @@ public void testUnmapped() throws Exception { public void testUnmappedWithStringDates() throws Exception { SearchResponse response = client().prepareSearch("idx_unmapped") - .addAggregation(dateRange("range") - .field("date") - .addUnboundedTo("2012-02-15") - .addRange("2012-02-15", "2012-03-15") - .addUnboundedFrom("2012-03-15")) - .get(); + .addAggregation( + dateRange("range").field("date") + .addUnboundedTo("2012-02-15") + .addRange("2012-02-15", "2012-03-15") + .addUnboundedFrom("2012-03-15") + ) + .get(); assertSearchResponse(response); - Range range = response.getAggregations().get("range"); assertThat(range, notNullValue()); assertThat(range.getName(), equalTo("range")); @@ -790,16 +799,16 @@ public void testUnmappedWithStringDates() throws Exception { public void testPartiallyUnmapped() throws Exception { SearchResponse response = client().prepareSearch("idx", "idx_unmapped") - .addAggregation(dateRange("range") - .field("date") - .addUnboundedTo(date(2, 15)) - .addRange(date(2, 15), date(3, 15)) - .addUnboundedFrom(date(3, 15))) - .get(); + .addAggregation( + dateRange("range").field("date") + .addUnboundedTo(date(2, 15)) + .addRange(date(2, 15), date(3, 15)) + .addUnboundedFrom(date(3, 15)) + ) + .get(); assertSearchResponse(response); - Range range = response.getAggregations().get("range"); assertThat(range, notNullValue()); assertThat(range.getName(), equalTo("range")); @@ -836,10 +845,14 @@ public void testPartiallyUnmapped() throws Exception { public void testEmptyAggregation() throws Exception { SearchResponse searchResponse = client().prepareSearch("empty_bucket_idx") - .setQuery(matchAllQuery()) - .addAggregation(histogram("histo").field("value").interval(1L).minDocCount(0) - .subAggregation(dateRange("date_range").field("value").addRange("0-1", 0, 1))) - .get(); + .setQuery(matchAllQuery()) + .addAggregation( + histogram("histo").field("value") + .interval(1L) + .minDocCount(0) + .subAggregation(dateRange("date_range").field("value").addRange("0-1", 0, 1)) + ) + .get(); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(2L)); Histogram histo = searchResponse.getAggregations().get("histo"); @@ -859,13 +872,11 @@ public void testEmptyAggregation() throws Exception { 
assertThat(buckets.get(0).getAggregations().asList().isEmpty(), is(true)); } - public void testNoRangesInQuery() { + public void testNoRangesInQuery() { try { - client().prepareSearch("idx") - .addAggregation(dateRange("my_date_range_agg").field("value")) - .get(); + client().prepareSearch("idx").addAggregation(dateRange("my_date_range_agg").field("value")).get(); fail(); - } catch (SearchPhaseExecutionException spee){ + } catch (SearchPhaseExecutionException spee) { Throwable rootCause = spee.getCause().getCause(); assertThat(rootCause, instanceOf(IllegalArgumentException.class)); assertEquals(rootCause.getMessage(), "No [ranges] specified for the [my_date_range_agg] aggregation"); @@ -877,60 +888,156 @@ public void testNoRangesInQuery() { * Ensure requests using nondeterministic scripts do not get cached. */ public void testScriptCaching() throws Exception { - assertAcked(prepareCreate("cache_test_idx").setMapping("date", "type=date") + assertAcked( + prepareCreate("cache_test_idx").setMapping("date", "type=date") .setSettings(Settings.builder().put("requests.cache.enable", true).put("number_of_shards", 1).put("number_of_replicas", 1)) - .get()); - indexRandom(true, - client().prepareIndex("cache_test_idx").setId("1") - .setSource(jsonBuilder().startObject().timeField("date", date(1, 1)).endObject()), - client().prepareIndex("cache_test_idx").setId("2") - .setSource(jsonBuilder().startObject().timeField("date", date(2, 1)).endObject())); + .get() + ); + indexRandom( + true, + client().prepareIndex("cache_test_idx") + .setId("1") + .setSource(jsonBuilder().startObject().timeField("date", date(1, 1)).endObject()), + client().prepareIndex("cache_test_idx") + .setId("2") + .setSource(jsonBuilder().startObject().timeField("date", date(2, 1)).endObject()) + ); // Make sure we are starting with a clear cache - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getHitCount(), equalTo(0L)); - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getMissCount(), equalTo(0L)); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getHitCount(), + equalTo(0L) + ); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getMissCount(), + equalTo(0L) + ); // Test that a request using a nondeterministic script does not get cached Map params = new HashMap<>(); params.put("fieldname", "date"); - SearchResponse r = client().prepareSearch("cache_test_idx").setSize(0).addAggregation(dateRange("foo").field("date") - .script(new Script(ScriptType.INLINE, "mockscript", DateScriptMocksPlugin.CURRENT_DATE, params)) - .addRange(ZonedDateTime.of(2012, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC), - ZonedDateTime.of(2013, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC))) - .get(); + SearchResponse r = client().prepareSearch("cache_test_idx") + .setSize(0) + .addAggregation( + dateRange("foo").field("date") + .script(new Script(ScriptType.INLINE, "mockscript", DateScriptMocksPlugin.CURRENT_DATE, params)) + .addRange( + ZonedDateTime.of(2012, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC), + ZonedDateTime.of(2013, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC) + ) + ) + .get(); assertSearchResponse(r); - 
assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getHitCount(), equalTo(0L)); - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getMissCount(), equalTo(0L)); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getHitCount(), + equalTo(0L) + ); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getMissCount(), + equalTo(0L) + ); // Test that a request using a deterministic script gets cached - r = client().prepareSearch("cache_test_idx").setSize(0).addAggregation(dateRange("foo").field("date") - .script(new Script(ScriptType.INLINE, "mockscript", DateScriptMocksPlugin.DOUBLE_PLUS_ONE_MONTH, params)) - .addRange(ZonedDateTime.of(2012, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC), - ZonedDateTime.of(2013, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC))) - .get(); + r = client().prepareSearch("cache_test_idx") + .setSize(0) + .addAggregation( + dateRange("foo").field("date") + .script(new Script(ScriptType.INLINE, "mockscript", DateScriptMocksPlugin.DOUBLE_PLUS_ONE_MONTH, params)) + .addRange( + ZonedDateTime.of(2012, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC), + ZonedDateTime.of(2013, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC) + ) + ) + .get(); assertSearchResponse(r); - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getHitCount(), equalTo(0L)); - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getMissCount(), equalTo(1L)); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getHitCount(), + equalTo(0L) + ); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getMissCount(), + equalTo(1L) + ); // Ensure that non-scripted requests are cached as normal - r = client().prepareSearch("cache_test_idx").setSize(0).addAggregation(dateRange("foo").field("date") - .addRange(ZonedDateTime.of(2012, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC), - ZonedDateTime.of(2013, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC))) - .get(); + r = client().prepareSearch("cache_test_idx") + .setSize(0) + .addAggregation( + dateRange("foo").field("date") + .addRange( + ZonedDateTime.of(2012, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC), + ZonedDateTime.of(2013, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC) + ) + ) + .get(); assertSearchResponse(r); - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getHitCount(), equalTo(0L)); - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getMissCount(), equalTo(2L)); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getHitCount(), + equalTo(0L) + ); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getMissCount(), + equalTo(2L) + ); } /** @@ -940,16 +1047,19 @@ public void testScriptCaching() throws 
Exception { public void testRangeWithFormatStringValue() throws Exception { String indexName = "dateformat_test_idx"; assertAcked(prepareCreate(indexName).setMapping("date", "type=date,format=strict_hour_minute_second")); - indexRandom(true, - client().prepareIndex(indexName).setId("1").setSource(jsonBuilder().startObject().field("date", "00:16:40").endObject()), - client().prepareIndex(indexName).setId("2").setSource(jsonBuilder().startObject().field("date", "00:33:20").endObject()), - client().prepareIndex(indexName).setId("3").setSource(jsonBuilder().startObject().field("date", "00:50:00").endObject())); + indexRandom( + true, + client().prepareIndex(indexName).setId("1").setSource(jsonBuilder().startObject().field("date", "00:16:40").endObject()), + client().prepareIndex(indexName).setId("2").setSource(jsonBuilder().startObject().field("date", "00:33:20").endObject()), + client().prepareIndex(indexName).setId("3").setSource(jsonBuilder().startObject().field("date", "00:50:00").endObject()) + ); // using no format should work when to/from is compatible with format in // mapping - SearchResponse searchResponse = client().prepareSearch(indexName).setSize(0) - .addAggregation(dateRange("date_range").field("date").addRange("00:16:40", "00:50:00").addRange("00:50:00", "01:06:40")) - .get(); + SearchResponse searchResponse = client().prepareSearch(indexName) + .setSize(0) + .addAggregation(dateRange("date_range").field("date").addRange("00:16:40", "00:50:00").addRange("00:50:00", "01:06:40")) + .get(); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(3L)); List buckets = checkBuckets(searchResponse.getAggregations().get("date_range"), "date_range", 2); assertBucket(buckets.get(0), 2L, "00:16:40-00:50:00", 1000000L, 3000000L); @@ -957,9 +1067,12 @@ public void testRangeWithFormatStringValue() throws Exception { // using different format should work when to/from is compatible with // format in aggregation - searchResponse = client().prepareSearch(indexName).setSize(0).addAggregation( - dateRange("date_range").field("date").addRange("00.16.40", "00.50.00").addRange("00.50.00", "01.06.40").format("HH.mm.ss")) - .get(); + searchResponse = client().prepareSearch(indexName) + .setSize(0) + .addAggregation( + dateRange("date_range").field("date").addRange("00.16.40", "00.50.00").addRange("00.50.00", "01.06.40").format("HH.mm.ss") + ) + .get(); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(3L)); buckets = checkBuckets(searchResponse.getAggregations().get("date_range"), "date_range", 2); assertBucket(buckets.get(0), 2L, "00.16.40-00.50.00", 1000000L, 3000000L); @@ -967,20 +1080,26 @@ public void testRangeWithFormatStringValue() throws Exception { // providing numeric input with format should work, but bucket keys are // different now - searchResponse = client().prepareSearch(indexName).setSize(0) - .addAggregation( - dateRange("date_range").field("date").addRange(1000000, 3000000).addRange(3000000, 4000000).format("epoch_millis")) - .get(); + searchResponse = client().prepareSearch(indexName) + .setSize(0) + .addAggregation( + dateRange("date_range").field("date").addRange(1000000, 3000000).addRange(3000000, 4000000).format("epoch_millis") + ) + .get(); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(3L)); buckets = checkBuckets(searchResponse.getAggregations().get("date_range"), "date_range", 2); assertBucket(buckets.get(0), 2L, "1000000-3000000", 1000000L, 3000000L); assertBucket(buckets.get(1), 1L, "3000000-4000000", 3000000L, 4000000L); // 
providing numeric input without format should throw an exception - ElasticsearchException e = expectThrows(ElasticsearchException.class, () -> client().prepareSearch(indexName).setSize(0) - .addAggregation(dateRange("date_range").field("date").addRange(1000000, 3000000).addRange(3000000, 4000000)).get()); - assertThat(e.getDetailedMessage(), - containsString("failed to parse date field [1000000] with format [strict_hour_minute_second]")); + ElasticsearchException e = expectThrows( + ElasticsearchException.class, + () -> client().prepareSearch(indexName) + .setSize(0) + .addAggregation(dateRange("date_range").field("date").addRange(1000000, 3000000).addRange(3000000, 4000000)) + .get() + ); + assertThat(e.getDetailedMessage(), containsString("failed to parse date field [1000000] with format [strict_hour_minute_second]")); } /** @@ -990,31 +1109,39 @@ public void testRangeWithFormatStringValue() throws Exception { public void testRangeWithFormatNumericValue() throws Exception { String indexName = "dateformat_numeric_test_idx"; assertAcked(prepareCreate(indexName).setMapping("date", "type=date,format=epoch_second")); - indexRandom(true, - client().prepareIndex(indexName).setId("1").setSource(jsonBuilder().startObject().field("date", 1002).endObject()), - client().prepareIndex(indexName).setId("2").setSource(jsonBuilder().startObject().field("date", 2000).endObject()), - client().prepareIndex(indexName).setId("3").setSource(jsonBuilder().startObject().field("date", 3008).endObject())); + indexRandom( + true, + client().prepareIndex(indexName).setId("1").setSource(jsonBuilder().startObject().field("date", 1002).endObject()), + client().prepareIndex(indexName).setId("2").setSource(jsonBuilder().startObject().field("date", 2000).endObject()), + client().prepareIndex(indexName).setId("3").setSource(jsonBuilder().startObject().field("date", 3008).endObject()) + ); // using no format should work when to/from is compatible with format in // mapping - SearchResponse searchResponse = client().prepareSearch(indexName).setSize(0) - .addAggregation(dateRange("date_range").field("date").addRange(1000, 3000).addRange(3000, 4000)).get(); + SearchResponse searchResponse = client().prepareSearch(indexName) + .setSize(0) + .addAggregation(dateRange("date_range").field("date").addRange(1000, 3000).addRange(3000, 4000)) + .get(); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(3L)); List buckets = checkBuckets(searchResponse.getAggregations().get("date_range"), "date_range", 2); assertBucket(buckets.get(0), 2L, "1000-3000", 1000000L, 3000000L); assertBucket(buckets.get(1), 1L, "3000-4000", 3000000L, 4000000L); // using no format should also work when and to/from are string values - searchResponse = client().prepareSearch(indexName).setSize(0) - .addAggregation(dateRange("date_range").field("date").addRange("1000", "3000").addRange("3000", "4000")).get(); + searchResponse = client().prepareSearch(indexName) + .setSize(0) + .addAggregation(dateRange("date_range").field("date").addRange("1000", "3000").addRange("3000", "4000")) + .get(); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(3L)); buckets = checkBuckets(searchResponse.getAggregations().get("date_range"), "date_range", 2); assertBucket(buckets.get(0), 2L, "1000-3000", 1000000L, 3000000L); assertBucket(buckets.get(1), 1L, "3000-4000", 3000000L, 4000000L); // also e-notation should work, fractional parts should be truncated - searchResponse = client().prepareSearch(indexName).setSize(0) - 
.addAggregation(dateRange("date_range").field("date").addRange(1.0e3, 3000.8123).addRange(3000.8123, 4.0e3)).get(); + searchResponse = client().prepareSearch(indexName) + .setSize(0) + .addAggregation(dateRange("date_range").field("date").addRange(1.0e3, 3000.8123).addRange(3000.8123, 4.0e3)) + .get(); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(3L)); buckets = checkBuckets(searchResponse.getAggregations().get("date_range"), "date_range", 2); assertBucket(buckets.get(0), 2L, "1000-3000", 1000000L, 3000000L); @@ -1022,9 +1149,12 @@ public void testRangeWithFormatNumericValue() throws Exception { // using different format should work when to/from is compatible with // format in aggregation - searchResponse = client().prepareSearch(indexName).setSize(0).addAggregation( - dateRange("date_range").field("date").addRange("00.16.40", "00.50.00").addRange("00.50.00", "01.06.40").format("HH.mm.ss")) - .get(); + searchResponse = client().prepareSearch(indexName) + .setSize(0) + .addAggregation( + dateRange("date_range").field("date").addRange("00.16.40", "00.50.00").addRange("00.50.00", "01.06.40").format("HH.mm.ss") + ) + .get(); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(3L)); buckets = checkBuckets(searchResponse.getAggregations().get("date_range"), "date_range", 2); assertBucket(buckets.get(0), 2L, "00.16.40-00.50.00", 1000000L, 3000000L); @@ -1032,10 +1162,12 @@ public void testRangeWithFormatNumericValue() throws Exception { // providing different numeric input with format should work, but bucket // keys are different now - searchResponse = client().prepareSearch(indexName).setSize(0) - .addAggregation( - dateRange("date_range").field("date").addRange(1000000, 3000000).addRange(3000000, 4000000).format("epoch_millis")) - .get(); + searchResponse = client().prepareSearch(indexName) + .setSize(0) + .addAggregation( + dateRange("date_range").field("date").addRange(1000000, 3000000).addRange(3000000, 4000000).format("epoch_millis") + ) + .get(); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(3L)); buckets = checkBuckets(searchResponse.getAggregations().get("date_range"), "date_range", 2); assertBucket(buckets.get(0), 2L, "1000000-3000000", 1000000L, 3000000L); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/DiversifiedSamplerIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/DiversifiedSamplerIT.java index 85fdef887d87b..62c8d704c669b 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/DiversifiedSamplerIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/DiversifiedSamplerIT.java @@ -49,42 +49,47 @@ public String randomExecutionHint() { return randomBoolean() ? 
null : randomFrom(SamplerAggregator.ExecutionMode.values()).toString(); } - @Override public void setupSuiteScopeCluster() throws Exception { - assertAcked(prepareCreate("test").setSettings( - Settings.builder().put(SETTING_NUMBER_OF_SHARDS, NUM_SHARDS).put(SETTING_NUMBER_OF_REPLICAS, 0)).setMapping( - "author", "type=keyword", "name", "type=keyword", "genre", - "type=keyword", "price", "type=float")); + assertAcked( + prepareCreate("test").setSettings( + Settings.builder().put(SETTING_NUMBER_OF_SHARDS, NUM_SHARDS).put(SETTING_NUMBER_OF_REPLICAS, 0) + ).setMapping("author", "type=keyword", "name", "type=keyword", "genre", "type=keyword", "price", "type=float") + ); createIndex("idx_unmapped"); // idx_unmapped_author is same as main index but missing author field - assertAcked(prepareCreate("idx_unmapped_author").setSettings( - Settings.builder().put(SETTING_NUMBER_OF_SHARDS, NUM_SHARDS).put(SETTING_NUMBER_OF_REPLICAS, 0)) - .setMapping("name", "type=keyword", "genre", "type=keyword", "price", - "type=float")); + assertAcked( + prepareCreate("idx_unmapped_author").setSettings( + Settings.builder().put(SETTING_NUMBER_OF_SHARDS, NUM_SHARDS).put(SETTING_NUMBER_OF_REPLICAS, 0) + ).setMapping("name", "type=keyword", "genre", "type=keyword", "price", "type=float") + ); ensureGreen(); String data[] = { - // "id,cat,name,price,inStock,author_t,series_t,sequence_i,genre_s", - "0553573403,book,A Game of Thrones,7.99,true,George R.R. Martin,A Song of Ice and Fire,1,fantasy", - "0553579908,book,A Clash of Kings,7.99,true,George R.R. Martin,A Song of Ice and Fire,2,fantasy", - "055357342X,book,A Storm of Swords,7.99,true,George R.R. Martin,A Song of Ice and Fire,3,fantasy", - "0553293354,book,Foundation,17.99,true,Isaac Asimov,Foundation Novels,1,scifi", - "0812521390,book,The Black Company,6.99,false,Glen Cook,The Chronicles of The Black Company,1,fantasy", - "0812550706,book,Ender's Game,6.99,true,Orson Scott Card,Ender,1,scifi", - "0441385532,book,Jhereg,7.95,false,Steven Brust,Vlad Taltos,1,fantasy", - "0380014300,book,Nine Princes In Amber,6.99,true,Roger Zelazny,the Chronicles of Amber,1,fantasy", - "0805080481,book,The Book of Three,5.99,true,Lloyd Alexander,The Chronicles of Prydain,1,fantasy", - "080508049X,book,The Black Cauldron,5.99,true,Lloyd Alexander,The Chronicles of Prydain,2,fantasy" + // "id,cat,name,price,inStock,author_t,series_t,sequence_i,genre_s", + "0553573403,book,A Game of Thrones,7.99,true,George R.R. Martin,A Song of Ice and Fire,1,fantasy", + "0553579908,book,A Clash of Kings,7.99,true,George R.R. Martin,A Song of Ice and Fire,2,fantasy", + "055357342X,book,A Storm of Swords,7.99,true,George R.R. 
Martin,A Song of Ice and Fire,3,fantasy", + "0553293354,book,Foundation,17.99,true,Isaac Asimov,Foundation Novels,1,scifi", + "0812521390,book,The Black Company,6.99,false,Glen Cook,The Chronicles of The Black Company,1,fantasy", + "0812550706,book,Ender's Game,6.99,true,Orson Scott Card,Ender,1,scifi", + "0441385532,book,Jhereg,7.95,false,Steven Brust,Vlad Taltos,1,fantasy", + "0380014300,book,Nine Princes In Amber,6.99,true,Roger Zelazny,the Chronicles of Amber,1,fantasy", + "0805080481,book,The Book of Three,5.99,true,Lloyd Alexander,The Chronicles of Prydain,1,fantasy", + "080508049X,book,The Black Cauldron,5.99,true,Lloyd Alexander,The Chronicles of Prydain,2,fantasy" - }; + }; for (int i = 0; i < data.length; i++) { String[] parts = data[i].split(","); - client().prepareIndex("test").setId("" + i) - .setSource("author", parts[5], "name", parts[2], "genre", parts[8], "price", Float.parseFloat(parts[3])).get(); - client().prepareIndex("idx_unmapped_author").setId("" + i) - .setSource("name", parts[2], "genre", parts[8], "price", Float.parseFloat(parts[3])).get(); + client().prepareIndex("test") + .setId("" + i) + .setSource("author", parts[5], "name", parts[2], "genre", parts[8], "price", Float.parseFloat(parts[3])) + .get(); + client().prepareIndex("idx_unmapped_author") + .setId("" + i) + .setSource("name", parts[2], "genre", parts[8], "price", Float.parseFloat(parts[3])) + .get(); } client().admin().indices().refresh(new RefreshRequest("test")).get(); } @@ -93,13 +98,14 @@ public void testIssue10719() throws Exception { // Tests that we can refer to nested elements under a sample in a path // statement boolean asc = randomBoolean(); - SearchResponse response = client().prepareSearch("test").setSearchType(SearchType.QUERY_THEN_FETCH) - .addAggregation(terms("genres") - .field("genre") - .order(BucketOrder.aggregation("sample>max_price.value", asc)) - .subAggregation(sampler("sample").shardSize(100) - .subAggregation(max("max_price").field("price"))) - ).get(); + SearchResponse response = client().prepareSearch("test") + .setSearchType(SearchType.QUERY_THEN_FETCH) + .addAggregation( + terms("genres").field("genre") + .order(BucketOrder.aggregation("sample>max_price.value", asc)) + .subAggregation(sampler("sample").shardSize(100).subAggregation(max("max_price").field("price"))) + ) + .get(); assertSearchResponse(response); Terms genres = response.getAggregations().get("genres"); Collection genreBuckets = genres.getBuckets(); @@ -126,11 +132,12 @@ public void testSimpleDiversity() throws Exception { sampleAgg.field("author").maxDocsPerValue(MAX_DOCS_PER_AUTHOR).executionHint(randomExecutionHint()); sampleAgg.subAggregation(terms("authors").field("author")); SearchResponse response = client().prepareSearch("test") - .setSearchType(SearchType.QUERY_THEN_FETCH) - .setQuery(new TermQueryBuilder("genre", "fantasy")) - .setFrom(0).setSize(60) - .addAggregation(sampleAgg) - .get(); + .setSearchType(SearchType.QUERY_THEN_FETCH) + .setQuery(new TermQueryBuilder("genre", "fantasy")) + .setFrom(0) + .setSize(60) + .addAggregation(sampleAgg) + .get(); assertSearchResponse(response); Sampler sample = response.getAggregations().get("sample"); Terms authors = sample.getAggregations().get("authors"); @@ -151,8 +158,7 @@ public void testNestedDiversity() throws Exception { sampleAgg.subAggregation(terms("authors").field("author")); rootTerms.subAggregation(sampleAgg); - SearchResponse response = client().prepareSearch("test").setSearchType(SearchType.QUERY_THEN_FETCH) - .addAggregation(rootTerms).get(); 
+ SearchResponse response = client().prepareSearch("test").setSearchType(SearchType.QUERY_THEN_FETCH).addAggregation(rootTerms).get(); assertSearchResponse(response); Terms genres = response.getAggregations().get("genres"); List genreBuckets = genres.getBuckets(); @@ -172,8 +178,8 @@ public void testNestedSamples() throws Exception { int MAX_DOCS_PER_AUTHOR = 1; int MAX_DOCS_PER_GENRE = 2; DiversifiedAggregationBuilder rootSample = new DiversifiedAggregationBuilder("genreSample").shardSize(100) - .field("genre") - .maxDocsPerValue(MAX_DOCS_PER_GENRE); + .field("genre") + .maxDocsPerValue(MAX_DOCS_PER_GENRE); DiversifiedAggregationBuilder sampleAgg = new DiversifiedAggregationBuilder("sample").shardSize(100); sampleAgg.field("author").maxDocsPerValue(MAX_DOCS_PER_AUTHOR).executionHint(randomExecutionHint()); @@ -181,8 +187,10 @@ public void testNestedSamples() throws Exception { sampleAgg.subAggregation(terms("genres").field("genre")); rootSample.subAggregation(sampleAgg); - SearchResponse response = client().prepareSearch("test").setSearchType(SearchType.QUERY_THEN_FETCH).addAggregation(rootSample) - .get(); + SearchResponse response = client().prepareSearch("test") + .setSearchType(SearchType.QUERY_THEN_FETCH) + .addAggregation(rootSample) + .get(); assertSearchResponse(response); Sampler genreSample = response.getAggregations().get("genreSample"); Sampler sample = genreSample.getAggregations().get("sample"); @@ -203,12 +211,17 @@ public void testNestedSamples() throws Exception { public void testPartiallyUnmappedDiversifyField() throws Exception { // One of the indexes is missing the "author" field used for // diversifying results - DiversifiedAggregationBuilder sampleAgg = new DiversifiedAggregationBuilder("sample").shardSize(100).field("author") - .maxDocsPerValue(1); + DiversifiedAggregationBuilder sampleAgg = new DiversifiedAggregationBuilder("sample").shardSize(100) + .field("author") + .maxDocsPerValue(1); sampleAgg.subAggregation(terms("authors").field("author")); - SearchResponse response = client().prepareSearch("idx_unmapped_author", "test").setSearchType(SearchType.QUERY_THEN_FETCH) - .setQuery(new TermQueryBuilder("genre", "fantasy")).setFrom(0).setSize(60).addAggregation(sampleAgg) - .get(); + SearchResponse response = client().prepareSearch("idx_unmapped_author", "test") + .setSearchType(SearchType.QUERY_THEN_FETCH) + .setQuery(new TermQueryBuilder("genre", "fantasy")) + .setFrom(0) + .setSize(60) + .addAggregation(sampleAgg) + .get(); assertSearchResponse(response); Sampler sample = response.getAggregations().get("sample"); assertThat(sample.getDocCount(), greaterThan(0L)); @@ -217,13 +230,18 @@ public void testPartiallyUnmappedDiversifyField() throws Exception { } public void testWhollyUnmappedDiversifyField() throws Exception { - //All of the indices are missing the "author" field used for diversifying results + // All of the indices are missing the "author" field used for diversifying results int MAX_DOCS_PER_AUTHOR = 1; DiversifiedAggregationBuilder sampleAgg = new DiversifiedAggregationBuilder("sample").shardSize(100); sampleAgg.field("author").maxDocsPerValue(MAX_DOCS_PER_AUTHOR).executionHint(randomExecutionHint()); sampleAgg.subAggregation(terms("authors").field("author")); - SearchResponse response = client().prepareSearch("idx_unmapped", "idx_unmapped_author").setSearchType(SearchType.QUERY_THEN_FETCH) - .setQuery(new TermQueryBuilder("genre", "fantasy")).setFrom(0).setSize(60).addAggregation(sampleAgg).get(); + SearchResponse response = 
client().prepareSearch("idx_unmapped", "idx_unmapped_author") + .setSearchType(SearchType.QUERY_THEN_FETCH) + .setQuery(new TermQueryBuilder("genre", "fantasy")) + .setFrom(0) + .setSize(60) + .addAggregation(sampleAgg) + .get(); assertSearchResponse(response); Sampler sample = response.getAggregations().get("sample"); assertThat(sample.getDocCount(), equalTo(0L)); @@ -239,7 +257,8 @@ public void testRidiculousSizeDiversity() throws Exception { SearchResponse response = client().prepareSearch("test") .setSearchType(SearchType.QUERY_THEN_FETCH) .setQuery(new TermQueryBuilder("genre", "fantasy")) - .setFrom(0).setSize(60) + .setFrom(0) + .setSize(60) .addAggregation(sampleAgg) .get(); assertSearchResponse(response); @@ -250,7 +269,8 @@ public void testRidiculousSizeDiversity() throws Exception { response = client().prepareSearch("test") .setSearchType(SearchType.QUERY_THEN_FETCH) .setQuery(new TermQueryBuilder("genre", "fantasy")) - .setFrom(0).setSize(60) + .setFrom(0) + .setSize(60) .addAggregation(sampleAgg) .get(); assertSearchResponse(response); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/DoubleTermsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/DoubleTermsIT.java index 9c6d2817fb535..df7d96c379d0e 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/DoubleTermsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/DoubleTermsIT.java @@ -122,30 +122,45 @@ public void setupSuiteScopeCluster() throws Exception { createIndex("idx"); List builders = new ArrayList<>(); for (int i = 0; i < NUM_DOCS; i++) { - builders.add(client().prepareIndex("idx").setSource(jsonBuilder() - .startObject() - .field(SINGLE_VALUED_FIELD_NAME, (double) i) - .field("num_tag", i < NUM_DOCS/2 + 1 ? 1 : 0) // used to test order by single-bucket sub agg - .field("constant", 1) - .startArray(MULTI_VALUED_FIELD_NAME).value((double) i).value(i + 1d).endArray() - .endObject())); + builders.add( + client().prepareIndex("idx") + .setSource( + jsonBuilder().startObject() + .field(SINGLE_VALUED_FIELD_NAME, (double) i) + .field("num_tag", i < NUM_DOCS / 2 + 1 ? 
1 : 0) // used to test order by single-bucket sub agg + .field("constant", 1) + .startArray(MULTI_VALUED_FIELD_NAME) + .value((double) i) + .value(i + 1d) + .endArray() + .endObject() + ) + ); } for (int i = 0; i < 100; i++) { - builders.add(client().prepareIndex("high_card_idx").setSource(jsonBuilder() - .startObject() - .field(SINGLE_VALUED_FIELD_NAME, (double) i) - .startArray(MULTI_VALUED_FIELD_NAME).value((double)i).value(i + 1d).endArray() - .endObject())); + builders.add( + client().prepareIndex("high_card_idx") + .setSource( + jsonBuilder().startObject() + .field(SINGLE_VALUED_FIELD_NAME, (double) i) + .startArray(MULTI_VALUED_FIELD_NAME) + .value((double) i) + .value(i + 1d) + .endArray() + .endObject() + ) + ); } createIndex("idx_unmapped"); assertAcked(prepareCreate("empty_bucket_idx").setMapping(SINGLE_VALUED_FIELD_NAME, "type=integer")); for (int i = 0; i < 2; i++) { - builders.add(client().prepareIndex("empty_bucket_idx").setId(""+i).setSource(jsonBuilder() - .startObject() - .field(SINGLE_VALUED_FIELD_NAME, i*2) - .endObject())); + builders.add( + client().prepareIndex("empty_bucket_idx") + .setId("" + i) + .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, i * 2).endObject()) + ); } getMultiSortDocs(builders); @@ -201,67 +216,47 @@ private void getMultiSortDocs(List builders) throws IOExcep assertAcked(prepareCreate("sort_idx").setMapping(SINGLE_VALUED_FIELD_NAME, "type=double")); for (int i = 1; i <= 3; i++) { - builders.add(client().prepareIndex("sort_idx").setSource(jsonBuilder() - .startObject() - .field(SINGLE_VALUED_FIELD_NAME, 1) - .field("l", 1) - .field("d", i) - .endObject())); - builders.add(client().prepareIndex("sort_idx").setSource(jsonBuilder() - .startObject() - .field(SINGLE_VALUED_FIELD_NAME, 2) - .field("l", 2) - .field("d", i) - .endObject())); + builders.add( + client().prepareIndex("sort_idx") + .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, 1).field("l", 1).field("d", i).endObject()) + ); + builders.add( + client().prepareIndex("sort_idx") + .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, 2).field("l", 2).field("d", i).endObject()) + ); } - builders.add(client().prepareIndex("sort_idx").setSource(jsonBuilder() - .startObject() - .field(SINGLE_VALUED_FIELD_NAME, 3) - .field("l", 3) - .field("d", 1) - .endObject())); - builders.add(client().prepareIndex("sort_idx").setSource(jsonBuilder() - .startObject() - .field(SINGLE_VALUED_FIELD_NAME, 3) - .field("l", 3) - .field("d", 2) - .endObject())); - builders.add(client().prepareIndex("sort_idx").setSource(jsonBuilder() - .startObject() - .field(SINGLE_VALUED_FIELD_NAME, 4) - .field("l", 3) - .field("d", 1) - .endObject())); - builders.add(client().prepareIndex("sort_idx").setSource(jsonBuilder() - .startObject() - .field(SINGLE_VALUED_FIELD_NAME, 4) - .field("l", 3) - .field("d", 3) - .endObject())); - builders.add(client().prepareIndex("sort_idx").setSource(jsonBuilder() - .startObject() - .field(SINGLE_VALUED_FIELD_NAME, 5) - .field("l", 5) - .field("d", 1) - .endObject())); - builders.add(client().prepareIndex("sort_idx").setSource(jsonBuilder() - .startObject() - .field(SINGLE_VALUED_FIELD_NAME, 5) - .field("l", 5) - .field("d", 2) - .endObject())); - builders.add(client().prepareIndex("sort_idx").setSource(jsonBuilder() - .startObject() - .field(SINGLE_VALUED_FIELD_NAME, 6) - .field("l", 5) - .field("d", 1) - .endObject())); - builders.add(client().prepareIndex("sort_idx").setSource(jsonBuilder() - .startObject() - 
.field(SINGLE_VALUED_FIELD_NAME, 7) - .field("l", 5) - .field("d", 1) - .endObject())); + builders.add( + client().prepareIndex("sort_idx") + .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, 3).field("l", 3).field("d", 1).endObject()) + ); + builders.add( + client().prepareIndex("sort_idx") + .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, 3).field("l", 3).field("d", 2).endObject()) + ); + builders.add( + client().prepareIndex("sort_idx") + .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, 4).field("l", 3).field("d", 1).endObject()) + ); + builders.add( + client().prepareIndex("sort_idx") + .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, 4).field("l", 3).field("d", 3).endObject()) + ); + builders.add( + client().prepareIndex("sort_idx") + .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, 5).field("l", 5).field("d", 1).endObject()) + ); + builders.add( + client().prepareIndex("sort_idx") + .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, 5).field("l", 5).field("d", 2).endObject()) + ); + builders.add( + client().prepareIndex("sort_idx") + .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, 6).field("l", 5).field("d", 1).endObject()) + ); + builders.add( + client().prepareIndex("sort_idx") + .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, 7).field("l", 5).field("d", 1).endObject()) + ); } private String key(Terms.Bucket bucket) { @@ -270,11 +265,17 @@ private String key(Terms.Bucket bucket) { // the main purpose of this test is to make sure we're not allocating 2GB of memory per shard public void testSizeIsZero() { - IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, - () -> client() - .prepareSearch("high_card_idx").addAggregation(terms("terms").field(SINGLE_VALUED_FIELD_NAME) - .minDocCount(randomInt(1)).size(0).collectMode(randomFrom(SubAggCollectionMode.values()))) - .get()); + IllegalArgumentException exception = expectThrows( + IllegalArgumentException.class, + () -> client().prepareSearch("high_card_idx") + .addAggregation( + terms("terms").field(SINGLE_VALUED_FIELD_NAME) + .minDocCount(randomInt(1)) + .size(0) + .collectMode(randomFrom(SubAggCollectionMode.values())) + ) + .get() + ); assertThat(exception.getMessage(), containsString("[size] must be greater than 0. 
Found [0] in [terms]")); } @@ -289,8 +290,8 @@ public void testMultiValueFieldWithPartitionedFiltering() throws Exception { private void runTestFieldWithPartitionedFiltering(String field) throws Exception { // Find total number of unique terms SearchResponse allResponse = client().prepareSearch("idx") - .addAggregation(terms("terms").field(field).size(10000).collectMode(randomFrom(SubAggCollectionMode.values()))) - .get(); + .addAggregation(terms("terms").field(field).size(10000).collectMode(randomFrom(SubAggCollectionMode.values()))) + .get(); assertSearchResponse(allResponse); Terms terms = allResponse.getAggregations().get("terms"); assertThat(terms, notNullValue()); @@ -301,9 +302,13 @@ private void runTestFieldWithPartitionedFiltering(String field) throws Exception final int numPartitions = randomIntBetween(2, 4); Set foundTerms = new HashSet<>(); for (int partition = 0; partition < numPartitions; partition++) { - SearchResponse response = client().prepareSearch("idx").addAggregation(terms("terms").field(field) - .includeExclude(new IncludeExclude(partition, numPartitions)).collectMode(randomFrom(SubAggCollectionMode.values()))) - .get(); + SearchResponse response = client().prepareSearch("idx") + .addAggregation( + terms("terms").field(field) + .includeExclude(new IncludeExclude(partition, numPartitions)) + .collectMode(randomFrom(SubAggCollectionMode.values())) + ) + .get(); assertSearchResponse(response); terms = response.getAggregations().get("terms"); assertThat(terms, notNullValue()); @@ -318,15 +323,15 @@ private void runTestFieldWithPartitionedFiltering(String field) throws Exception public void testSingleValuedFieldWithValueScript() throws Exception { SearchResponse response = client().prepareSearch("idx") - .addAggregation(terms("terms") - .field(SINGLE_VALUED_FIELD_NAME) - .collectMode(randomFrom(SubAggCollectionMode.values())) - .script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_value + 1", Collections.emptyMap()))) - .get(); + .addAggregation( + terms("terms").field(SINGLE_VALUED_FIELD_NAME) + .collectMode(randomFrom(SubAggCollectionMode.values())) + .script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_value + 1", Collections.emptyMap())) + ) + .get(); assertSearchResponse(response); - Terms terms = response.getAggregations().get("terms"); assertThat(terms, notNullValue()); assertThat(terms.getName(), equalTo("terms")); @@ -335,7 +340,7 @@ public void testSingleValuedFieldWithValueScript() throws Exception { for (int i = 0; i < 5; i++) { Terms.Bucket bucket = terms.getBucketByKey("" + (i + 1d)); assertThat(bucket, notNullValue()); - assertThat(key(bucket), equalTo("" + (i+1d))); + assertThat(key(bucket), equalTo("" + (i + 1d))); assertThat(bucket.getKeyAsNumber().intValue(), equalTo(i + 1)); assertThat(bucket.getDocCount(), equalTo(1L)); } @@ -343,15 +348,15 @@ public void testSingleValuedFieldWithValueScript() throws Exception { public void testMultiValuedFieldWithValueScript() throws Exception { SearchResponse response = client().prepareSearch("idx") - .addAggregation(terms("terms") - .field(MULTI_VALUED_FIELD_NAME) - .collectMode(randomFrom(SubAggCollectionMode.values())) - .script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_value + 1", Collections.emptyMap()))) - .get(); + .addAggregation( + terms("terms").field(MULTI_VALUED_FIELD_NAME) + .collectMode(randomFrom(SubAggCollectionMode.values())) + .script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_value + 1", Collections.emptyMap())) + ) + .get(); 
assertSearchResponse(response); - Terms terms = response.getAggregations().get("terms"); assertThat(terms, notNullValue()); assertThat(terms.getName(), equalTo("terms")); @@ -360,7 +365,7 @@ public void testMultiValuedFieldWithValueScript() throws Exception { for (int i = 0; i < 6; i++) { Terms.Bucket bucket = terms.getBucketByKey("" + (i + 1d)); assertThat(bucket, notNullValue()); - assertThat(key(bucket), equalTo("" + (i+1d))); + assertThat(key(bucket), equalTo("" + (i + 1d))); assertThat(bucket.getKeyAsNumber().intValue(), equalTo(i + 1)); if (i == 0 || i == 5) { assertThat(bucket.getDocCount(), equalTo(1L)); @@ -372,16 +377,15 @@ public void testMultiValuedFieldWithValueScript() throws Exception { public void testMultiValuedFieldWithValueScriptNotUnique() throws Exception { SearchResponse response = client().prepareSearch("idx") - .addAggregation(terms("terms") - .field(MULTI_VALUED_FIELD_NAME) - .collectMode(randomFrom(SubAggCollectionMode.values())) - .script( - new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "(long) (_value / 1000 + 1)", Collections.emptyMap()))) - .get(); + .addAggregation( + terms("terms").field(MULTI_VALUED_FIELD_NAME) + .collectMode(randomFrom(SubAggCollectionMode.values())) + .script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "(long) (_value / 1000 + 1)", Collections.emptyMap())) + ) + .get(); assertSearchResponse(response); - Terms terms = response.getAggregations().get("terms"); assertThat(terms, notNullValue()); assertThat(terms.getName(), equalTo("terms")); @@ -412,14 +416,19 @@ public void testMultiValuedFieldWithValueScriptNotUnique() throws Exception { */ public void testScriptSingleValue() throws Exception { - SearchResponse response = client() - .prepareSearch("idx") - .addAggregation( - terms("terms") - .collectMode(randomFrom(SubAggCollectionMode.values())) - .script(new Script(ScriptType.INLINE, - CustomScriptPlugin.NAME, "doc['" + MULTI_VALUED_FIELD_NAME + "'].value", Collections.emptyMap()))) - .get(); + SearchResponse response = client().prepareSearch("idx") + .addAggregation( + terms("terms").collectMode(randomFrom(SubAggCollectionMode.values())) + .script( + new Script( + ScriptType.INLINE, + CustomScriptPlugin.NAME, + "doc['" + MULTI_VALUED_FIELD_NAME + "'].value", + Collections.emptyMap() + ) + ) + ) + .get(); assertSearchResponse(response); @@ -438,14 +447,19 @@ public void testScriptSingleValue() throws Exception { } public void testScriptMultiValued() throws Exception { - SearchResponse response = client() - .prepareSearch("idx") - .addAggregation( - terms("terms") - .collectMode(randomFrom(SubAggCollectionMode.values())) - .script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['" + MULTI_VALUED_FIELD_NAME + "']", - Collections.emptyMap()))) - .get(); + SearchResponse response = client().prepareSearch("idx") + .addAggregation( + terms("terms").collectMode(randomFrom(SubAggCollectionMode.values())) + .script( + new Script( + ScriptType.INLINE, + CustomScriptPlugin.NAME, + "doc['" + MULTI_VALUED_FIELD_NAME + "']", + Collections.emptyMap() + ) + ) + ) + .get(); assertSearchResponse(response); @@ -469,14 +483,11 @@ public void testScriptMultiValued() throws Exception { public void testPartiallyUnmapped() throws Exception { SearchResponse response = client().prepareSearch("idx_unmapped", "idx") - .addAggregation(terms("terms") - .field(SINGLE_VALUED_FIELD_NAME) - .collectMode(randomFrom(SubAggCollectionMode.values()))) - .get(); + 
.addAggregation(terms("terms").field(SINGLE_VALUED_FIELD_NAME).collectMode(randomFrom(SubAggCollectionMode.values()))) + .get(); assertSearchResponse(response); - Terms terms = response.getAggregations().get("terms"); assertThat(terms, notNullValue()); assertThat(terms.getName(), equalTo("terms")); @@ -493,15 +504,13 @@ public void testPartiallyUnmapped() throws Exception { public void testPartiallyUnmappedWithFormat() throws Exception { SearchResponse response = client().prepareSearch("idx_unmapped", "idx") - .addAggregation(terms("terms") - .field(SINGLE_VALUED_FIELD_NAME) - .collectMode(randomFrom(SubAggCollectionMode.values())) - .format("0000.00")) - .get(); + .addAggregation( + terms("terms").field(SINGLE_VALUED_FIELD_NAME).collectMode(randomFrom(SubAggCollectionMode.values())).format("0000.00") + ) + .get(); assertSearchResponse(response); - Terms terms = response.getAggregations().get("terms"); assertThat(terms, notNullValue()); assertThat(terms.getName(), equalTo("terms")); @@ -519,17 +528,15 @@ public void testPartiallyUnmappedWithFormat() throws Exception { public void testSingleValuedFieldOrderedBySingleValueSubAggregationAscWithSubTermsAgg() throws Exception { boolean asc = true; - SearchResponse response = client() - .prepareSearch("idx") - .addAggregation( - terms("terms") - .field(SINGLE_VALUED_FIELD_NAME) - .collectMode(randomFrom(SubAggCollectionMode.values())) - .order(BucketOrder.aggregation("avg_i", asc)) - .subAggregation(avg("avg_i").field(SINGLE_VALUED_FIELD_NAME)) - .subAggregation( - terms("subTerms").field(MULTI_VALUED_FIELD_NAME).collectMode( - randomFrom(SubAggCollectionMode.values())))).get(); + SearchResponse response = client().prepareSearch("idx") + .addAggregation( + terms("terms").field(SINGLE_VALUED_FIELD_NAME) + .collectMode(randomFrom(SubAggCollectionMode.values())) + .order(BucketOrder.aggregation("avg_i", asc)) + .subAggregation(avg("avg_i").field(SINGLE_VALUED_FIELD_NAME)) + .subAggregation(terms("subTerms").field(MULTI_VALUED_FIELD_NAME).collectMode(randomFrom(SubAggCollectionMode.values()))) + ) + .get(); assertSearchResponse(response); @@ -541,7 +548,7 @@ public void testSingleValuedFieldOrderedBySingleValueSubAggregationAscWithSubTer for (int i = 0; i < 5; i++) { Terms.Bucket bucket = terms.getBucketByKey("" + (double) i); assertThat(bucket, notNullValue()); - assertThat(key(bucket), equalTo("" + (double)i)); + assertThat(key(bucket), equalTo("" + (double) i)); assertThat(bucket.getDocCount(), equalTo(1L)); Avg avg = bucket.getAggregations().get("avg_i"); @@ -563,13 +570,14 @@ public void testSingleValuedFieldOrderedBySingleValueSubAggregationAscWithSubTer public void testSingleValuedFieldOrderedBySingleBucketSubAggregationAsc() throws Exception { boolean asc = randomBoolean(); - SearchResponse response = client() - .prepareSearch("idx") - .addAggregation( - terms("num_tags").field("num_tag").collectMode(randomFrom(SubAggCollectionMode.values())) - .order(BucketOrder.aggregation("filter", asc)) - .subAggregation(filter("filter", QueryBuilders.matchAllQuery()))).get(); - + SearchResponse response = client().prepareSearch("idx") + .addAggregation( + terms("num_tags").field("num_tag") + .collectMode(randomFrom(SubAggCollectionMode.values())) + .order(BucketOrder.aggregation("filter", asc)) + .subAggregation(filter("filter", QueryBuilders.matchAllQuery())) + ) + .get(); assertSearchResponse(response); @@ -599,18 +607,18 @@ public void testSingleValuedFieldOrderedBySingleBucketSubAggregationAsc() throws public void 
testSingleValuedFieldOrderedBySubAggregationAscMultiHierarchyLevels() throws Exception { boolean asc = randomBoolean(); - SearchResponse response = client() - .prepareSearch("idx") - .addAggregation( - terms("tags") - .field("num_tag") - .collectMode(randomFrom(SubAggCollectionMode.values())) - .order(BucketOrder.aggregation("filter1>filter2>max", asc)) - .subAggregation( - filter("filter1", QueryBuilders.matchAllQuery()).subAggregation( - filter("filter2", QueryBuilders.matchAllQuery()).subAggregation( - max("max").field(SINGLE_VALUED_FIELD_NAME))))).get(); - + SearchResponse response = client().prepareSearch("idx") + .addAggregation( + terms("tags").field("num_tag") + .collectMode(randomFrom(SubAggCollectionMode.values())) + .order(BucketOrder.aggregation("filter1>filter2>max", asc)) + .subAggregation( + filter("filter1", QueryBuilders.matchAllQuery()).subAggregation( + filter("filter2", QueryBuilders.matchAllQuery()).subAggregation(max("max").field(SINGLE_VALUED_FIELD_NAME)) + ) + ) + ) + .get(); assertSearchResponse(response); @@ -657,9 +665,12 @@ public void testSingleValuedFieldOrderedByMissingSubAggregation() throws Excepti for (String index : Arrays.asList("idx", "idx_unmapped")) { try { client().prepareSearch(index) - .addAggregation( - terms("terms").field(SINGLE_VALUED_FIELD_NAME).collectMode(randomFrom(SubAggCollectionMode.values())) - .order(BucketOrder.aggregation("avg_i", true))).get(); + .addAggregation( + terms("terms").field(SINGLE_VALUED_FIELD_NAME) + .collectMode(randomFrom(SubAggCollectionMode.values())) + .order(BucketOrder.aggregation("avg_i", true)) + ) + .get(); fail("Expected search to fail when trying to sort terms aggregation by sug-aggregation that doesn't exist"); @@ -673,14 +684,13 @@ public void testSingleValuedFieldOrderedByNonMetricsOrMultiBucketSubAggregation( for (String index : Arrays.asList("idx", "idx_unmapped")) { try { client().prepareSearch(index) - .addAggregation( - terms("terms") - .field(SINGLE_VALUED_FIELD_NAME) - .collectMode(randomFrom(SubAggCollectionMode.values())) - .order(BucketOrder.aggregation("num_tags", true)) - .subAggregation( - terms("num_tags").field("num_tags").collectMode(randomFrom(SubAggCollectionMode.values())))) - .get(); + .addAggregation( + terms("terms").field(SINGLE_VALUED_FIELD_NAME) + .collectMode(randomFrom(SubAggCollectionMode.values())) + .order(BucketOrder.aggregation("num_tags", true)) + .subAggregation(terms("num_tags").field("num_tags").collectMode(randomFrom(SubAggCollectionMode.values()))) + ) + .get(); fail("Expected search to fail when trying to sort terms aggregation by sug-aggregation which is not of a metrics type"); @@ -694,13 +704,18 @@ public void testSingleValuedFieldOrderedByMultiValuedSubAggregationWithUnknownMe for (String index : Arrays.asList("idx", "idx_unmapped")) { try { client().prepareSearch(index) - .addAggregation( - terms("terms").field(SINGLE_VALUED_FIELD_NAME + "2").collectMode(randomFrom(SubAggCollectionMode.values())) - .order(BucketOrder.aggregation("stats.foo", true)) - .subAggregation(stats("stats").field(SINGLE_VALUED_FIELD_NAME))).get(); + .addAggregation( + terms("terms").field(SINGLE_VALUED_FIELD_NAME + "2") + .collectMode(randomFrom(SubAggCollectionMode.values())) + .order(BucketOrder.aggregation("stats.foo", true)) + .subAggregation(stats("stats").field(SINGLE_VALUED_FIELD_NAME)) + ) + .get(); - fail("Expected search to fail when trying to sort terms aggregation by multi-valued sug-aggregation " + - "with an unknown specified metric to order by"); + fail( + "Expected search 
to fail when trying to sort terms aggregation by multi-valued sug-aggregation " + + "with an unknown specified metric to order by" + ); } catch (ElasticsearchException e) { // expected @@ -712,13 +727,18 @@ public void testSingleValuedFieldOrderedByMultiValuedSubAggregationWithoutMetric for (String index : Arrays.asList("idx", "idx_unmapped")) { try { client().prepareSearch(index) - .addAggregation( - terms("terms").field(SINGLE_VALUED_FIELD_NAME).collectMode(randomFrom(SubAggCollectionMode.values())) - .order(BucketOrder.aggregation("stats", true)) - .subAggregation(stats("stats").field(SINGLE_VALUED_FIELD_NAME))).get(); + .addAggregation( + terms("terms").field(SINGLE_VALUED_FIELD_NAME) + .collectMode(randomFrom(SubAggCollectionMode.values())) + .order(BucketOrder.aggregation("stats", true)) + .subAggregation(stats("stats").field(SINGLE_VALUED_FIELD_NAME)) + ) + .get(); - fail("Expected search to fail when trying to sort terms aggregation by multi-valued sug-aggregation " + - "where the metric name is not specified"); + fail( + "Expected search to fail when trying to sort terms aggregation by multi-valued sug-aggregation " + + "where the metric name is not specified" + ); } catch (ElasticsearchException e) { // expected @@ -728,12 +748,14 @@ public void testSingleValuedFieldOrderedByMultiValuedSubAggregationWithoutMetric public void testSingleValuedFieldOrderedByMultiValueSubAggregationAsc() throws Exception { boolean asc = true; - SearchResponse response = client() - .prepareSearch("idx") - .addAggregation( - terms("terms").field(SINGLE_VALUED_FIELD_NAME).collectMode(randomFrom(SubAggCollectionMode.values())) - .order(BucketOrder.aggregation("stats.avg", asc)) - .subAggregation(stats("stats").field(SINGLE_VALUED_FIELD_NAME))).get(); + SearchResponse response = client().prepareSearch("idx") + .addAggregation( + terms("terms").field(SINGLE_VALUED_FIELD_NAME) + .collectMode(randomFrom(SubAggCollectionMode.values())) + .order(BucketOrder.aggregation("stats.avg", asc)) + .subAggregation(stats("stats").field(SINGLE_VALUED_FIELD_NAME)) + ) + .get(); assertSearchResponse(response); @@ -745,7 +767,7 @@ public void testSingleValuedFieldOrderedByMultiValueSubAggregationAsc() throws E for (int i = 0; i < 5; i++) { Terms.Bucket bucket = terms.getBucketByKey("" + (double) i); assertThat(bucket, notNullValue()); - assertThat(key(bucket), equalTo("" + (double)i)); + assertThat(key(bucket), equalTo("" + (double) i)); assertThat(bucket.getDocCount(), equalTo(1L)); Stats stats = bucket.getAggregations().get("stats"); @@ -756,12 +778,14 @@ public void testSingleValuedFieldOrderedByMultiValueSubAggregationAsc() throws E public void testSingleValuedFieldOrderedByMultiValueSubAggregationDesc() throws Exception { boolean asc = false; - SearchResponse response = client() - .prepareSearch("idx") - .addAggregation( - terms("terms").field(SINGLE_VALUED_FIELD_NAME).collectMode(randomFrom(SubAggCollectionMode.values())) - .order(BucketOrder.aggregation("stats.avg", asc)) - .subAggregation(stats("stats").field(SINGLE_VALUED_FIELD_NAME))).get(); + SearchResponse response = client().prepareSearch("idx") + .addAggregation( + terms("terms").field(SINGLE_VALUED_FIELD_NAME) + .collectMode(randomFrom(SubAggCollectionMode.values())) + .order(BucketOrder.aggregation("stats.avg", asc)) + .subAggregation(stats("stats").field(SINGLE_VALUED_FIELD_NAME)) + ) + .get(); assertSearchResponse(response); @@ -773,7 +797,7 @@ public void testSingleValuedFieldOrderedByMultiValueSubAggregationDesc() throws for (int i = 4; i >= 0; i--) { 
Terms.Bucket bucket = terms.getBucketByKey("" + (double) i); assertThat(bucket, notNullValue()); - assertThat(key(bucket), equalTo("" + (double)i)); + assertThat(key(bucket), equalTo("" + (double) i)); assertThat(bucket.getDocCount(), equalTo(1L)); Stats stats = bucket.getAggregations().get("stats"); @@ -784,12 +808,14 @@ public void testSingleValuedFieldOrderedByMultiValueSubAggregationDesc() throws public void testSingleValuedFieldOrderedByMultiValueExtendedStatsAsc() throws Exception { boolean asc = true; - SearchResponse response = client() - .prepareSearch("idx") - .addAggregation( - terms("terms").field(SINGLE_VALUED_FIELD_NAME).collectMode(randomFrom(SubAggCollectionMode.values())) - .order(BucketOrder.aggregation("stats.variance", asc)) - .subAggregation(extendedStats("stats").field(SINGLE_VALUED_FIELD_NAME))).get(); + SearchResponse response = client().prepareSearch("idx") + .addAggregation( + terms("terms").field(SINGLE_VALUED_FIELD_NAME) + .collectMode(randomFrom(SubAggCollectionMode.values())) + .order(BucketOrder.aggregation("stats.variance", asc)) + .subAggregation(extendedStats("stats").field(SINGLE_VALUED_FIELD_NAME)) + ) + .get(); assertSearchResponse(response); @@ -801,7 +827,7 @@ public void testSingleValuedFieldOrderedByMultiValueExtendedStatsAsc() throws Ex for (int i = 0; i < 5; i++) { Terms.Bucket bucket = terms.getBucketByKey("" + (double) i); assertThat(bucket, notNullValue()); - assertThat(key(bucket), equalTo("" + (double)i)); + assertThat(key(bucket), equalTo("" + (double) i)); assertThat(bucket.getDocCount(), equalTo(1L)); ExtendedStats stats = bucket.getAggregations().get("stats"); @@ -813,19 +839,23 @@ public void testSingleValuedFieldOrderedByMultiValueExtendedStatsAsc() throws Ex public void testScriptScore() { Script scoringScript = new Script( - ScriptType.INLINE, CustomScriptPlugin .NAME, "doc['" + SINGLE_VALUED_FIELD_NAME + "'].value", Collections.emptyMap()); + ScriptType.INLINE, + CustomScriptPlugin.NAME, + "doc['" + SINGLE_VALUED_FIELD_NAME + "'].value", + Collections.emptyMap() + ); Script aggregationScript = new Script( - ScriptType.INLINE, CustomScriptPlugin.NAME, "ceil(_score.doubleValue()/3)", Collections.emptyMap()); + ScriptType.INLINE, + CustomScriptPlugin.NAME, + "ceil(_score.doubleValue()/3)", + Collections.emptyMap() + ); - SearchResponse response = client() - .prepareSearch("idx") - .setQuery(functionScoreQuery(scriptFunction(scoringScript))) - .addAggregation( - terms("terms") - .collectMode(randomFrom(SubAggCollectionMode.values())) - .script(aggregationScript)) - .get(); + SearchResponse response = client().prepareSearch("idx") + .setQuery(functionScoreQuery(scriptFunction(scoringScript))) + .addAggregation(terms("terms").collectMode(randomFrom(SubAggCollectionMode.values())).script(aggregationScript)) + .get(); assertSearchResponse(response); @@ -837,7 +867,7 @@ public void testScriptScore() { for (int i = 0; i < 3; i++) { Terms.Bucket bucket = terms.getBucketByKey("" + (double) i); assertThat(bucket, notNullValue()); - assertThat(key(bucket), equalTo("" + (double)i)); + assertThat(key(bucket), equalTo("" + (double) i)); assertThat(bucket.getKeyAsNumber().intValue(), equalTo(i)); assertThat(bucket.getDocCount(), equalTo(i == 1 ? 
3L : 1L)); } @@ -870,9 +900,12 @@ public void testSingleValuedFieldOrderedBySingleValueSubAggregationAscSingleValu public void testSingleValuedFieldOrderedByThreeCriteria() throws Exception { double[] expectedKeys = new double[] { 2, 1, 4, 5, 3, 6, 7 }; - assertMultiSortResponse(expectedKeys, BucketOrder.count(false), - BucketOrder.aggregation("sum_d", false), - BucketOrder.aggregation("avg_l", false)); + assertMultiSortResponse( + expectedKeys, + BucketOrder.count(false), + BucketOrder.aggregation("sum_d", false), + BucketOrder.aggregation("avg_l", false) + ); } public void testSingleValuedFieldOrderedBySingleValueSubAggregationAscAsCompound() throws Exception { @@ -881,12 +914,15 @@ public void testSingleValuedFieldOrderedBySingleValueSubAggregationAscAsCompound } private void assertMultiSortResponse(double[] expectedKeys, BucketOrder... order) { - SearchResponse response = client() - .prepareSearch("sort_idx") - .addAggregation( - terms("terms").field(SINGLE_VALUED_FIELD_NAME).collectMode(randomFrom(SubAggCollectionMode.values())) - .order(BucketOrder.compound(order)).subAggregation(avg("avg_l").field("l")) - .subAggregation(sum("sum_d").field("d"))).get(); + SearchResponse response = client().prepareSearch("sort_idx") + .addAggregation( + terms("terms").field(SINGLE_VALUED_FIELD_NAME) + .collectMode(randomFrom(SubAggCollectionMode.values())) + .order(BucketOrder.compound(order)) + .subAggregation(avg("avg_l").field("l")) + .subAggregation(sum("sum_d").field("d")) + ) + .get(); assertSearchResponse(response); @@ -919,47 +955,132 @@ public void testOtherDocCount() { * Ensure requests using nondeterministic scripts do not get cached. */ public void testScriptCaching() throws Exception { - assertAcked(prepareCreate("cache_test_idx").setMapping("d", "type=float") + assertAcked( + prepareCreate("cache_test_idx").setMapping("d", "type=float") .setSettings(Settings.builder().put("requests.cache.enable", true).put("number_of_shards", 1).put("number_of_replicas", 1)) - .get()); - indexRandom(true, client().prepareIndex("cache_test_idx").setId("1").setSource("s", 1.5), - client().prepareIndex("cache_test_idx").setId("2").setSource("s", 2.5)); + .get() + ); + indexRandom( + true, + client().prepareIndex("cache_test_idx").setId("1").setSource("s", 1.5), + client().prepareIndex("cache_test_idx").setId("2").setSource("s", 2.5) + ); // Make sure we are starting with a clear cache - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getHitCount(), equalTo(0L)); - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getMissCount(), equalTo(0L)); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getHitCount(), + equalTo(0L) + ); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getMissCount(), + equalTo(0L) + ); // Test that a request using a nondeterministic script does not get cached - SearchResponse r = client().prepareSearch("cache_test_idx").setSize(0).addAggregation( - terms("terms").field("d").script( - new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "Math.random()", Collections.emptyMap()))).get(); + SearchResponse r = client().prepareSearch("cache_test_idx") + .setSize(0) + .addAggregation( + terms("terms").field("d") + .script(new 
Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "Math.random()", Collections.emptyMap())) + ) + .get(); assertSearchResponse(r); - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getHitCount(), equalTo(0L)); - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getMissCount(), equalTo(0L)); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getHitCount(), + equalTo(0L) + ); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getMissCount(), + equalTo(0L) + ); // Test that a request using a deterministic script gets cached - r = client().prepareSearch("cache_test_idx").setSize(0).addAggregation( - terms("terms").field("d").script( - new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_value + 1", Collections.emptyMap()))).get(); + r = client().prepareSearch("cache_test_idx") + .setSize(0) + .addAggregation( + terms("terms").field("d") + .script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_value + 1", Collections.emptyMap())) + ) + .get(); assertSearchResponse(r); - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getHitCount(), equalTo(0L)); - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getMissCount(), equalTo(1L)); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getHitCount(), + equalTo(0L) + ); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getMissCount(), + equalTo(1L) + ); // Ensure that non-scripted requests are cached as normal r = client().prepareSearch("cache_test_idx").setSize(0).addAggregation(terms("terms").field("d")).get(); assertSearchResponse(r); - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getHitCount(), equalTo(0L)); - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getMissCount(), equalTo(2L)); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getHitCount(), + equalTo(0L) + ); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getMissCount(), + equalTo(2L) + ); } } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/FilterIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/FilterIT.java index 92b3fd7bd5654..3b66b224717c3 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/FilterIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/FilterIT.java @@ -48,44 +48,41 @@ public void setupSuiteScopeCluster() throws Exception { numTag1Docs = randomIntBetween(1, numDocs - 1); List builders = new ArrayList<>(); for (int i 
= 0; i < numTag1Docs; i++) { - builders.add(client().prepareIndex("idx").setId(""+i).setSource(jsonBuilder() - .startObject() - .field("value", i + 1) - .field("tag", "tag1") - .endObject())); + builders.add( + client().prepareIndex("idx") + .setId("" + i) + .setSource(jsonBuilder().startObject().field("value", i + 1).field("tag", "tag1").endObject()) + ); } for (int i = numTag1Docs; i < numDocs; i++) { - XContentBuilder source = jsonBuilder() - .startObject() - .field("value", i) - .field("tag", "tag2") - .field("name", "name" + i) - .endObject(); - builders.add(client().prepareIndex("idx").setId(""+i).setSource(source)); + XContentBuilder source = jsonBuilder().startObject() + .field("value", i) + .field("tag", "tag2") + .field("name", "name" + i) + .endObject(); + builders.add(client().prepareIndex("idx").setId("" + i).setSource(source)); if (randomBoolean()) { // randomly index the document twice so that we have deleted docs that match the filter - builders.add(client().prepareIndex("idx").setId(""+i).setSource(source)); + builders.add(client().prepareIndex("idx").setId("" + i).setSource(source)); } } prepareCreate("empty_bucket_idx").setMapping("value", "type=integer").get(); for (int i = 0; i < 2; i++) { - builders.add(client().prepareIndex("empty_bucket_idx").setId(""+i).setSource(jsonBuilder() - .startObject() - .field("value", i*2) - .endObject())); + builders.add( + client().prepareIndex("empty_bucket_idx") + .setId("" + i) + .setSource(jsonBuilder().startObject().field("value", i * 2).endObject()) + ); } indexRandom(true, builders); ensureSearchable(); } public void testSimple() throws Exception { - SearchResponse response = client().prepareSearch("idx") - .addAggregation(filter("tag1", termQuery("tag", "tag1"))) - .get(); + SearchResponse response = client().prepareSearch("idx").addAggregation(filter("tag1", termQuery("tag", "tag1"))).get(); assertSearchResponse(response); - Filter filter = response.getAggregations().get("tag1"); assertThat(filter, notNullValue()); assertThat(filter.getName(), equalTo("tag1")); @@ -107,18 +104,16 @@ public void testEmptyFilterDeclarations() throws Exception { public void testWithSubAggregation() throws Exception { SearchResponse response = client().prepareSearch("idx") - .addAggregation(filter("tag1", termQuery("tag", "tag1")) - .subAggregation(avg("avg_value").field("value"))) - .get(); + .addAggregation(filter("tag1", termQuery("tag", "tag1")).subAggregation(avg("avg_value").field("value"))) + .get(); assertSearchResponse(response); - Filter filter = response.getAggregations().get("tag1"); assertThat(filter, notNullValue()); assertThat(filter.getName(), equalTo("tag1")); assertThat(filter.getDocCount(), equalTo((long) numTag1Docs)); - assertThat((long) ((InternalAggregation)filter).getProperty("_count"), equalTo((long) numTag1Docs)); + assertThat((long) ((InternalAggregation) filter).getProperty("_count"), equalTo((long) numTag1Docs)); long sum = 0; for (int i = 0; i < numTag1Docs; ++i) { @@ -129,14 +124,13 @@ public void testWithSubAggregation() throws Exception { assertThat(avgValue, notNullValue()); assertThat(avgValue.getName(), equalTo("avg_value")); assertThat(avgValue.getValue(), equalTo((double) sum / numTag1Docs)); - assertThat((double) ((InternalAggregation)filter).getProperty("avg_value.value"), equalTo((double) sum / numTag1Docs)); + assertThat((double) ((InternalAggregation) filter).getProperty("avg_value.value"), equalTo((double) sum / numTag1Docs)); } public void testAsSubAggregation() { SearchResponse response = 
client().prepareSearch("idx") - .addAggregation( - histogram("histo").field("value").interval(2L).subAggregation( - filter("filter", matchAllQuery()))).get(); + .addAggregation(histogram("histo").field("value").interval(2L).subAggregation(filter("filter", matchAllQuery()))) + .get(); assertSearchResponse(response); @@ -153,13 +147,12 @@ public void testAsSubAggregation() { public void testWithContextBasedSubAggregation() throws Exception { try { - client().prepareSearch("idx") - .addAggregation(filter("tag1", termQuery("tag", "tag1")) - .subAggregation(avg("avg_value"))) - .get(); + client().prepareSearch("idx").addAggregation(filter("tag1", termQuery("tag", "tag1")).subAggregation(avg("avg_value"))).get(); - fail("expected execution to fail - an attempt to have a context based numeric sub-aggregation, but there is not value source" + - "context which the sub-aggregation can inherit"); + fail( + "expected execution to fail - an attempt to have a context based numeric sub-aggregation, but there is not value source" + + "context which the sub-aggregation can inherit" + ); } catch (ElasticsearchException e) { assertThat(e.getMessage(), is("all shards failed")); @@ -168,10 +161,9 @@ public void testWithContextBasedSubAggregation() throws Exception { public void testEmptyAggregation() throws Exception { SearchResponse searchResponse = client().prepareSearch("empty_bucket_idx") - .setQuery(matchAllQuery()) - .addAggregation(histogram("histo").field("value").interval(1L).minDocCount(0) - .subAggregation(filter("filter", matchAllQuery()))) - .get(); + .setQuery(matchAllQuery()) + .addAggregation(histogram("histo").field("value").interval(1L).minDocCount(0).subAggregation(filter("filter", matchAllQuery()))) + .get(); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(2L)); Histogram histo = searchResponse.getAggregations().get("histo"); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/FiltersIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/FiltersIT.java index 1c2a59d1a4629..bbf2806673004 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/FiltersIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/FiltersIT.java @@ -55,33 +55,31 @@ public void setupSuiteScopeCluster() throws Exception { numTag2Docs = randomIntBetween(1, numDocs - numTag1Docs); List builders = new ArrayList<>(); for (int i = 0; i < numTag1Docs; i++) { - XContentBuilder source = jsonBuilder() - .startObject() - .field("value", i + 1) - .field("tag", "tag1") - .endObject(); - builders.add(client().prepareIndex("idx").setId(""+i).setSource(source)); + XContentBuilder source = jsonBuilder().startObject().field("value", i + 1).field("tag", "tag1").endObject(); + builders.add(client().prepareIndex("idx").setId("" + i).setSource(source)); if (randomBoolean()) { // randomly index the document twice so that we have deleted docs that match the filter - builders.add(client().prepareIndex("idx").setId(""+i).setSource(source)); + builders.add(client().prepareIndex("idx").setId("" + i).setSource(source)); } } for (int i = numTag1Docs; i < (numTag1Docs + numTag2Docs); i++) { - XContentBuilder source = jsonBuilder() - .startObject() - .field("value", i) - .field("tag", "tag2") - .field("name", "name" + i) - .endObject(); - builders.add(client().prepareIndex("idx").setId(""+i).setSource(source)); + XContentBuilder source = jsonBuilder().startObject() + 
.field("value", i) + .field("tag", "tag2") + .field("name", "name" + i) + .endObject(); + builders.add(client().prepareIndex("idx").setId("" + i).setSource(source)); if (randomBoolean()) { - builders.add(client().prepareIndex("idx").setId(""+i).setSource(source)); + builders.add(client().prepareIndex("idx").setId("" + i).setSource(source)); } } for (int i = numTag1Docs + numTag2Docs; i < numDocs; i++) { numOtherDocs++; - XContentBuilder source = jsonBuilder().startObject().field("value", i).field("tag", "tag3").field("name", "name" + i) - .endObject(); + XContentBuilder source = jsonBuilder().startObject() + .field("value", i) + .field("tag", "tag3") + .field("name", "name" + i) + .endObject(); builders.add(client().prepareIndex("idx").setId("" + i).setSource(source)); if (randomBoolean()) { builders.add(client().prepareIndex("idx").setId("" + i).setSource(source)); @@ -89,20 +87,25 @@ public void setupSuiteScopeCluster() throws Exception { } prepareCreate("empty_bucket_idx").setMapping("value", "type=integer").get(); for (int i = 0; i < 2; i++) { - builders.add(client().prepareIndex("empty_bucket_idx").setId(""+i).setSource(jsonBuilder() - .startObject() - .field("value", i*2) - .endObject())); + builders.add( + client().prepareIndex("empty_bucket_idx") + .setId("" + i) + .setSource(jsonBuilder().startObject().field("value", i * 2).endObject()) + ); } indexRandom(true, builders); ensureSearchable(); } public void testSimple() throws Exception { - SearchResponse response = client().prepareSearch("idx").addAggregation( - filters("tags", randomOrder(new KeyedFilter("tag1", termQuery("tag", "tag1")), - new KeyedFilter("tag2", termQuery("tag", "tag2"))))) - .get(); + SearchResponse response = client().prepareSearch("idx") + .addAggregation( + filters( + "tags", + randomOrder(new KeyedFilter("tag1", termQuery("tag", "tag1")), new KeyedFilter("tag2", termQuery("tag", "tag2"))) + ) + ) + .get(); assertSearchResponse(response); @@ -126,9 +129,10 @@ public void testSimple() throws Exception { public void testEmptyFilterDeclarations() throws Exception { QueryBuilder emptyFilter = new BoolQueryBuilder(); SearchResponse response = client().prepareSearch("idx") - .addAggregation(filters("tags", randomOrder(new KeyedFilter("all", emptyFilter), - new KeyedFilter("tag1", termQuery("tag", "tag1"))))) - .get(); + .addAggregation( + filters("tags", randomOrder(new KeyedFilter("all", emptyFilter), new KeyedFilter("tag1", termQuery("tag", "tag1")))) + ) + .get(); assertSearchResponse(response); @@ -144,9 +148,13 @@ public void testEmptyFilterDeclarations() throws Exception { public void testWithSubAggregation() throws Exception { SearchResponse response = client().prepareSearch("idx") - .addAggregation(filters("tags", randomOrder(new KeyedFilter("tag1", termQuery("tag", "tag1")), - new KeyedFilter("tag2", termQuery("tag", "tag2")))).subAggregation(avg("avg_value").field("value"))) - .get(); + .addAggregation( + filters( + "tags", + randomOrder(new KeyedFilter("tag1", termQuery("tag", "tag1")), new KeyedFilter("tag2", termQuery("tag", "tag2"))) + ).subAggregation(avg("avg_value").field("value")) + ) + .get(); assertSearchResponse(response); @@ -155,10 +163,10 @@ public void testWithSubAggregation() throws Exception { assertThat(filters.getName(), equalTo("tags")); assertThat(filters.getBuckets().size(), equalTo(2)); - assertThat(((InternalAggregation)filters).getProperty("_bucket_count"), equalTo(2)); - Object[] propertiesKeys = (Object[]) ((InternalAggregation)filters).getProperty("_key"); - Object[] 
propertiesDocCounts = (Object[]) ((InternalAggregation)filters).getProperty("_count"); - Object[] propertiesCounts = (Object[]) ((InternalAggregation)filters).getProperty("avg_value.value"); + assertThat(((InternalAggregation) filters).getProperty("_bucket_count"), equalTo(2)); + Object[] propertiesKeys = (Object[]) ((InternalAggregation) filters).getProperty("_key"); + Object[] propertiesDocCounts = (Object[]) ((InternalAggregation) filters).getProperty("_count"); + Object[] propertiesCounts = (Object[]) ((InternalAggregation) filters).getProperty("avg_value.value"); Filters.Bucket bucket = filters.getBucketByKey("tag1"); assertThat(bucket, Matchers.notNullValue()); @@ -195,9 +203,8 @@ public void testWithSubAggregation() throws Exception { public void testAsSubAggregation() { SearchResponse response = client().prepareSearch("idx") - .addAggregation( - histogram("histo").field("value").interval(2L).subAggregation( - filters("filters", matchAllQuery()))).get(); + .addAggregation(histogram("histo").field("value").interval(2L).subAggregation(filters("filters", matchAllQuery()))) + .get(); assertSearchResponse(response); @@ -218,14 +225,18 @@ public void testWithContextBasedSubAggregation() throws Exception { try { client().prepareSearch("idx") - .addAggregation( - filters("tags", - randomOrder(new KeyedFilter("tag1", termQuery("tag", "tag1")), - new KeyedFilter("tag2", termQuery("tag", "tag2")))).subAggregation(avg("avg_value"))) - .get(); + .addAggregation( + filters( + "tags", + randomOrder(new KeyedFilter("tag1", termQuery("tag", "tag1")), new KeyedFilter("tag2", termQuery("tag", "tag2"))) + ).subAggregation(avg("avg_value")) + ) + .get(); - fail("expected execution to fail - an attempt to have a context based numeric sub-aggregation, but there is not value source" + - "context which the sub-aggregation can inherit"); + fail( + "expected execution to fail - an attempt to have a context based numeric sub-aggregation, but there is not value source" + + "context which the sub-aggregation can inherit" + ); } catch (ElasticsearchException e) { assertThat(e.getMessage(), is("all shards failed")); @@ -234,10 +245,14 @@ public void testWithContextBasedSubAggregation() throws Exception { public void testEmptyAggregation() throws Exception { SearchResponse searchResponse = client().prepareSearch("empty_bucket_idx") - .setQuery(matchAllQuery()) - .addAggregation(histogram("histo").field("value").interval(1L).minDocCount(0) - .subAggregation(filters("filters", new KeyedFilter("all", matchAllQuery())))) - .get(); + .setQuery(matchAllQuery()) + .addAggregation( + histogram("histo").field("value") + .interval(1L) + .minDocCount(0) + .subAggregation(filters("filters", new KeyedFilter("all", matchAllQuery()))) + ) + .get(); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(2L)); Histogram histo = searchResponse.getAggregations().get("histo"); @@ -255,7 +270,8 @@ public void testEmptyAggregation() throws Exception { public void testSimpleNonKeyed() throws Exception { SearchResponse response = client().prepareSearch("idx") - .addAggregation(filters("tags", termQuery("tag", "tag1"), termQuery("tag", "tag2"))).get(); + .addAggregation(filters("tags", termQuery("tag", "tag1"), termQuery("tag", "tag2"))) + .get(); assertSearchResponse(response); @@ -278,10 +294,14 @@ public void testSimpleNonKeyed() throws Exception { } public void testOtherBucket() throws Exception { - SearchResponse response = client().prepareSearch("idx").addAggregation( - filters("tags", randomOrder(new KeyedFilter("tag1", 
termQuery("tag", "tag1")), - new KeyedFilter("tag2", termQuery("tag", "tag2")))).otherBucket(true)) - .get(); + SearchResponse response = client().prepareSearch("idx") + .addAggregation( + filters( + "tags", + randomOrder(new KeyedFilter("tag1", termQuery("tag", "tag1")), new KeyedFilter("tag2", termQuery("tag", "tag2"))) + ).otherBucket(true) + ) + .get(); assertSearchResponse(response); @@ -306,9 +326,13 @@ public void testOtherBucket() throws Exception { public void testOtherNamedBucket() throws Exception { SearchResponse response = client().prepareSearch("idx") - .addAggregation(filters("tags", randomOrder(new KeyedFilter("tag1", termQuery("tag", "tag1")), - new KeyedFilter("tag2", termQuery("tag", "tag2")))).otherBucket(true).otherBucketKey("foobar")) - .get(); + .addAggregation( + filters( + "tags", + randomOrder(new KeyedFilter("tag1", termQuery("tag", "tag1")), new KeyedFilter("tag2", termQuery("tag", "tag2"))) + ).otherBucket(true).otherBucketKey("foobar") + ) + .get(); assertSearchResponse(response); @@ -333,7 +357,8 @@ public void testOtherNamedBucket() throws Exception { public void testOtherNonKeyed() throws Exception { SearchResponse response = client().prepareSearch("idx") - .addAggregation(filters("tags", termQuery("tag", "tag1"), termQuery("tag", "tag2")).otherBucket(true)).get(); + .addAggregation(filters("tags", termQuery("tag", "tag1"), termQuery("tag", "tag2")).otherBucket(true)) + .get(); assertSearchResponse(response); @@ -361,10 +386,13 @@ public void testOtherNonKeyed() throws Exception { public void testOtherWithSubAggregation() throws Exception { SearchResponse response = client().prepareSearch("idx") - .addAggregation(filters("tags", randomOrder(new KeyedFilter("tag1", termQuery("tag", "tag1")), - new KeyedFilter("tag2", termQuery("tag", "tag2")))).otherBucket(true) - .subAggregation(avg("avg_value").field("value"))) - .get(); + .addAggregation( + filters( + "tags", + randomOrder(new KeyedFilter("tag1", termQuery("tag", "tag1")), new KeyedFilter("tag2", termQuery("tag", "tag2"))) + ).otherBucket(true).subAggregation(avg("avg_value").field("value")) + ) + .get(); assertSearchResponse(response); @@ -373,10 +401,10 @@ public void testOtherWithSubAggregation() throws Exception { assertThat(filters.getName(), equalTo("tags")); assertThat(filters.getBuckets().size(), equalTo(3)); - assertThat(((InternalAggregation)filters).getProperty("_bucket_count"), equalTo(3)); - Object[] propertiesKeys = (Object[]) ((InternalAggregation)filters).getProperty("_key"); - Object[] propertiesDocCounts = (Object[]) ((InternalAggregation)filters).getProperty("_count"); - Object[] propertiesCounts = (Object[]) ((InternalAggregation)filters).getProperty("avg_value.value"); + assertThat(((InternalAggregation) filters).getProperty("_bucket_count"), equalTo(3)); + Object[] propertiesKeys = (Object[]) ((InternalAggregation) filters).getProperty("_key"); + Object[] propertiesDocCounts = (Object[]) ((InternalAggregation) filters).getProperty("_count"); + Object[] propertiesCounts = (Object[]) ((InternalAggregation) filters).getProperty("avg_value.value"); Filters.Bucket bucket = filters.getBucketByKey("tag1"); assertThat(bucket, Matchers.notNullValue()); @@ -429,11 +457,14 @@ public void testOtherWithSubAggregation() throws Exception { public void testEmptyAggregationWithOtherBucket() throws Exception { SearchResponse searchResponse = client().prepareSearch("empty_bucket_idx") - .setQuery(matchAllQuery()) - .addAggregation(histogram("histo").field("value").interval(1L).minDocCount(0) - 
.subAggregation(filters("filters", new KeyedFilter("foo", matchAllQuery())) - .otherBucket(true).otherBucketKey("bar"))) - .get(); + .setQuery(matchAllQuery()) + .addAggregation( + histogram("histo").field("value") + .interval(1L) + .minDocCount(0) + .subAggregation(filters("filters", new KeyedFilter("foo", matchAllQuery())).otherBucket(true).otherBucketKey("bar")) + ) + .get(); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(2L)); Histogram histo = searchResponse.getAggregations().get("histo"); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/GeoDistanceIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/GeoDistanceIT.java index ae23c6b0997a8..8e0dd0ba6638c 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/GeoDistanceIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/GeoDistanceIT.java @@ -69,18 +69,15 @@ private IndexRequestBuilder indexCity(String idx, String name, String... latLons @Override public void setupSuiteScopeCluster() throws Exception { Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, version).build(); - prepareCreate("idx").setSettings(settings) - .setMapping("location", "type=geo_point", "city", "type=keyword") - .get(); + prepareCreate("idx").setSettings(settings).setMapping("location", "type=geo_point", "city", "type=keyword").get(); - prepareCreate("idx-multi") - .setMapping("location", "type=geo_point", "city", "type=keyword") - .get(); + prepareCreate("idx-multi").setMapping("location", "type=geo_point", "city", "type=keyword").get(); createIndex("idx_unmapped"); List cities = new ArrayList<>(); - cities.addAll(Arrays.asList( + cities.addAll( + Arrays.asList( // below 500km indexCity("idx", "utrecht", "52.0945, 5.116"), indexCity("idx", "haarlem", "52.3890, 4.637"), @@ -88,7 +85,9 @@ public void setupSuiteScopeCluster() throws Exception { indexCity("idx", "berlin", "52.540, 13.409"), indexCity("idx", "prague", "50.097679, 14.441314"), // above 1000km - indexCity("idx", "tel-aviv", "32.0741, 34.777"))); + indexCity("idx", "tel-aviv", "32.0741, 34.777") + ) + ); // random cities with no location for (String cityName : Arrays.asList("london", "singapour", "tokyo", "milan")) { @@ -99,28 +98,30 @@ public void setupSuiteScopeCluster() throws Exception { indexRandom(true, cities); cities.clear(); - cities.addAll(Arrays.asList( + cities.addAll( + Arrays.asList( // first point is within the ~17.5km, the second is ~710km indexCity("idx-multi", "city1", "52.3890, 4.637", "50.097679,14.441314"), // first point is ~576km, the second is within the ~35km indexCity("idx-multi", "city2", "52.540, 13.409", "52.0945, 5.116"), // above 1000km - indexCity("idx-multi", "city3", "32.0741, 34.777"))); + indexCity("idx-multi", "city3", "32.0741, 34.777") + ) + ); // random cities with no location for (String cityName : Arrays.asList("london", "singapour", "tokyo", "milan")) { cities.add(indexCity("idx-multi", cityName)); } indexRandom(true, cities); - prepareCreate("empty_bucket_idx") - .setMapping("value", "type=integer", "location", "type=geo_point").get(); + prepareCreate("empty_bucket_idx").setMapping("value", "type=integer", "location", "type=geo_point").get(); List builders = new ArrayList<>(); for (int i = 0; i < 2; i++) { - builders.add(client().prepareIndex("empty_bucket_idx").setId("" + i).setSource(jsonBuilder() - .startObject() - .field("value", i * 2) - 
.field("location", "52.0945, 5.116") - .endObject())); + builders.add( + client().prepareIndex("empty_bucket_idx") + .setId("" + i) + .setSource(jsonBuilder().startObject().field("value", i * 2).field("location", "52.0945, 5.116").endObject()) + ); } indexRandom(true, builders.toArray(new IndexRequestBuilder[builders.size()])); ensureSearchable(); @@ -128,17 +129,17 @@ public void setupSuiteScopeCluster() throws Exception { public void testSimple() throws Exception { SearchResponse response = client().prepareSearch("idx") - .addAggregation(geoDistance("amsterdam_rings", new GeoPoint(52.3760, 4.894)) - .field("location") - .unit(DistanceUnit.KILOMETERS) - .addUnboundedTo(500) - .addRange(500, 1000) - .addUnboundedFrom(1000)) - .get(); + .addAggregation( + geoDistance("amsterdam_rings", new GeoPoint(52.3760, 4.894)).field("location") + .unit(DistanceUnit.KILOMETERS) + .addUnboundedTo(500) + .addRange(500, 1000) + .addUnboundedFrom(1000) + ) + .get(); assertSearchResponse(response); - Range geoDist = response.getAggregations().get("amsterdam_rings"); assertThat(geoDist, notNullValue()); assertThat(geoDist.getName(), equalTo("amsterdam_rings")); @@ -175,17 +176,17 @@ public void testSimple() throws Exception { public void testSimpleWithCustomKeys() throws Exception { SearchResponse response = client().prepareSearch("idx") - .addAggregation(geoDistance("amsterdam_rings", new GeoPoint(52.3760, 4.894)) - .field("location") - .unit(DistanceUnit.KILOMETERS) - .addUnboundedTo("ring1", 500) - .addRange("ring2", 500, 1000) - .addUnboundedFrom("ring3", 1000)) - .get(); + .addAggregation( + geoDistance("amsterdam_rings", new GeoPoint(52.3760, 4.894)).field("location") + .unit(DistanceUnit.KILOMETERS) + .addUnboundedTo("ring1", 500) + .addRange("ring2", 500, 1000) + .addUnboundedFrom("ring3", 1000) + ) + .get(); assertSearchResponse(response); - Range geoDist = response.getAggregations().get("amsterdam_rings"); assertThat(geoDist, notNullValue()); assertThat(geoDist.getName(), equalTo("amsterdam_rings")); @@ -224,17 +225,17 @@ public void testUnmapped() throws Exception { client().admin().cluster().prepareHealth("idx_unmapped").setWaitForYellowStatus().get(); SearchResponse response = client().prepareSearch("idx_unmapped") - .addAggregation(geoDistance("amsterdam_rings", new GeoPoint(52.3760, 4.894)) - .field("location") - .unit(DistanceUnit.KILOMETERS) - .addUnboundedTo(500) - .addRange(500, 1000) - .addUnboundedFrom(1000)) - .get(); + .addAggregation( + geoDistance("amsterdam_rings", new GeoPoint(52.3760, 4.894)).field("location") + .unit(DistanceUnit.KILOMETERS) + .addUnboundedTo(500) + .addRange(500, 1000) + .addUnboundedFrom(1000) + ) + .get(); assertSearchResponse(response); - Range geoDist = response.getAggregations().get("amsterdam_rings"); assertThat(geoDist, notNullValue()); assertThat(geoDist.getName(), equalTo("amsterdam_rings")); @@ -271,17 +272,17 @@ public void testUnmapped() throws Exception { public void testPartiallyUnmapped() throws Exception { SearchResponse response = client().prepareSearch("idx", "idx_unmapped") - .addAggregation(geoDistance("amsterdam_rings", new GeoPoint(52.3760, 4.894)) - .field("location") - .unit(DistanceUnit.KILOMETERS) - .addUnboundedTo(500) - .addRange(500, 1000) - .addUnboundedFrom(1000)) - .get(); + .addAggregation( + geoDistance("amsterdam_rings", new GeoPoint(52.3760, 4.894)).field("location") + .unit(DistanceUnit.KILOMETERS) + .addUnboundedTo(500) + .addRange(500, 1000) + .addUnboundedFrom(1000) + ) + .get(); assertSearchResponse(response); - Range 
geoDist = response.getAggregations().get("amsterdam_rings"); assertThat(geoDist, notNullValue()); assertThat(geoDist.getName(), equalTo("amsterdam_rings")); @@ -318,28 +319,27 @@ public void testPartiallyUnmapped() throws Exception { public void testWithSubAggregation() throws Exception { SearchResponse response = client().prepareSearch("idx") - .addAggregation(geoDistance("amsterdam_rings", new GeoPoint(52.3760, 4.894)) - .field("location") - .unit(DistanceUnit.KILOMETERS) - .addUnboundedTo(500) - .addRange(500, 1000) - .addUnboundedFrom(1000) - .subAggregation(terms("cities").field("city") - .collectMode(randomFrom(SubAggCollectionMode.values())))) - .get(); + .addAggregation( + geoDistance("amsterdam_rings", new GeoPoint(52.3760, 4.894)).field("location") + .unit(DistanceUnit.KILOMETERS) + .addUnboundedTo(500) + .addRange(500, 1000) + .addUnboundedFrom(1000) + .subAggregation(terms("cities").field("city").collectMode(randomFrom(SubAggCollectionMode.values()))) + ) + .get(); assertSearchResponse(response); - Range geoDist = response.getAggregations().get("amsterdam_rings"); assertThat(geoDist, notNullValue()); assertThat(geoDist.getName(), equalTo("amsterdam_rings")); List buckets = geoDist.getBuckets(); assertThat(geoDist.getBuckets().size(), equalTo(3)); - assertThat(((InternalAggregation)geoDist).getProperty("_bucket_count"), equalTo(3)); - Object[] propertiesKeys = (Object[]) ((InternalAggregation)geoDist).getProperty("_key"); - Object[] propertiesDocCounts = (Object[]) ((InternalAggregation)geoDist).getProperty("_count"); - Object[] propertiesCities = (Object[]) ((InternalAggregation)geoDist).getProperty("cities"); + assertThat(((InternalAggregation) geoDist).getProperty("_bucket_count"), equalTo(3)); + Object[] propertiesKeys = (Object[]) ((InternalAggregation) geoDist).getProperty("_key"); + Object[] propertiesDocCounts = (Object[]) ((InternalAggregation) geoDist).getProperty("_count"); + Object[] propertiesCities = (Object[]) ((InternalAggregation) geoDist).getProperty("cities"); Range.Bucket bucket = buckets.get(0); assertThat(bucket, notNullValue()); @@ -404,11 +404,14 @@ public void testWithSubAggregation() throws Exception { public void testEmptyAggregation() throws Exception { SearchResponse searchResponse = client().prepareSearch("empty_bucket_idx") - .setQuery(matchAllQuery()) - .addAggregation(histogram("histo").field("value").interval(1L).minDocCount(0) - .subAggregation(geoDistance("geo_dist", new GeoPoint(52.3760, 4.894)).field("location") - .addRange("0-100", 0.0, 100.0))) - .get(); + .setQuery(matchAllQuery()) + .addAggregation( + histogram("histo").field("value") + .interval(1L) + .minDocCount(0) + .subAggregation(geoDistance("geo_dist", new GeoPoint(52.3760, 4.894)).field("location").addRange("0-100", 0.0, 100.0)) + ) + .get(); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(2L)); Histogram histo = searchResponse.getAggregations().get("histo"); @@ -430,13 +433,11 @@ public void testEmptyAggregation() throws Exception { assertThat(buckets.get(0).getDocCount(), equalTo(0L)); } - public void testNoRangesInQuery() { + public void testNoRangesInQuery() { try { - client().prepareSearch("idx") - .addAggregation(geoDistance("geo_dist", new GeoPoint(52.3760, 4.894)).field("location")) - .get(); + client().prepareSearch("idx").addAggregation(geoDistance("geo_dist", new GeoPoint(52.3760, 4.894)).field("location")).get(); fail(); - } catch (SearchPhaseExecutionException spee){ + } catch (SearchPhaseExecutionException spee) { Throwable rootCause = 
spee.getCause().getCause(); assertThat(rootCause, instanceOf(IllegalArgumentException.class)); assertEquals(rootCause.getMessage(), "No [ranges] specified for the [geo_dist] aggregation"); @@ -445,14 +446,15 @@ public void testNoRangesInQuery() { public void testMultiValues() throws Exception { SearchResponse response = client().prepareSearch("idx-multi") - .addAggregation(geoDistance("amsterdam_rings", new GeoPoint(52.3760, 4.894)) - .field("location") - .unit(DistanceUnit.KILOMETERS) - .distanceType(org.elasticsearch.common.geo.GeoDistance.ARC) - .addUnboundedTo(500) - .addRange(500, 1000) - .addUnboundedFrom(1000)) - .get(); + .addAggregation( + geoDistance("amsterdam_rings", new GeoPoint(52.3760, 4.894)).field("location") + .unit(DistanceUnit.KILOMETERS) + .distanceType(org.elasticsearch.common.geo.GeoDistance.ARC) + .addUnboundedTo(500) + .addRange(500, 1000) + .addUnboundedFrom(1000) + ) + .get(); assertSearchResponse(response); @@ -490,6 +492,4 @@ public void testMultiValues() throws Exception { assertThat(bucket.getDocCount(), equalTo(1L)); } - - } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/GeoHashGridIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/GeoHashGridIT.java index a8584bd5a55ab..f7f4bd7794460 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/GeoHashGridIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/GeoHashGridIT.java @@ -10,6 +10,7 @@ import com.carrotsearch.hppc.ObjectIntHashMap; import com.carrotsearch.hppc.ObjectIntMap; import com.carrotsearch.hppc.cursors.ObjectIntCursor; + import org.elasticsearch.Version; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchResponse; @@ -33,9 +34,9 @@ import java.util.Random; import java.util.Set; +import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.geometry.utils.Geohash.PRECISION; import static org.elasticsearch.geometry.utils.Geohash.stringEncode; -import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.search.aggregations.AggregationBuilders.geohashGrid; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; @@ -77,21 +78,20 @@ public void setupSuiteScopeCluster() throws Exception { Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, version).build(); - assertAcked(prepareCreate("idx").setSettings(settings) - .setMapping("location", "type=geo_point", "city", "type=keyword")); + assertAcked(prepareCreate("idx").setSettings(settings).setMapping("location", "type=geo_point", "city", "type=keyword")); List cities = new ArrayList<>(); Random random = random(); expectedDocCountsForGeoHash = new ObjectIntHashMap<>(numDocs * 2); for (int i = 0; i < numDocs; i++) { - //generate random point + // generate random point double lat = (180d * random.nextDouble()) - 90d; double lng = (360d * random.nextDouble()) - 180d; String randomGeoHash = stringEncode(lng, lat, PRECISION); - //Index at the highest resolution + // Index at the highest resolution cities.add(indexCity("idx", randomGeoHash, lat + ", " + lng)); expectedDocCountsForGeoHash.put(randomGeoHash, expectedDocCountsForGeoHash.getOrDefault(randomGeoHash, 0) + 1); - //Update expected doc counts for 
all resolutions.. + // Update expected doc counts for all resolutions.. for (int precision = PRECISION - 1; precision > 0; precision--) { String hash = stringEncode(lng, lat, precision); if ((smallestGeoHash == null) || (hash.length() < smallestGeoHash.length())) { @@ -102,8 +102,9 @@ public void setupSuiteScopeCluster() throws Exception { } indexRandom(true, cities); - assertAcked(prepareCreate("multi_valued_idx").setSettings(settings) - .setMapping("location", "type=geo_point", "city", "type=keyword")); + assertAcked( + prepareCreate("multi_valued_idx").setSettings(settings).setMapping("location", "type=geo_point", "city", "type=keyword") + ); cities = new ArrayList<>(); multiValuedExpectedDocCountsForGeoHash = new ObjectIntHashMap<>(numDocs * 2); @@ -134,18 +135,15 @@ public void setupSuiteScopeCluster() throws Exception { public void testSimple() throws Exception { for (int precision = 1; precision <= PRECISION; precision++) { SearchResponse response = client().prepareSearch("idx") - .addAggregation(geohashGrid("geohashgrid") - .field("location") - .precision(precision) - ) - .get(); + .addAggregation(geohashGrid("geohashgrid").field("location").precision(precision)) + .get(); assertSearchResponse(response); GeoGrid geoGrid = response.getAggregations().get("geohashgrid"); List buckets = geoGrid.getBuckets(); - Object[] propertiesKeys = (Object[]) ((InternalAggregation)geoGrid).getProperty("_key"); - Object[] propertiesDocCounts = (Object[]) ((InternalAggregation)geoGrid).getProperty("_count"); + Object[] propertiesKeys = (Object[]) ((InternalAggregation) geoGrid).getProperty("_key"); + Object[] propertiesDocCounts = (Object[]) ((InternalAggregation) geoGrid).getProperty("_count"); for (int i = 0; i < buckets.size(); i++) { GeoGrid.Bucket cell = buckets.get(i); String geohash = cell.getKeyAsString(); @@ -153,8 +151,7 @@ public void testSimple() throws Exception { long bucketCount = cell.getDocCount(); int expectedBucketCount = expectedDocCountsForGeoHash.get(geohash); assertNotSame(bucketCount, 0); - assertEquals("Geohash " + geohash + " has wrong doc count ", - expectedBucketCount, bucketCount); + assertEquals("Geohash " + geohash + " has wrong doc count ", expectedBucketCount, bucketCount); GeoPoint geoPoint = (GeoPoint) propertiesKeys[i]; assertThat(stringEncode(geoPoint.lon(), geoPoint.lat(), precision), equalTo(geohash)); assertThat((long) propertiesDocCounts[i], equalTo(bucketCount)); @@ -165,11 +162,8 @@ public void testSimple() throws Exception { public void testMultivalued() throws Exception { for (int precision = 1; precision <= PRECISION; precision++) { SearchResponse response = client().prepareSearch("multi_valued_idx") - .addAggregation(geohashGrid("geohashgrid") - .field("location") - .precision(precision) - ) - .get(); + .addAggregation(geohashGrid("geohashgrid").field("location").precision(precision)) + .get(); assertSearchResponse(response); @@ -180,8 +174,7 @@ public void testMultivalued() throws Exception { long bucketCount = cell.getDocCount(); int expectedBucketCount = multiValuedExpectedDocCountsForGeoHash.get(geohash); assertNotSame(bucketCount, 0); - assertEquals("Geohash " + geohash + " has wrong doc count ", - expectedBucketCount, bucketCount); + assertEquals("Geohash " + geohash + " has wrong doc count ", expectedBucketCount, bucketCount); } } } @@ -191,15 +184,11 @@ public void testFiltered() throws Exception { bbox.setCorners(smallestGeoHash).queryName("bbox"); for (int precision = 1; precision <= PRECISION; precision++) { SearchResponse response = 
client().prepareSearch("idx") - .addAggregation( - AggregationBuilders.filter("filtered", bbox) - .subAggregation( - geohashGrid("geohashgrid") - .field("location") - .precision(precision) - ) - ) - .get(); + .addAggregation( + AggregationBuilders.filter("filtered", bbox) + .subAggregation(geohashGrid("geohashgrid").field("location").precision(precision)) + ) + .get(); assertSearchResponse(response); @@ -212,8 +201,7 @@ public void testFiltered() throws Exception { int expectedBucketCount = expectedDocCountsForGeoHash.get(geohash); assertNotSame(bucketCount, 0); assertTrue("Buckets must be filtered", geohash.startsWith(smallestGeoHash)); - assertEquals("Geohash " + geohash + " has wrong doc count ", - expectedBucketCount, bucketCount); + assertEquals("Geohash " + geohash + " has wrong doc count ", expectedBucketCount, bucketCount); } } @@ -222,11 +210,8 @@ public void testFiltered() throws Exception { public void testUnmapped() throws Exception { for (int precision = 1; precision <= PRECISION; precision++) { SearchResponse response = client().prepareSearch("idx_unmapped") - .addAggregation(geohashGrid("geohashgrid") - .field("location") - .precision(precision) - ) - .get(); + .addAggregation(geohashGrid("geohashgrid").field("location").precision(precision)) + .get(); assertSearchResponse(response); @@ -239,11 +224,8 @@ public void testUnmapped() throws Exception { public void testPartiallyUnmapped() throws Exception { for (int precision = 1; precision <= PRECISION; precision++) { SearchResponse response = client().prepareSearch("idx", "idx_unmapped") - .addAggregation(geohashGrid("geohashgrid") - .field("location") - .precision(precision) - ) - .get(); + .addAggregation(geohashGrid("geohashgrid").field("location").precision(precision)) + .get(); assertSearchResponse(response); @@ -254,8 +236,7 @@ public void testPartiallyUnmapped() throws Exception { long bucketCount = cell.getDocCount(); int expectedBucketCount = expectedDocCountsForGeoHash.get(geohash); assertNotSame(bucketCount, 0); - assertEquals("Geohash " + geohash + " has wrong doc count ", - expectedBucketCount, bucketCount); + assertEquals("Geohash " + geohash + " has wrong doc count ", expectedBucketCount, bucketCount); } } } @@ -263,18 +244,13 @@ public void testPartiallyUnmapped() throws Exception { public void testTopMatch() throws Exception { for (int precision = 1; precision <= PRECISION; precision++) { SearchResponse response = client().prepareSearch("idx") - .addAggregation(geohashGrid("geohashgrid") - .field("location") - .size(1) - .shardSize(100) - .precision(precision) - ) - .get(); + .addAggregation(geohashGrid("geohashgrid").field("location").size(1).shardSize(100).precision(precision)) + .get(); assertSearchResponse(response); GeoGrid geoGrid = response.getAggregations().get("geohashgrid"); - //Check we only have one bucket with the best match for that resolution + // Check we only have one bucket with the best match for that resolution assertThat(geoGrid.getBuckets().size(), equalTo(1)); for (GeoGrid.Bucket cell : geoGrid.getBuckets()) { String geohash = cell.getKeyAsString(); @@ -286,8 +262,7 @@ public void testTopMatch() throws Exception { } } assertNotSame(bucketCount, 0); - assertEquals("Geohash " + geohash + " has wrong doc count ", - expectedBucketCount, bucketCount); + assertEquals("Geohash " + geohash + " has wrong doc count ", expectedBucketCount, bucketCount); } } } @@ -295,19 +270,24 @@ public void testTopMatch() throws Exception { public void testSizeIsZero() { final int size = 0; final int shardSize = 
10000; - IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, - () -> client().prepareSearch("idx") - .addAggregation(geohashGrid("geohashgrid").field("location").size(size).shardSize(shardSize)).get()); + IllegalArgumentException exception = expectThrows( + IllegalArgumentException.class, + () -> client().prepareSearch("idx") + .addAggregation(geohashGrid("geohashgrid").field("location").size(size).shardSize(shardSize)) + .get() + ); assertThat(exception.getMessage(), containsString("[size] must be greater than 0. Found [0] in [geohashgrid]")); } public void testShardSizeIsZero() { final int size = 100; final int shardSize = 0; - IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, - () -> client().prepareSearch("idx") - .addAggregation(geohashGrid("geohashgrid").field("location").size(size).shardSize(shardSize)) - .get()); + IllegalArgumentException exception = expectThrows( + IllegalArgumentException.class, + () -> client().prepareSearch("idx") + .addAggregation(geohashGrid("geohashgrid").field("location").size(size).shardSize(shardSize)) + .get() + ); assertThat(exception.getMessage(), containsString("[shardSize] must be greater than 0. Found [0] in [geohashgrid]")); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/GlobalIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/GlobalIT.java index 7137f8bc7b0f9..417bf7f9d382f 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/GlobalIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/GlobalIT.java @@ -40,19 +40,20 @@ public void setupSuiteScopeCluster() throws Exception { List builders = new ArrayList<>(); numDocs = randomIntBetween(3, 20); for (int i = 0; i < numDocs / 2; i++) { - builders.add(client().prepareIndex("idx").setId(""+i+1).setSource(jsonBuilder() - .startObject() - .field("value", i + 1) - .field("tag", "tag1") - .endObject())); + builders.add( + client().prepareIndex("idx") + .setId("" + i + 1) + .setSource(jsonBuilder().startObject().field("value", i + 1).field("tag", "tag1").endObject()) + ); } for (int i = numDocs / 2; i < numDocs; i++) { - builders.add(client().prepareIndex("idx").setId(""+i+1).setSource(jsonBuilder() - .startObject() - .field("value", i + 1) - .field("tag", "tag2") - .field("name", "name" + i+1) - .endObject())); + builders.add( + client().prepareIndex("idx") + .setId("" + i + 1) + .setSource( + jsonBuilder().startObject().field("value", i + 1).field("tag", "tag2").field("name", "name" + i + 1).endObject() + ) + ); } indexRandom(true, builders); ensureSearchable(); @@ -60,23 +61,21 @@ public void setupSuiteScopeCluster() throws Exception { public void testWithStatsSubAggregator() throws Exception { SearchResponse response = client().prepareSearch("idx") - .setQuery(QueryBuilders.termQuery("tag", "tag1")) - .addAggregation(global("global") - .subAggregation(stats("value_stats").field("value"))) - .get(); + .setQuery(QueryBuilders.termQuery("tag", "tag1")) + .addAggregation(global("global").subAggregation(stats("value_stats").field("value"))) + .get(); assertSearchResponse(response); - Global global = response.getAggregations().get("global"); assertThat(global, notNullValue()); assertThat(global.getName(), equalTo("global")); assertThat(global.getDocCount(), equalTo((long) numDocs)); - assertThat((long) ((InternalAggregation)global).getProperty("_count"), equalTo((long) numDocs)); + 
assertThat((long) ((InternalAggregation) global).getProperty("_count"), equalTo((long) numDocs)); assertThat(global.getAggregations().asList().isEmpty(), is(false)); Stats stats = global.getAggregations().get("value_stats"); - assertThat((Stats) ((InternalAggregation)global).getProperty("value_stats"), sameInstance(stats)); + assertThat((Stats) ((InternalAggregation) global).getProperty("value_stats"), sameInstance(stats)); assertThat(stats, notNullValue()); assertThat(stats.getName(), equalTo("value_stats")); long sum = 0; @@ -93,13 +92,14 @@ public void testWithStatsSubAggregator() throws Exception { public void testNonTopLevel() throws Exception { try { client().prepareSearch("idx") - .setQuery(QueryBuilders.termQuery("tag", "tag1")) - .addAggregation(global("global") - .subAggregation(global("inner_global"))) - .get(); + .setQuery(QueryBuilders.termQuery("tag", "tag1")) + .addAggregation(global("global").subAggregation(global("inner_global"))) + .get(); - fail("expected to fail executing non-top-level global aggregator. global aggregations are only allowed as top level" + - "aggregations"); + fail( + "expected to fail executing non-top-level global aggregator. global aggregations are only allowed as top level" + + "aggregations" + ); } catch (ElasticsearchException e) { assertThat(e.getMessage(), is("all shards failed")); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/HistogramIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/HistogramIT.java index 108fcb36900ba..940b325c16991 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/HistogramIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/HistogramIT.java @@ -139,23 +139,31 @@ public void setupSuiteScopeCluster() throws Exception { List builders = new ArrayList<>(); for (int i = 0; i < numDocs; i++) { - builders.add(client().prepareIndex("idx").setSource(jsonBuilder() - .startObject() - .field(SINGLE_VALUED_FIELD_NAME, i + 1) - .startArray(MULTI_VALUED_FIELD_NAME).value(i + 1).value(i + 2).endArray() - .field("tag", "tag" + i) - .field("constant", 1) - .endObject())); + builders.add( + client().prepareIndex("idx") + .setSource( + jsonBuilder().startObject() + .field(SINGLE_VALUED_FIELD_NAME, i + 1) + .startArray(MULTI_VALUED_FIELD_NAME) + .value(i + 1) + .value(i + 2) + .endArray() + .field("tag", "tag" + i) + .field("constant", 1) + .endObject() + ) + ); } getMultiSortDocs(builders); assertAcked(prepareCreate("empty_bucket_idx").setMapping(SINGLE_VALUED_FIELD_NAME, "type=integer")); for (int i = 0; i < 2; i++) { - builders.add(client().prepareIndex("empty_bucket_idx").setId("" + i).setSource(jsonBuilder() - .startObject() - .field(SINGLE_VALUED_FIELD_NAME, i * 2) - .endObject())); + builders.add( + client().prepareIndex("empty_bucket_idx") + .setId("" + i) + .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, i * 2).endObject()) + ); } indexRandom(true, builders); ensureSearchable(); @@ -180,36 +188,55 @@ private void getMultiSortDocs(List builders) throws IOExcep addExpectedBucket(6, 1, 5, 1); addExpectedBucket(7, 1, 5, 1); - assertAcked(client().admin().indices().prepareCreate("sort_idx") - .setMapping(SINGLE_VALUED_FIELD_NAME, "type=double").get()); + assertAcked(client().admin().indices().prepareCreate("sort_idx").setMapping(SINGLE_VALUED_FIELD_NAME, "type=double").get()); for (int i = 1; i <= 3; i++) { - 
builders.add(client().prepareIndex("sort_idx").setSource( - jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, 1).field("l", 1).field("d", i).endObject())); - builders.add(client().prepareIndex("sort_idx").setSource( - jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, 2).field("l", 2).field("d", i).endObject())); + builders.add( + client().prepareIndex("sort_idx") + .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, 1).field("l", 1).field("d", i).endObject()) + ); + builders.add( + client().prepareIndex("sort_idx") + .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, 2).field("l", 2).field("d", i).endObject()) + ); } - builders.add(client().prepareIndex("sort_idx").setSource( - jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, 3).field("l", 3).field("d", 1).endObject())); - builders.add(client().prepareIndex("sort_idx").setSource( - jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, 3.8).field("l", 3).field("d", 2).endObject())); - builders.add(client().prepareIndex("sort_idx").setSource( - jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, 4).field("l", 3).field("d", 1).endObject())); - builders.add(client().prepareIndex("sort_idx").setSource( - jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, 4.4).field("l", 3).field("d", 3).endObject())); - builders.add(client().prepareIndex("sort_idx").setSource( - jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, 5).field("l", 5).field("d", 1).endObject())); - builders.add(client().prepareIndex("sort_idx").setSource( - jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, 5.1).field("l", 5).field("d", 2).endObject())); - builders.add(client().prepareIndex("sort_idx").setSource( - jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, 6).field("l", 5).field("d", 1).endObject())); - builders.add(client().prepareIndex("sort_idx").setSource( - jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, 7).field("l", 5).field("d", 1).endObject())); + builders.add( + client().prepareIndex("sort_idx") + .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, 3).field("l", 3).field("d", 1).endObject()) + ); + builders.add( + client().prepareIndex("sort_idx") + .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, 3.8).field("l", 3).field("d", 2).endObject()) + ); + builders.add( + client().prepareIndex("sort_idx") + .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, 4).field("l", 3).field("d", 1).endObject()) + ); + builders.add( + client().prepareIndex("sort_idx") + .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, 4.4).field("l", 3).field("d", 3).endObject()) + ); + builders.add( + client().prepareIndex("sort_idx") + .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, 5).field("l", 5).field("d", 1).endObject()) + ); + builders.add( + client().prepareIndex("sort_idx") + .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, 5.1).field("l", 5).field("d", 2).endObject()) + ); + builders.add( + client().prepareIndex("sort_idx") + .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, 6).field("l", 5).field("d", 1).endObject()) + ); + builders.add( + client().prepareIndex("sort_idx") + .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, 7).field("l", 5).field("d", 1).endObject()) + ); } public void testSingleValuedField() throws Exception { SearchResponse response = client().prepareSearch("idx") - 
.addAggregation(histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval)) - .get(); + .addAggregation(histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval)) + .get(); assertSearchResponse(response); @@ -230,13 +257,12 @@ public void testSingleValuedField() throws Exception { public void singleValuedField_withOffset() throws Exception { int interval1 = 10; int offset = 5; - SearchResponse response = client() - .prepareSearch("idx") - .addAggregation(histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval1).offset(offset)) - .get(); + SearchResponse response = client().prepareSearch("idx") + .addAggregation(histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval1).offset(offset)) + .get(); // from setup we have between 6 and 20 documents, each with value 1 in test field - int expectedNumberOfBuckets = (offset >= (numDocs % interval + 1)) ? numValueBuckets : numValueBuckets + 1; + int expectedNumberOfBuckets = (offset >= (numDocs % interval + 1)) ? numValueBuckets : numValueBuckets + 1; Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); assertThat(histo.getName(), equalTo("histo")); @@ -251,7 +277,7 @@ public void singleValuedField_withOffset() throws Exception { // last bucket should have (numDocs % interval + 1) docs bucket = histo.getBuckets().get(0); assertThat(bucket, notNullValue()); - assertThat(((Number) bucket.getKey()).longValue(), equalTo(numDocs%interval1 + 5L)); + assertThat(((Number) bucket.getKey()).longValue(), equalTo(numDocs % interval1 + 5L)); assertThat(bucket.getDocCount(), equalTo((numDocs % interval) + 1L)); } @@ -261,14 +287,13 @@ public void singleValuedField_withOffset() throws Exception { */ public void testSingleValuedFieldWithRandomOffset() throws Exception { int offset = randomIntBetween(2, interval); - SearchResponse response = client() - .prepareSearch("idx") - .addAggregation(histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval).offset(offset)) - .get(); + SearchResponse response = client().prepareSearch("idx") + .addAggregation(histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval).offset(offset)) + .get(); assertSearchResponse(response); // shifting by offset>2 creates new extra bucket [0,offset-1] // if offset is >= number of values in original last bucket, that effect is canceled - int expectedNumberOfBuckets = (offset >= (numDocs % interval + 1)) ? numValueBuckets : numValueBuckets + 1; + int expectedNumberOfBuckets = (offset >= (numDocs % interval + 1)) ? 
numValueBuckets : numValueBuckets + 1; Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); @@ -279,13 +304,13 @@ public void testSingleValuedFieldWithRandomOffset() throws Exception { for (int i = 0; i < expectedNumberOfBuckets; ++i) { Histogram.Bucket bucket = histo.getBuckets().get(i); assertThat(bucket, notNullValue()); - assertThat(((Number) bucket.getKey()).longValue(), equalTo((long) ((i-1) * interval + offset))); - if (i==0) { + assertThat(((Number) bucket.getKey()).longValue(), equalTo((long) ((i - 1) * interval + offset))); + if (i == 0) { // first bucket - long expectedFirstBucketCount = offset-1; + long expectedFirstBucketCount = offset - 1; assertThat(bucket.getDocCount(), equalTo(expectedFirstBucketCount)); docsCounted += expectedFirstBucketCount; - } else if(i buckets = new ArrayList<>(histo.getBuckets()); for (int i = 0; i < numValueBuckets; ++i) { @@ -436,17 +459,16 @@ public void testSingleValuedFieldWithSubAggregation() throws Exception { public void testSingleValuedFieldOrderedBySubAggregationAsc() throws Exception { SearchResponse response = client().prepareSearch("idx") - .addAggregation( - histogram("histo") - .field(SINGLE_VALUED_FIELD_NAME) - .interval(interval) - .order(BucketOrder.aggregation("sum", true)) - .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME))) - .get(); + .addAggregation( + histogram("histo").field(SINGLE_VALUED_FIELD_NAME) + .interval(interval) + .order(BucketOrder.aggregation("sum", true)) + .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME)) + ) + .get(); assertSearchResponse(response); - Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); assertThat(histo.getName(), equalTo("histo")); @@ -479,17 +501,16 @@ public void testSingleValuedFieldOrderedBySubAggregationAsc() throws Exception { public void testSingleValuedFieldOrderedBySubAggregationDesc() throws Exception { SearchResponse response = client().prepareSearch("idx") - .addAggregation( - histogram("histo") - .field(SINGLE_VALUED_FIELD_NAME) - .interval(interval) - .order(BucketOrder.aggregation("sum", false)) - .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME))) - .get(); + .addAggregation( + histogram("histo").field(SINGLE_VALUED_FIELD_NAME) + .interval(interval) + .order(BucketOrder.aggregation("sum", false)) + .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME)) + ) + .get(); assertSearchResponse(response); - Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); assertThat(histo.getName(), equalTo("histo")); @@ -522,17 +543,16 @@ public void testSingleValuedFieldOrderedBySubAggregationDesc() throws Exception public void testSingleValuedFieldOrderedByMultiValuedSubAggregationDesc() throws Exception { SearchResponse response = client().prepareSearch("idx") - .addAggregation( - histogram("histo") - .field(SINGLE_VALUED_FIELD_NAME) - .interval(interval) - .order(BucketOrder.aggregation("stats.sum", false)) - .subAggregation(stats("stats").field(SINGLE_VALUED_FIELD_NAME))) - .get(); + .addAggregation( + histogram("histo").field(SINGLE_VALUED_FIELD_NAME) + .interval(interval) + .order(BucketOrder.aggregation("stats.sum", false)) + .subAggregation(stats("stats").field(SINGLE_VALUED_FIELD_NAME)) + ) + .get(); assertSearchResponse(response); - Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); assertThat(histo.getName(), equalTo("histo")); @@ -567,18 +587,16 @@ public void 
testSingleValuedFieldOrderedByMultiValuedSubAggregationDesc() throws public void testSingleValuedFieldOrderedBySubAggregationDescDeepOrderPath() throws Exception { boolean asc = randomBoolean(); SearchResponse response = client().prepareSearch("idx") - .addAggregation( - histogram("histo") - .field(SINGLE_VALUED_FIELD_NAME) - .interval(interval) - .order(BucketOrder.aggregation("filter>max", asc)) - .subAggregation(filter("filter", matchAllQuery()) - .subAggregation(max("max").field(SINGLE_VALUED_FIELD_NAME)))) - .get(); + .addAggregation( + histogram("histo").field(SINGLE_VALUED_FIELD_NAME) + .interval(interval) + .order(BucketOrder.aggregation("filter>max", asc)) + .subAggregation(filter("filter", matchAllQuery()).subAggregation(max("max").field(SINGLE_VALUED_FIELD_NAME))) + ) + .get(); assertSearchResponse(response); - Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); assertThat(histo.getName(), equalTo("histo")); @@ -607,11 +625,12 @@ public void testSingleValuedFieldOrderedBySubAggregationDescDeepOrderPath() thro public void testSingleValuedFieldOrderedByTieBreaker() throws Exception { SearchResponse response = client().prepareSearch("idx") - .addAggregation(histogram("histo") - .field(SINGLE_VALUED_FIELD_NAME) - .interval(interval) - .order(BucketOrder.aggregation("max_constant", randomBoolean())) - .subAggregation(max("max_constant").field("constant"))) + .addAggregation( + histogram("histo").field(SINGLE_VALUED_FIELD_NAME) + .interval(interval) + .order(BucketOrder.aggregation("max_constant", randomBoolean())) + .subAggregation(max("max_constant").field("constant")) + ) .get(); assertSearchResponse(response); @@ -633,16 +652,17 @@ public void testSingleValuedFieldOrderedByTieBreaker() throws Exception { public void testSingleValuedFieldOrderedByIllegalAgg() throws Exception { boolean asc = true; try { - client() - .prepareSearch("idx") + client().prepareSearch("idx") .addAggregation( histogram("histo").field(SINGLE_VALUED_FIELD_NAME) .interval(interval) .order(BucketOrder.aggregation("inner_histo>avg", asc)) - .subAggregation(histogram("inner_histo") - .interval(interval) - .field(MULTI_VALUED_FIELD_NAME) - .subAggregation(avg("avg").field("value")))) + .subAggregation( + histogram("inner_histo").interval(interval) + .field(MULTI_VALUED_FIELD_NAME) + .subAggregation(avg("avg").field("value")) + ) + ) .get(); fail("Expected an exception"); } catch (SearchPhaseExecutionException e) { @@ -663,12 +683,12 @@ public void testSingleValuedFieldOrderedByIllegalAgg() throws Exception { public void testSingleValuedFieldWithValueScript() throws Exception { SearchResponse response = client().prepareSearch("idx") - .addAggregation( - histogram("histo") - .field(SINGLE_VALUED_FIELD_NAME) - .script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_value + 1", emptyMap())) - .interval(interval)) - .get(); + .addAggregation( + histogram("histo").field(SINGLE_VALUED_FIELD_NAME) + .script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_value + 1", emptyMap())) + .interval(interval) + ) + .get(); assertSearchResponse(response); @@ -695,12 +715,11 @@ public void testSingleValuedFieldWithValueScript() throws Exception { public void testMultiValuedField() throws Exception { SearchResponse response = client().prepareSearch("idx") - .addAggregation(histogram("histo").field(MULTI_VALUED_FIELD_NAME).interval(interval)) - .get(); + .addAggregation(histogram("histo").field(MULTI_VALUED_FIELD_NAME).interval(interval)) + .get(); 
assertSearchResponse(response); - Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); assertThat(histo.getName(), equalTo("histo")); @@ -717,12 +736,11 @@ public void testMultiValuedField() throws Exception { public void testMultiValuedFieldOrderedByKeyDesc() throws Exception { SearchResponse response = client().prepareSearch("idx") - .addAggregation(histogram("histo").field(MULTI_VALUED_FIELD_NAME).interval(interval).order(BucketOrder.key(false))) - .get(); + .addAggregation(histogram("histo").field(MULTI_VALUED_FIELD_NAME).interval(interval).order(BucketOrder.key(false))) + .get(); assertSearchResponse(response); - Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); assertThat(histo.getName(), equalTo("histo")); @@ -739,12 +757,12 @@ public void testMultiValuedFieldOrderedByKeyDesc() throws Exception { public void testMultiValuedFieldWithValueScript() throws Exception { SearchResponse response = client().prepareSearch("idx") - .addAggregation( - histogram("histo") - .field(MULTI_VALUED_FIELD_NAME) - .script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_value + 1", emptyMap())) - .interval(interval)) - .get(); + .addAggregation( + histogram("histo").field(MULTI_VALUED_FIELD_NAME) + .script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_value + 1", emptyMap())) + .interval(interval) + ) + .get(); assertSearchResponse(response); @@ -776,11 +794,11 @@ public void testMultiValuedFieldWithValueScript() throws Exception { public void testScriptSingleValue() throws Exception { SearchResponse response = client().prepareSearch("idx") - .addAggregation( - histogram("histo") - .script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['l_value'].value", emptyMap())) - .interval(interval)) - .get(); + .addAggregation( + histogram("histo").script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['l_value'].value", emptyMap())) + .interval(interval) + ) + .get(); assertSearchResponse(response); @@ -800,11 +818,11 @@ public void testScriptSingleValue() throws Exception { public void testScriptMultiValued() throws Exception { SearchResponse response = client().prepareSearch("idx") - .addAggregation( - histogram("histo") - .script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['l_values']", emptyMap())) - .interval(interval)) - .get(); + .addAggregation( + histogram("histo").script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['l_values']", emptyMap())) + .interval(interval) + ) + .get(); assertSearchResponse(response); @@ -824,12 +842,11 @@ public void testScriptMultiValued() throws Exception { public void testUnmapped() throws Exception { SearchResponse response = client().prepareSearch("idx_unmapped") - .addAggregation(histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval)) - .get(); + .addAggregation(histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval)) + .get(); assertSearchResponse(response); - Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); assertThat(histo.getName(), equalTo("histo")); @@ -838,12 +855,11 @@ public void testUnmapped() throws Exception { public void testPartiallyUnmapped() throws Exception { SearchResponse response = client().prepareSearch("idx", "idx_unmapped") - .addAggregation(histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval)) - .get(); + .addAggregation(histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval)) + .get(); 
assertSearchResponse(response); - Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); assertThat(histo.getName(), equalTo("histo")); @@ -859,14 +875,13 @@ public void testPartiallyUnmapped() throws Exception { } public void testPartiallyUnmappedWithExtendedBounds() throws Exception { - SearchResponse response = client() - .prepareSearch("idx", "idx_unmapped") - .addAggregation( - histogram("histo") - .field(SINGLE_VALUED_FIELD_NAME) - .interval(interval) - .extendedBounds(-1 * 2 * interval, valueCounts.length * interval)) - .get(); + SearchResponse response = client().prepareSearch("idx", "idx_unmapped") + .addAggregation( + histogram("histo").field(SINGLE_VALUED_FIELD_NAME) + .interval(interval) + .extendedBounds(-1 * 2 * interval, valueCounts.length * interval) + ) + .get(); assertSearchResponse(response); @@ -896,10 +911,14 @@ public void testPartiallyUnmappedWithExtendedBounds() throws Exception { public void testEmptyAggregation() throws Exception { SearchResponse searchResponse = client().prepareSearch("empty_bucket_idx") - .setQuery(matchAllQuery()) - .addAggregation(histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(1L).minDocCount(0) - .subAggregation(histogram("sub_histo").field(SINGLE_VALUED_FIELD_NAME).interval(1L))) - .get(); + .setQuery(matchAllQuery()) + .addAggregation( + histogram("histo").field(SINGLE_VALUED_FIELD_NAME) + .interval(1L) + .minDocCount(0) + .subAggregation(histogram("sub_histo").field(SINGLE_VALUED_FIELD_NAME).interval(1L)) + ) + .get(); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(2L)); Histogram histo = searchResponse.getAggregations().get("histo"); @@ -939,7 +958,6 @@ public void testSingleValuedFieldWithExtendedBounds() throws Exception { long boundsMaxKey = lastDataBucketKey + boundsMaxKeyDelta; long boundsMax = boundsMaxKey + randomIntBetween(0, interval - 1); - // it could be that the random bounds.min we chose ended up greater than bounds.max - this should cause an // error boolean invalidBoundsError = boundsMin > boundsMax; @@ -952,12 +970,13 @@ public void testSingleValuedFieldWithExtendedBounds() throws Exception { SearchResponse response = null; try { response = client().prepareSearch("idx") - .addAggregation(histogram("histo") - .field(SINGLE_VALUED_FIELD_NAME) - .interval(interval) - .minDocCount(0) - .extendedBounds(boundsMin, boundsMax)) - .get(); + .addAggregation( + histogram("histo").field(SINGLE_VALUED_FIELD_NAME) + .interval(interval) + .minDocCount(0) + .extendedBounds(boundsMin, boundsMax) + ) + .get(); if (invalidBoundsError) { fail("Expected an exception to be thrown when bounds.min is greater than bounds.max"); @@ -1015,7 +1034,6 @@ public void testEmptyWithExtendedBounds() throws Exception { long boundsMaxKey = lastDataBucketKey + boundsMaxKeyDelta; long boundsMax = boundsMaxKey + randomIntBetween(0, interval - 1); - // it could be that the random bounds.min we chose ended up greater than bounds.max - this should cause an // error boolean invalidBoundsError = boundsMin > boundsMax; @@ -1028,13 +1046,14 @@ public void testEmptyWithExtendedBounds() throws Exception { SearchResponse response = null; try { response = client().prepareSearch("idx") - .setQuery(QueryBuilders.termQuery("foo", "bar")) - .addAggregation(histogram("histo") - .field(SINGLE_VALUED_FIELD_NAME) - .interval(interval) - .minDocCount(0) - .extendedBounds(boundsMin, boundsMax)) - .get(); + .setQuery(QueryBuilders.termQuery("foo", "bar")) + .addAggregation( + 
histogram("histo").field(SINGLE_VALUED_FIELD_NAME) + .interval(interval) + .minDocCount(0) + .extendedBounds(boundsMin, boundsMax) + ) + .get(); if (invalidBoundsError) { fail("Expected an exception to be thrown when bounds.min is greater than bounds.max"); @@ -1073,7 +1092,8 @@ public void testEmptyWithExtendedBounds() throws Exception { public void testExeptionOnNegativerInterval() { try { client().prepareSearch("empty_bucket_idx") - .addAggregation(histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(-1).minDocCount(0)).get(); + .addAggregation(histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(-1).minDocCount(0)) + .get(); fail(); } catch (IllegalArgumentException e) { assertThat(e.toString(), containsString("[interval] must be >0 for histogram aggregation [histo]")); @@ -1082,13 +1102,15 @@ public void testExeptionOnNegativerInterval() { public void testDecimalIntervalAndOffset() throws Exception { assertAcked(prepareCreate("decimal_values").setMapping("d", "type=float").get()); - indexRandom(true, - client().prepareIndex("decimal_values").setId("1").setSource("d", -0.6), - client().prepareIndex("decimal_values").setId("2").setSource("d", 0.1)); + indexRandom( + true, + client().prepareIndex("decimal_values").setId("1").setSource("d", -0.6), + client().prepareIndex("decimal_values").setId("2").setSource("d", 0.1) + ); SearchResponse r = client().prepareSearch("decimal_values") - .addAggregation(histogram("histo").field("d").interval(0.7).offset(0.05)) - .get(); + .addAggregation(histogram("histo").field("d").interval(0.7).offset(0.05)) + .get(); assertSearchResponse(r); Histogram histogram = r.getAggregations().get("histo"); @@ -1105,48 +1127,140 @@ public void testDecimalIntervalAndOffset() throws Exception { * Ensure requests using nondeterministic scripts do not get cached. 
*/ public void testScriptCaching() throws Exception { - assertAcked(prepareCreate("cache_test_idx").setMapping("d", "type=float") + assertAcked( + prepareCreate("cache_test_idx").setMapping("d", "type=float") .setSettings(Settings.builder().put("requests.cache.enable", true).put("number_of_shards", 1).put("number_of_replicas", 1)) - .get()); - indexRandom(true, client().prepareIndex("cache_test_idx").setId("1").setSource("d", -0.6), - client().prepareIndex("cache_test_idx").setId("2").setSource("d", 0.1)); + .get() + ); + indexRandom( + true, + client().prepareIndex("cache_test_idx").setId("1").setSource("d", -0.6), + client().prepareIndex("cache_test_idx").setId("2").setSource("d", 0.1) + ); // Make sure we are starting with a clear cache - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getHitCount(), equalTo(0L)); - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getMissCount(), equalTo(0L)); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getHitCount(), + equalTo(0L) + ); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getMissCount(), + equalTo(0L) + ); // Test that a request using a nondeterministic script does not get cached - SearchResponse r = client().prepareSearch("cache_test_idx").setSize(0).addAggregation(histogram("histo").field("d") - .script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "Math.random()", emptyMap())).interval(0.7).offset(0.05)) - .get(); + SearchResponse r = client().prepareSearch("cache_test_idx") + .setSize(0) + .addAggregation( + histogram("histo").field("d") + .script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "Math.random()", emptyMap())) + .interval(0.7) + .offset(0.05) + ) + .get(); assertSearchResponse(r); - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getHitCount(), equalTo(0L)); - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getMissCount(), equalTo(0L)); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getHitCount(), + equalTo(0L) + ); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getMissCount(), + equalTo(0L) + ); // Test that a request using a deterministic script gets cached - r = client().prepareSearch("cache_test_idx").setSize(0).addAggregation(histogram("histo").field("d") - .script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_value + 1", emptyMap())).interval(0.7).offset(0.05)).get(); + r = client().prepareSearch("cache_test_idx") + .setSize(0) + .addAggregation( + histogram("histo").field("d") + .script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_value + 1", emptyMap())) + .interval(0.7) + .offset(0.05) + ) + .get(); assertSearchResponse(r); - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getHitCount(), equalTo(0L)); - 
assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getMissCount(), equalTo(1L)); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getHitCount(), + equalTo(0L) + ); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getMissCount(), + equalTo(1L) + ); // Ensure that non-scripted requests are cached as normal - r = client().prepareSearch("cache_test_idx").setSize(0).addAggregation(histogram("histo").field("d").interval(0.7).offset(0.05)) - .get(); + r = client().prepareSearch("cache_test_idx") + .setSize(0) + .addAggregation(histogram("histo").field("d").interval(0.7).offset(0.05)) + .get(); assertSearchResponse(r); - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getHitCount(), equalTo(0L)); - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getMissCount(), equalTo(2L)); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getHitCount(), + equalTo(0L) + ); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getMissCount(), + equalTo(2L) + ); } public void testSingleValuedFieldOrderedBySingleValueSubAggregationAscAndKeyDesc() throws Exception { @@ -1155,54 +1269,70 @@ public void testSingleValuedFieldOrderedBySingleValueSubAggregationAscAndKeyDesc } public void testSingleValuedFieldOrderedBySingleValueSubAggregationAscAndKeyAsc() throws Exception { - long[] expectedKeys = new long[] { 1, 2, 3, 4, 5, 6, 7 }; + long[] expectedKeys = new long[] { 1, 2, 3, 4, 5, 6, 7 }; assertMultiSortResponse(expectedKeys, BucketOrder.aggregation("avg_l", true), BucketOrder.key(true)); } public void testSingleValuedFieldOrderedBySingleValueSubAggregationDescAndKeyAsc() throws Exception { - long[] expectedKeys = new long[] { 5, 6, 7, 3, 4, 2, 1 }; + long[] expectedKeys = new long[] { 5, 6, 7, 3, 4, 2, 1 }; assertMultiSortResponse(expectedKeys, BucketOrder.aggregation("avg_l", false), BucketOrder.key(true)); } public void testSingleValuedFieldOrderedByCountAscAndSingleValueSubAggregationAsc() throws Exception { - long[] expectedKeys = new long[] { 6, 7, 3, 4, 5, 1, 2 }; + long[] expectedKeys = new long[] { 6, 7, 3, 4, 5, 1, 2 }; assertMultiSortResponse(expectedKeys, BucketOrder.count(true), BucketOrder.aggregation("avg_l", true)); } public void testSingleValuedFieldOrderedBySingleValueSubAggregationAscSingleValueSubAggregationAsc() throws Exception { - long[] expectedKeys = new long[] { 6, 7, 3, 5, 4, 1, 2 }; + long[] expectedKeys = new long[] { 6, 7, 3, 5, 4, 1, 2 }; assertMultiSortResponse(expectedKeys, BucketOrder.aggregation("sum_d", true), BucketOrder.aggregation("avg_l", true)); } public void testSingleValuedFieldOrderedByThreeCriteria() throws Exception { - long[] expectedKeys = new long[] { 2, 1, 4, 5, 3, 6, 7 }; - assertMultiSortResponse(expectedKeys, BucketOrder.count(false), BucketOrder.aggregation("sum_d", false), - BucketOrder.aggregation("avg_l", false)); + long[] expectedKeys = new long[] { 2, 1, 4, 5, 3, 6, 7 }; + assertMultiSortResponse( + expectedKeys, + 
BucketOrder.count(false), + BucketOrder.aggregation("sum_d", false), + BucketOrder.aggregation("avg_l", false) + ); } public void testSingleValuedFieldOrderedBySingleValueSubAggregationAscAsCompound() throws Exception { - long[] expectedKeys = new long[] { 1, 2, 3, 4, 5, 6, 7 }; + long[] expectedKeys = new long[] { 1, 2, 3, 4, 5, 6, 7 }; assertMultiSortResponse(expectedKeys, BucketOrder.aggregation("avg_l", true)); } public void testInvalidBounds() { - SearchPhaseExecutionException e = expectThrows(SearchPhaseExecutionException.class, () -> client().prepareSearch("empty_bucket_idx") - .addAggregation(histogram("histo").field(SINGLE_VALUED_FIELD_NAME).hardBounds(new DoubleBounds(0.0, 10.0)) - .extendedBounds(3, 20)).get()); - assertThat(e.toString(), containsString("Extended bounds have to be inside hard bounds, hard bounds")); - - e = expectThrows(SearchPhaseExecutionException.class, () -> client().prepareSearch("empty_bucket_idx") - .addAggregation(histogram("histo").field(SINGLE_VALUED_FIELD_NAME).hardBounds(new DoubleBounds(3.0, null)) - .extendedBounds(0, 20)).get()); + SearchPhaseExecutionException e = expectThrows( + SearchPhaseExecutionException.class, + () -> client().prepareSearch("empty_bucket_idx") + .addAggregation( + histogram("histo").field(SINGLE_VALUED_FIELD_NAME).hardBounds(new DoubleBounds(0.0, 10.0)).extendedBounds(3, 20) + ) + .get() + ); + assertThat(e.toString(), containsString("Extended bounds have to be inside hard bounds, hard bounds")); + + e = expectThrows( + SearchPhaseExecutionException.class, + () -> client().prepareSearch("empty_bucket_idx") + .addAggregation( + histogram("histo").field(SINGLE_VALUED_FIELD_NAME).hardBounds(new DoubleBounds(3.0, null)).extendedBounds(0, 20) + ) + .get() + ); assertThat(e.toString(), containsString("Extended bounds have to be inside hard bounds, hard bounds")); } public void testHardBounds() throws Exception { assertAcked(prepareCreate("test").setMapping("d", "type=double").get()); - indexRandom(true, + indexRandom( + true, client().prepareIndex("test").setId("1").setSource("d", -0.6), client().prepareIndex("test").setId("2").setSource("d", 0.5), - client().prepareIndex("test").setId("3").setSource("d", 0.1)); + client().prepareIndex("test").setId("3").setSource("d", 0.1) + ); SearchResponse r = client().prepareSearch("test") .addAggregation(histogram("histo").field("d").interval(0.1).hardBounds(new DoubleBounds(0.0, null))) @@ -1238,11 +1368,15 @@ public void testHardBounds() throws Exception { } private void assertMultiSortResponse(long[] expectedKeys, BucketOrder... 
order) { - SearchResponse response = client() - .prepareSearch("sort_idx") + SearchResponse response = client().prepareSearch("sort_idx") .addAggregation( - histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(1).order(BucketOrder.compound(order)) - .subAggregation(avg("avg_l").field("l")).subAggregation(sum("sum_d").field("d"))).get(); + histogram("histo").field(SINGLE_VALUED_FIELD_NAME) + .interval(1) + .order(BucketOrder.compound(order)) + .subAggregation(avg("avg_l").field("l")) + .subAggregation(sum("sum_d").field("d")) + ) + .get(); assertSearchResponse(response); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/IpRangeIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/IpRangeIT.java index b7d00ade7da0f..22cf163c44005 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/IpRangeIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/IpRangeIT.java @@ -46,20 +46,17 @@ protected Collection> nodePlugins() { @Override public void setupSuiteScopeCluster() throws Exception { - assertAcked(prepareCreate("idx") - .setMapping("ip", "type=ip", "ips", "type=ip")); + assertAcked(prepareCreate("idx").setMapping("ip", "type=ip", "ips", "type=ip")); waitForRelocation(ClusterHealthStatus.GREEN); - indexRandom(true, - client().prepareIndex("idx").setId("1").setSource( - "ip", "192.168.1.7", - "ips", Arrays.asList("192.168.0.13", "192.168.1.2")), - client().prepareIndex("idx").setId("2").setSource( - "ip", "192.168.1.10", - "ips", Arrays.asList("192.168.1.25", "192.168.1.28")), - client().prepareIndex("idx").setId("3").setSource( - "ip", "2001:db8::ff00:42:8329", - "ips", Arrays.asList("2001:db8::ff00:42:8329", "2001:db8::ff00:42:8380"))); + indexRandom( + true, + client().prepareIndex("idx").setId("1").setSource("ip", "192.168.1.7", "ips", Arrays.asList("192.168.0.13", "192.168.1.2")), + client().prepareIndex("idx").setId("2").setSource("ip", "192.168.1.10", "ips", Arrays.asList("192.168.1.25", "192.168.1.28")), + client().prepareIndex("idx") + .setId("3") + .setSource("ip", "2001:db8::ff00:42:8329", "ips", Arrays.asList("2001:db8::ff00:42:8329", "2001:db8::ff00:42:8380")) + ); assertAcked(prepareCreate("idx_unmapped")); waitForRelocation(ClusterHealthStatus.GREEN); @@ -67,12 +64,15 @@ public void setupSuiteScopeCluster() throws Exception { } public void testSingleValuedField() { - SearchResponse rsp = client().prepareSearch("idx").addAggregation( + SearchResponse rsp = client().prepareSearch("idx") + .addAggregation( AggregationBuilders.ipRange("my_range") .field("ip") .addUnboundedTo("192.168.1.0") .addRange("192.168.1.0", "192.168.1.10") - .addUnboundedFrom("192.168.1.10")).get(); + .addUnboundedFrom("192.168.1.10") + ) + .get(); assertSearchResponse(rsp); Range range = rsp.getAggregations().get("my_range"); assertEquals(3, range.getBuckets().size()); @@ -97,12 +97,15 @@ public void testSingleValuedField() { } public void testMultiValuedField() { - SearchResponse rsp = client().prepareSearch("idx").addAggregation( + SearchResponse rsp = client().prepareSearch("idx") + .addAggregation( AggregationBuilders.ipRange("my_range") .field("ips") .addUnboundedTo("192.168.1.0") .addRange("192.168.1.0", "192.168.1.10") - .addUnboundedFrom("192.168.1.10")).get(); + .addUnboundedFrom("192.168.1.10") + ) + .get(); assertSearchResponse(rsp); Range range = rsp.getAggregations().get("my_range"); assertEquals(3, range.getBuckets().size()); @@ 
-127,12 +130,15 @@ public void testMultiValuedField() { } public void testIpMask() { - SearchResponse rsp = client().prepareSearch("idx").addAggregation( + SearchResponse rsp = client().prepareSearch("idx") + .addAggregation( AggregationBuilders.ipRange("my_range") .field("ips") .addMaskRange("::/0") .addMaskRange("0.0.0.0/0") - .addMaskRange("2001:db8::/64")).get(); + .addMaskRange("2001:db8::/64") + ) + .get(); assertSearchResponse(rsp); Range range = rsp.getAggregations().get("my_range"); assertEquals(3, range.getBuckets().size()); @@ -151,12 +157,15 @@ public void testIpMask() { } public void testPartiallyUnmapped() { - SearchResponse rsp = client().prepareSearch("idx", "idx_unmapped").addAggregation( + SearchResponse rsp = client().prepareSearch("idx", "idx_unmapped") + .addAggregation( AggregationBuilders.ipRange("my_range") .field("ip") .addUnboundedTo("192.168.1.0") .addRange("192.168.1.0", "192.168.1.10") - .addUnboundedFrom("192.168.1.10")).get(); + .addUnboundedFrom("192.168.1.10") + ) + .get(); assertSearchResponse(rsp); Range range = rsp.getAggregations().get("my_range"); assertEquals(3, range.getBuckets().size()); @@ -181,12 +190,15 @@ public void testPartiallyUnmapped() { } public void testUnmapped() { - SearchResponse rsp = client().prepareSearch("idx_unmapped").addAggregation( + SearchResponse rsp = client().prepareSearch("idx_unmapped") + .addAggregation( AggregationBuilders.ipRange("my_range") .field("ip") .addUnboundedTo("192.168.1.0") .addRange("192.168.1.0", "192.168.1.10") - .addUnboundedFrom("192.168.1.10")).get(); + .addUnboundedFrom("192.168.1.10") + ) + .get(); assertSearchResponse(rsp); Range range = rsp.getAggregations().get("my_range"); assertEquals(3, range.getBuckets().size()); @@ -211,30 +223,37 @@ public void testUnmapped() { } public void testRejectsScript() { - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, - () -> client().prepareSearch("idx").addAggregation( - AggregationBuilders.ipRange("my_range") - .script(new Script(ScriptType.INLINE, "mockscript", "dummy", Collections.emptyMap())) ).get()); + IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, + () -> client().prepareSearch("idx") + .addAggregation( + AggregationBuilders.ipRange("my_range") + .script(new Script(ScriptType.INLINE, "mockscript", "dummy", Collections.emptyMap())) + ) + .get() + ); assertThat(e.getMessage(), containsString("[ip_range] does not support scripts")); } public void testRejectsValueScript() { - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, - () -> client().prepareSearch("idx").addAggregation( - AggregationBuilders.ipRange("my_range") + IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, + () -> client().prepareSearch("idx") + .addAggregation( + AggregationBuilders.ipRange("my_range") .field("ip") - .script(new Script(ScriptType.INLINE, "mockscript", "dummy", Collections.emptyMap())) ).get()); + .script(new Script(ScriptType.INLINE, "mockscript", "dummy", Collections.emptyMap())) + ) + .get() + ); assertThat(e.getMessage(), containsString("[ip_range] does not support scripts")); } - public void testNoRangesInQuery() { + public void testNoRangesInQuery() { try { - client().prepareSearch("idx").addAggregation( - AggregationBuilders.ipRange("my_range") - .field("ip")) - .get(); + client().prepareSearch("idx").addAggregation(AggregationBuilders.ipRange("my_range").field("ip")).get(); fail(); - } catch (SearchPhaseExecutionException spee){ + } catch 
(SearchPhaseExecutionException spee) { Throwable rootCause = spee.getCause().getCause(); assertThat(rootCause, instanceOf(IllegalArgumentException.class)); assertEquals(rootCause.getMessage(), "No [ranges] specified for the [my_range] aggregation"); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/IpTermsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/IpTermsIT.java index 098c86c8661a9..7f3535d76a9aa 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/IpTermsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/IpTermsIT.java @@ -38,12 +38,12 @@ protected Map, Object>> pluginScripts() { Map, Object>> scripts = super.pluginScripts(); scripts.put("doc['ip'].value", vars -> { - Map doc = (Map) vars.get("doc"); + Map doc = (Map) vars.get("doc"); return doc.get("ip"); }); scripts.put("doc['ip']", vars -> { - Map doc = (Map) vars.get("doc"); + Map doc = (Map) vars.get("doc"); return ((ScriptDocValues) doc.get("ip")).get(0); }); @@ -53,15 +53,17 @@ protected Map, Object>> pluginScripts() { public void testScriptValue() throws Exception { assertAcked(prepareCreate("index").setMapping("ip", "type=ip")); - indexRandom(true, - client().prepareIndex("index").setId("1").setSource("ip", "192.168.1.7"), - client().prepareIndex("index").setId("2").setSource("ip", "192.168.1.7"), - client().prepareIndex("index").setId("3").setSource("ip", "2001:db8::2:1")); - - Script script = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, - "doc['ip'].value", Collections.emptyMap()); - SearchResponse response = client().prepareSearch("index").addAggregation( - AggregationBuilders.terms("my_terms").script(script).executionHint(randomExecutionHint())).get(); + indexRandom( + true, + client().prepareIndex("index").setId("1").setSource("ip", "192.168.1.7"), + client().prepareIndex("index").setId("2").setSource("ip", "192.168.1.7"), + client().prepareIndex("index").setId("3").setSource("ip", "2001:db8::2:1") + ); + + Script script = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['ip'].value", Collections.emptyMap()); + SearchResponse response = client().prepareSearch("index") + .addAggregation(AggregationBuilders.terms("my_terms").script(script).executionHint(randomExecutionHint())) + .get(); assertSearchResponse(response); Terms terms = response.getAggregations().get("my_terms"); assertEquals(2, terms.getBuckets().size()); @@ -79,15 +81,17 @@ public void testScriptValue() throws Exception { public void testScriptValues() throws Exception { assertAcked(prepareCreate("index").setMapping("ip", "type=ip")); - indexRandom(true, - client().prepareIndex("index").setId("1").setSource("ip", "192.168.1.7"), - client().prepareIndex("index").setId("2").setSource("ip", "192.168.1.7"), - client().prepareIndex("index").setId("3").setSource("ip", "2001:db8::2:1")); - - Script script = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, - "doc['ip']", Collections.emptyMap()); - SearchResponse response = client().prepareSearch("index").addAggregation( - AggregationBuilders.terms("my_terms").script(script).executionHint(randomExecutionHint())).get(); + indexRandom( + true, + client().prepareIndex("index").setId("1").setSource("ip", "192.168.1.7"), + client().prepareIndex("index").setId("2").setSource("ip", "192.168.1.7"), + client().prepareIndex("index").setId("3").setSource("ip", "2001:db8::2:1") + ); + + Script script = new Script(ScriptType.INLINE, 
CustomScriptPlugin.NAME, "doc['ip']", Collections.emptyMap()); + SearchResponse response = client().prepareSearch("index") + .addAggregation(AggregationBuilders.terms("my_terms").script(script).executionHint(randomExecutionHint())) + .get(); assertSearchResponse(response); Terms terms = response.getAggregations().get("my_terms"); assertEquals(2, terms.getBuckets().size()); @@ -105,13 +109,16 @@ public void testScriptValues() throws Exception { public void testMissingValue() throws Exception { assertAcked(prepareCreate("index").setMapping("ip", "type=ip")); - indexRandom(true, + indexRandom( + true, client().prepareIndex("index").setId("1").setSource("ip", "192.168.1.7"), client().prepareIndex("index").setId("2").setSource("ip", "192.168.1.7"), client().prepareIndex("index").setId("3").setSource("ip", "127.0.0.1"), - client().prepareIndex("index").setId("4").setSource("not_ip", "something")); - SearchResponse response = client().prepareSearch("index").addAggregation(AggregationBuilders - .terms("my_terms").field("ip").missing("127.0.0.1").executionHint(randomExecutionHint())).get(); + client().prepareIndex("index").setId("4").setSource("not_ip", "something") + ); + SearchResponse response = client().prepareSearch("index") + .addAggregation(AggregationBuilders.terms("my_terms").field("ip").missing("127.0.0.1").executionHint(randomExecutionHint())) + .get(); assertSearchResponse(response); Terms terms = response.getAggregations().get("my_terms"); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/LongTermsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/LongTermsIT.java index 00a738ee8d58b..f0e6fbde555e2 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/LongTermsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/LongTermsIT.java @@ -109,22 +109,32 @@ public void setupSuiteScopeCluster() throws Exception { createIndex("idx", "high_card_idx"); IndexRequestBuilder[] lowCardBuilders = new IndexRequestBuilder[NUM_DOCS]; for (int i = 0; i < lowCardBuilders.length; i++) { - lowCardBuilders[i] = client().prepareIndex("idx").setSource(jsonBuilder() - .startObject() - .field(SINGLE_VALUED_FIELD_NAME, i) - .startArray(MULTI_VALUED_FIELD_NAME).value(i).value(i + 1).endArray() - .field("num_tag", i < lowCardBuilders.length / 2 + 1 ? 1 : 0) // used to test order by single-bucket sub agg - .field("constant", 1) - .endObject()); + lowCardBuilders[i] = client().prepareIndex("idx") + .setSource( + jsonBuilder().startObject() + .field(SINGLE_VALUED_FIELD_NAME, i) + .startArray(MULTI_VALUED_FIELD_NAME) + .value(i) + .value(i + 1) + .endArray() + .field("num_tag", i < lowCardBuilders.length / 2 + 1 ? 1 : 0) // used to test order by single-bucket sub agg + .field("constant", 1) + .endObject() + ); } indexRandom(true, lowCardBuilders); IndexRequestBuilder[] highCardBuilders = new IndexRequestBuilder[100]; // TODO randomize the size? 
for (int i = 0; i < highCardBuilders.length; i++) { - highCardBuilders[i] = client().prepareIndex("high_card_idx").setSource(jsonBuilder() - .startObject() - .field(SINGLE_VALUED_FIELD_NAME, i) - .startArray(MULTI_VALUED_FIELD_NAME).value(i).value(i + 1).endArray() - .endObject()); + highCardBuilders[i] = client().prepareIndex("high_card_idx") + .setSource( + jsonBuilder().startObject() + .field(SINGLE_VALUED_FIELD_NAME, i) + .startArray(MULTI_VALUED_FIELD_NAME) + .value(i) + .value(i + 1) + .endArray() + .endObject() + ); } indexRandom(true, highCardBuilders); @@ -133,10 +143,11 @@ public void setupSuiteScopeCluster() throws Exception { assertAcked(prepareCreate("empty_bucket_idx").setMapping(SINGLE_VALUED_FIELD_NAME, "type=integer")); List builders = new ArrayList<>(); for (int i = 0; i < 2; i++) { - builders.add(client().prepareIndex("empty_bucket_idx").setId(""+i).setSource(jsonBuilder() - .startObject() - .field(SINGLE_VALUED_FIELD_NAME, i * 2) - .endObject())); + builders.add( + client().prepareIndex("empty_bucket_idx") + .setId("" + i) + .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, i * 2).endObject()) + ); } getMultiSortDocs(builders); @@ -192,67 +203,47 @@ private void getMultiSortDocs(List builders) throws IOExcep createIndex("sort_idx"); for (int i = 1; i <= 3; i++) { - builders.add(client().prepareIndex("sort_idx").setSource(jsonBuilder() - .startObject() - .field(SINGLE_VALUED_FIELD_NAME, 1) - .field("l", 1) - .field("d", i) - .endObject())); - builders.add(client().prepareIndex("sort_idx").setSource(jsonBuilder() - .startObject() - .field(SINGLE_VALUED_FIELD_NAME, 2) - .field("l", 2) - .field("d", i) - .endObject())); + builders.add( + client().prepareIndex("sort_idx") + .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, 1).field("l", 1).field("d", i).endObject()) + ); + builders.add( + client().prepareIndex("sort_idx") + .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, 2).field("l", 2).field("d", i).endObject()) + ); } - builders.add(client().prepareIndex("sort_idx").setSource(jsonBuilder() - .startObject() - .field(SINGLE_VALUED_FIELD_NAME, 3) - .field("l", 3) - .field("d", 1) - .endObject())); - builders.add(client().prepareIndex("sort_idx").setSource(jsonBuilder() - .startObject() - .field(SINGLE_VALUED_FIELD_NAME, 3) - .field("l", 3) - .field("d", 2) - .endObject())); - builders.add(client().prepareIndex("sort_idx").setSource(jsonBuilder() - .startObject() - .field(SINGLE_VALUED_FIELD_NAME, 4) - .field("l", 3) - .field("d", 1) - .endObject())); - builders.add(client().prepareIndex("sort_idx").setSource(jsonBuilder() - .startObject() - .field(SINGLE_VALUED_FIELD_NAME, 4) - .field("l", 3) - .field("d", 3) - .endObject())); - builders.add(client().prepareIndex("sort_idx").setSource(jsonBuilder() - .startObject() - .field(SINGLE_VALUED_FIELD_NAME, 5) - .field("l", 5) - .field("d", 1) - .endObject())); - builders.add(client().prepareIndex("sort_idx").setSource(jsonBuilder() - .startObject() - .field(SINGLE_VALUED_FIELD_NAME, 5) - .field("l", 5) - .field("d", 2) - .endObject())); - builders.add(client().prepareIndex("sort_idx").setSource(jsonBuilder() - .startObject() - .field(SINGLE_VALUED_FIELD_NAME, 6) - .field("l", 5) - .field("d", 1) - .endObject())); - builders.add(client().prepareIndex("sort_idx").setSource(jsonBuilder() - .startObject() - .field(SINGLE_VALUED_FIELD_NAME, 7) - .field("l", 5) - .field("d", 1) - .endObject())); + builders.add( + client().prepareIndex("sort_idx") + 
.setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, 3).field("l", 3).field("d", 1).endObject()) + ); + builders.add( + client().prepareIndex("sort_idx") + .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, 3).field("l", 3).field("d", 2).endObject()) + ); + builders.add( + client().prepareIndex("sort_idx") + .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, 4).field("l", 3).field("d", 1).endObject()) + ); + builders.add( + client().prepareIndex("sort_idx") + .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, 4).field("l", 3).field("d", 3).endObject()) + ); + builders.add( + client().prepareIndex("sort_idx") + .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, 5).field("l", 5).field("d", 1).endObject()) + ); + builders.add( + client().prepareIndex("sort_idx") + .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, 5).field("l", 5).field("d", 2).endObject()) + ); + builders.add( + client().prepareIndex("sort_idx") + .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, 6).field("l", 5).field("d", 1).endObject()) + ); + builders.add( + client().prepareIndex("sort_idx") + .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, 7).field("l", 5).field("d", 1).endObject()) + ); } private String key(Terms.Bucket bucket) { @@ -261,14 +252,17 @@ private String key(Terms.Bucket bucket) { // the main purpose of this test is to make sure we're not allocating 2GB of memory per shard public void testSizeIsZero() { - IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, - () -> client().prepareSearch("high_card_idx") - .addAggregation(terms("terms") - .field(SINGLE_VALUED_FIELD_NAME) + IllegalArgumentException exception = expectThrows( + IllegalArgumentException.class, + () -> client().prepareSearch("high_card_idx") + .addAggregation( + terms("terms").field(SINGLE_VALUED_FIELD_NAME) .collectMode(randomFrom(SubAggCollectionMode.values())) .minDocCount(randomInt(1)) - .size(0)) - .get()); + .size(0) + ) + .get() + ); assertThat(exception.getMessage(), containsString("[size] must be greater than 0. 
Found [0] in [terms]")); } @@ -283,7 +277,8 @@ public void testMultiValueFieldWithPartitionedFiltering() throws Exception { private void runTestFieldWithPartitionedFiltering(String field) throws Exception { // Find total number of unique terms SearchResponse allResponse = client().prepareSearch("idx") - .addAggregation(terms("terms").field(field).collectMode(randomFrom(SubAggCollectionMode.values()))).get(); + .addAggregation(terms("terms").field(field).collectMode(randomFrom(SubAggCollectionMode.values()))) + .get(); assertSearchResponse(allResponse); Terms terms = allResponse.getAggregations().get("terms"); assertThat(terms, notNullValue()); @@ -295,10 +290,12 @@ private void runTestFieldWithPartitionedFiltering(String field) throws Exception Set foundTerms = new HashSet<>(); for (int partition = 0; partition < numPartitions; partition++) { SearchResponse response = client().prepareSearch("idx") - .addAggregation( - terms("terms").field(field).includeExclude(new IncludeExclude(partition, numPartitions)) - .collectMode(randomFrom(SubAggCollectionMode.values()))) - .get(); + .addAggregation( + terms("terms").field(field) + .includeExclude(new IncludeExclude(partition, numPartitions)) + .collectMode(randomFrom(SubAggCollectionMode.values())) + ) + .get(); assertSearchResponse(response); terms = response.getAggregations().get("terms"); assertThat(terms, notNullValue()); @@ -313,15 +310,15 @@ private void runTestFieldWithPartitionedFiltering(String field) throws Exception public void testSingleValuedFieldWithValueScript() throws Exception { SearchResponse response = client().prepareSearch("idx") - .addAggregation(terms("terms") - .field(SINGLE_VALUED_FIELD_NAME) - .collectMode(randomFrom(SubAggCollectionMode.values())) - .script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_value + 1", Collections.emptyMap()))) - .get(); + .addAggregation( + terms("terms").field(SINGLE_VALUED_FIELD_NAME) + .collectMode(randomFrom(SubAggCollectionMode.values())) + .script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_value + 1", Collections.emptyMap())) + ) + .get(); assertSearchResponse(response); - Terms terms = response.getAggregations().get("terms"); assertThat(terms, notNullValue()); assertThat(terms.getName(), equalTo("terms")); @@ -330,23 +327,23 @@ public void testSingleValuedFieldWithValueScript() throws Exception { for (int i = 0; i < 5; i++) { Terms.Bucket bucket = terms.getBucketByKey("" + (i + 1d)); assertThat(bucket, notNullValue()); - assertThat(key(bucket), equalTo("" + (i+1d))); - assertThat(bucket.getKeyAsNumber().intValue(), equalTo(i+1)); + assertThat(key(bucket), equalTo("" + (i + 1d))); + assertThat(bucket.getKeyAsNumber().intValue(), equalTo(i + 1)); assertThat(bucket.getDocCount(), equalTo(1L)); } } public void testMultiValuedFieldWithValueScript() throws Exception { SearchResponse response = client().prepareSearch("idx") - .addAggregation(terms("terms") - .field(MULTI_VALUED_FIELD_NAME) - .collectMode(randomFrom(SubAggCollectionMode.values())) - .script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_value - 1", Collections.emptyMap()))) - .get(); + .addAggregation( + terms("terms").field(MULTI_VALUED_FIELD_NAME) + .collectMode(randomFrom(SubAggCollectionMode.values())) + .script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_value - 1", Collections.emptyMap())) + ) + .get(); assertSearchResponse(response); - Terms terms = response.getAggregations().get("terms"); assertThat(terms, notNullValue()); assertThat(terms.getName(), equalTo("terms")); 
@@ -355,8 +352,8 @@ public void testMultiValuedFieldWithValueScript() throws Exception { for (int i = 0; i < 6; i++) { Terms.Bucket bucket = terms.getBucketByKey("" + (i - 1d)); assertThat(bucket, notNullValue()); - assertThat(key(bucket), equalTo("" + (i-1d))); - assertThat(bucket.getKeyAsNumber().intValue(), equalTo(i-1)); + assertThat(key(bucket), equalTo("" + (i - 1d))); + assertThat(bucket.getKeyAsNumber().intValue(), equalTo(i - 1)); if (i == 0 || i == 5) { assertThat(bucket.getDocCount(), equalTo(1L)); } else { @@ -367,16 +364,15 @@ public void testMultiValuedFieldWithValueScript() throws Exception { public void testMultiValuedFieldWithValueScriptNotUnique() throws Exception { SearchResponse response = client().prepareSearch("idx") - .addAggregation(terms("terms") - .field(MULTI_VALUED_FIELD_NAME) - .collectMode(randomFrom(SubAggCollectionMode.values())) - .script(new Script( - ScriptType.INLINE, CustomScriptPlugin.NAME, "floor(_value / 1000 + 1)", Collections.emptyMap()))) - .get(); + .addAggregation( + terms("terms").field(MULTI_VALUED_FIELD_NAME) + .collectMode(randomFrom(SubAggCollectionMode.values())) + .script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "floor(_value / 1000 + 1)", Collections.emptyMap())) + ) + .get(); assertSearchResponse(response); - Terms terms = response.getAggregations().get("terms"); assertThat(terms, notNullValue()); assertThat(terms.getName(), equalTo("terms")); @@ -407,18 +403,19 @@ public void testMultiValuedFieldWithValueScriptNotUnique() throws Exception { */ public void testScriptSingleValue() throws Exception { - Script script = - new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['" + SINGLE_VALUED_FIELD_NAME + "'].value", Collections.emptyMap()); + Script script = new Script( + ScriptType.INLINE, + CustomScriptPlugin.NAME, + "doc['" + SINGLE_VALUED_FIELD_NAME + "'].value", + Collections.emptyMap() + ); SearchResponse response = client().prepareSearch("idx") - .addAggregation(terms("terms") - .collectMode(randomFrom(SubAggCollectionMode.values())) - .script(script)) - .get(); + .addAggregation(terms("terms").collectMode(randomFrom(SubAggCollectionMode.values())).script(script)) + .get(); assertSearchResponse(response); - Terms terms = response.getAggregations().get("terms"); assertThat(terms, notNullValue()); assertThat(terms.getName(), equalTo("terms")); @@ -435,18 +432,19 @@ public void testScriptSingleValue() throws Exception { } public void testScriptMultiValued() throws Exception { - Script script = - new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['" + MULTI_VALUED_FIELD_NAME + "']", Collections.emptyMap()); + Script script = new Script( + ScriptType.INLINE, + CustomScriptPlugin.NAME, + "doc['" + MULTI_VALUED_FIELD_NAME + "']", + Collections.emptyMap() + ); SearchResponse response = client().prepareSearch("idx") - .addAggregation(terms("terms") - .collectMode(randomFrom(SubAggCollectionMode.values())) - .script(script)) - .get(); + .addAggregation(terms("terms").collectMode(randomFrom(SubAggCollectionMode.values())).script(script)) + .get(); assertSearchResponse(response); - Terms terms = response.getAggregations().get("terms"); assertThat(terms, notNullValue()); assertThat(terms.getName(), equalTo("terms")); @@ -467,14 +465,11 @@ public void testScriptMultiValued() throws Exception { public void testPartiallyUnmapped() throws Exception { SearchResponse response = client().prepareSearch("idx_unmapped", "idx") - .addAggregation(terms("terms") - .field(SINGLE_VALUED_FIELD_NAME) - 
.collectMode(randomFrom(SubAggCollectionMode.values()))) - .get(); + .addAggregation(terms("terms").field(SINGLE_VALUED_FIELD_NAME).collectMode(randomFrom(SubAggCollectionMode.values()))) + .get(); assertSearchResponse(response); - Terms terms = response.getAggregations().get("terms"); assertThat(terms, notNullValue()); assertThat(terms.getName(), equalTo("terms")); @@ -491,15 +486,13 @@ public void testPartiallyUnmapped() throws Exception { public void testPartiallyUnmappedWithFormat() throws Exception { SearchResponse response = client().prepareSearch("idx_unmapped", "idx") - .addAggregation(terms("terms") - .field(SINGLE_VALUED_FIELD_NAME) - .collectMode(randomFrom(SubAggCollectionMode.values())) - .format("0000")) - .get(); + .addAggregation( + terms("terms").field(SINGLE_VALUED_FIELD_NAME).collectMode(randomFrom(SubAggCollectionMode.values())).format("0000") + ) + .get(); assertSearchResponse(response); - Terms terms = response.getAggregations().get("terms"); assertThat(terms, notNullValue()); assertThat(terms.getName(), equalTo("terms")); @@ -518,16 +511,14 @@ public void testPartiallyUnmappedWithFormat() throws Exception { public void testSingleValuedFieldOrderedBySingleValueSubAggregationAscWithTermsSubAgg() throws Exception { boolean asc = true; SearchResponse response = client().prepareSearch("idx") - .addAggregation(terms("terms") - .field(SINGLE_VALUED_FIELD_NAME) - .collectMode(randomFrom(SubAggCollectionMode.values())) - .order(BucketOrder.aggregation("avg_i", asc)) - .subAggregation( - avg("avg_i").field(SINGLE_VALUED_FIELD_NAME)) - .subAggregation( - terms("subTerms").field(MULTI_VALUED_FIELD_NAME).collectMode(randomFrom(SubAggCollectionMode.values()))) - ).get(); - + .addAggregation( + terms("terms").field(SINGLE_VALUED_FIELD_NAME) + .collectMode(randomFrom(SubAggCollectionMode.values())) + .order(BucketOrder.aggregation("avg_i", asc)) + .subAggregation(avg("avg_i").field(SINGLE_VALUED_FIELD_NAME)) + .subAggregation(terms("subTerms").field(MULTI_VALUED_FIELD_NAME).collectMode(randomFrom(SubAggCollectionMode.values()))) + ) + .get(); assertSearchResponse(response); @@ -562,13 +553,13 @@ public void testSingleValuedFieldOrderedBySingleValueSubAggregationAscWithTermsS public void testSingleValuedFieldOrderedBySingleBucketSubAggregationAsc() throws Exception { boolean asc = randomBoolean(); SearchResponse response = client().prepareSearch("idx") - .addAggregation(terms("num_tags") - .field("num_tag") - .collectMode(randomFrom(SubAggCollectionMode.values())) - .order(BucketOrder.aggregation("filter", asc)) - .subAggregation(filter("filter", QueryBuilders.matchAllQuery())) - ).get(); - + .addAggregation( + terms("num_tags").field("num_tag") + .collectMode(randomFrom(SubAggCollectionMode.values())) + .order(BucketOrder.aggregation("filter", asc)) + .subAggregation(filter("filter", QueryBuilders.matchAllQuery())) + ) + .get(); assertSearchResponse(response); @@ -599,15 +590,17 @@ public void testSingleValuedFieldOrderedBySingleBucketSubAggregationAsc() throws public void testSingleValuedFieldOrderedBySubAggregationAscMultiHierarchyLevels() throws Exception { boolean asc = randomBoolean(); SearchResponse response = client().prepareSearch("idx") - .addAggregation(terms("tags") - .field("num_tag") - .collectMode(randomFrom(SubAggCollectionMode.values())) - .order(BucketOrder.aggregation("filter1>filter2>max", asc)) - .subAggregation(filter("filter1", QueryBuilders.matchAllQuery()).subAggregation( - filter("filter2", QueryBuilders.matchAllQuery()) - 
.subAggregation(max("max").field(SINGLE_VALUED_FIELD_NAME)))) - ).get(); - + .addAggregation( + terms("tags").field("num_tag") + .collectMode(randomFrom(SubAggCollectionMode.values())) + .order(BucketOrder.aggregation("filter1>filter2>max", asc)) + .subAggregation( + filter("filter1", QueryBuilders.matchAllQuery()).subAggregation( + filter("filter2", QueryBuilders.matchAllQuery()).subAggregation(max("max").field(SINGLE_VALUED_FIELD_NAME)) + ) + ) + ) + .get(); assertSearchResponse(response); @@ -654,11 +647,12 @@ public void testSingleValuedFieldOrderedByMissingSubAggregation() throws Excepti for (String index : Arrays.asList("idx", "idx_unmapped")) { try { client().prepareSearch(index) - .addAggregation(terms("terms") - .field(SINGLE_VALUED_FIELD_NAME) - .collectMode(randomFrom(SubAggCollectionMode.values())) - .order(BucketOrder.aggregation("avg_i", true)) - ).get(); + .addAggregation( + terms("terms").field(SINGLE_VALUED_FIELD_NAME) + .collectMode(randomFrom(SubAggCollectionMode.values())) + .order(BucketOrder.aggregation("avg_i", true)) + ) + .get(); fail("Expected search to fail when trying to sort terms aggregation by sug-aggregation that doesn't exist"); @@ -672,13 +666,13 @@ public void testSingleValuedFieldOrderedByNonMetricsOrMultiBucketSubAggregation( for (String index : Arrays.asList("idx", "idx_unmapped")) { try { client().prepareSearch(index) - .addAggregation(terms("terms") - .field(SINGLE_VALUED_FIELD_NAME) - .collectMode(randomFrom(SubAggCollectionMode.values())) - .order(BucketOrder.aggregation("num_tags", true)) - .subAggregation(terms("num_tags").field("num_tags") - .collectMode(randomFrom(SubAggCollectionMode.values()))) - ).get(); + .addAggregation( + terms("terms").field(SINGLE_VALUED_FIELD_NAME) + .collectMode(randomFrom(SubAggCollectionMode.values())) + .order(BucketOrder.aggregation("num_tags", true)) + .subAggregation(terms("num_tags").field("num_tags").collectMode(randomFrom(SubAggCollectionMode.values()))) + ) + .get(); fail("Expected search to fail when trying to sort terms aggregation by sug-aggregation which is not of a metrics type"); @@ -692,15 +686,18 @@ public void testSingleValuedFieldOrderedByMultiValuedSubAggregationWithUnknownMe for (String index : Arrays.asList("idx", "idx_unmapped")) { try { client().prepareSearch(index) - .addAggregation(terms("terms") - .field(SINGLE_VALUED_FIELD_NAME) - .collectMode(randomFrom(SubAggCollectionMode.values())) - .order(BucketOrder.aggregation("stats.foo", true)) - .subAggregation(stats("stats").field(SINGLE_VALUED_FIELD_NAME)) - ).get(); + .addAggregation( + terms("terms").field(SINGLE_VALUED_FIELD_NAME) + .collectMode(randomFrom(SubAggCollectionMode.values())) + .order(BucketOrder.aggregation("stats.foo", true)) + .subAggregation(stats("stats").field(SINGLE_VALUED_FIELD_NAME)) + ) + .get(); - fail("Expected search to fail when trying to sort terms aggregation by multi-valued sug-aggregation " + - "with an unknown specified metric to order by"); + fail( + "Expected search to fail when trying to sort terms aggregation by multi-valued sug-aggregation " + + "with an unknown specified metric to order by" + ); } catch (ElasticsearchException e) { // expected @@ -712,15 +709,18 @@ public void testSingleValuedFieldOrderedByMultiValuedSubAggregationWithoutMetric for (String index : Arrays.asList("idx", "idx_unmapped")) { try { client().prepareSearch(index) - .addAggregation(terms("terms") - .field(SINGLE_VALUED_FIELD_NAME) - .collectMode(randomFrom(SubAggCollectionMode.values())) - .order(BucketOrder.aggregation("stats", 
true)) - .subAggregation(stats("stats").field(SINGLE_VALUED_FIELD_NAME)) - ).get(); + .addAggregation( + terms("terms").field(SINGLE_VALUED_FIELD_NAME) + .collectMode(randomFrom(SubAggCollectionMode.values())) + .order(BucketOrder.aggregation("stats", true)) + .subAggregation(stats("stats").field(SINGLE_VALUED_FIELD_NAME)) + ) + .get(); - fail("Expected search to fail when trying to sort terms aggregation by multi-valued sug-aggregation " + - "where the metric name is not specified"); + fail( + "Expected search to fail when trying to sort terms aggregation by multi-valued sug-aggregation " + + "where the metric name is not specified" + ); } catch (ElasticsearchException e) { // expected @@ -731,12 +731,13 @@ public void testSingleValuedFieldOrderedByMultiValuedSubAggregationWithoutMetric public void testSingleValuedFieldOrderedByMultiValueSubAggregationAsc() throws Exception { boolean asc = true; SearchResponse response = client().prepareSearch("idx") - .addAggregation(terms("terms") - .field(SINGLE_VALUED_FIELD_NAME) - .collectMode(randomFrom(SubAggCollectionMode.values())) - .order(BucketOrder.aggregation("stats.avg", asc)) - .subAggregation(stats("stats").field(SINGLE_VALUED_FIELD_NAME)) - ).get(); + .addAggregation( + terms("terms").field(SINGLE_VALUED_FIELD_NAME) + .collectMode(randomFrom(SubAggCollectionMode.values())) + .order(BucketOrder.aggregation("stats.avg", asc)) + .subAggregation(stats("stats").field(SINGLE_VALUED_FIELD_NAME)) + ) + .get(); assertSearchResponse(response); @@ -761,12 +762,13 @@ public void testSingleValuedFieldOrderedByMultiValueSubAggregationAsc() throws E public void testSingleValuedFieldOrderedByMultiValueSubAggregationDesc() throws Exception { boolean asc = false; SearchResponse response = client().prepareSearch("idx") - .addAggregation(terms("terms") - .field(SINGLE_VALUED_FIELD_NAME) - .collectMode(randomFrom(SubAggCollectionMode.values())) - .order(BucketOrder.aggregation("stats.avg", asc)) - .subAggregation(stats("stats").field(SINGLE_VALUED_FIELD_NAME)) - ).get(); + .addAggregation( + terms("terms").field(SINGLE_VALUED_FIELD_NAME) + .collectMode(randomFrom(SubAggCollectionMode.values())) + .order(BucketOrder.aggregation("stats.avg", asc)) + .subAggregation(stats("stats").field(SINGLE_VALUED_FIELD_NAME)) + ) + .get(); assertSearchResponse(response); @@ -791,12 +793,13 @@ public void testSingleValuedFieldOrderedByMultiValueSubAggregationDesc() throws public void testSingleValuedFieldOrderedByMultiValueExtendedStatsAsc() throws Exception { boolean asc = true; SearchResponse response = client().prepareSearch("idx") - .addAggregation(terms("terms") - .field(SINGLE_VALUED_FIELD_NAME) - .collectMode(randomFrom(SubAggCollectionMode.values())) - .order(BucketOrder.aggregation("stats.variance", asc)) - .subAggregation(extendedStats("stats").field(SINGLE_VALUED_FIELD_NAME)) - ).get(); + .addAggregation( + terms("terms").field(SINGLE_VALUED_FIELD_NAME) + .collectMode(randomFrom(SubAggCollectionMode.values())) + .order(BucketOrder.aggregation("stats.variance", asc)) + .subAggregation(extendedStats("stats").field(SINGLE_VALUED_FIELD_NAME)) + ) + .get(); assertSearchResponse(response); @@ -845,9 +848,12 @@ public void testSingleValuedFieldOrderedBySingleValueSubAggregationAscSingleValu public void testSingleValuedFieldOrderedByThreeCriteria() throws Exception { long[] expectedKeys = new long[] { 2, 1, 4, 5, 3, 6, 7 }; - assertMultiSortResponse(expectedKeys, BucketOrder.count(false), - BucketOrder.aggregation("sum_d", false), - BucketOrder.aggregation("avg_l", 
false)); + assertMultiSortResponse( + expectedKeys, + BucketOrder.count(false), + BucketOrder.aggregation("sum_d", false), + BucketOrder.aggregation("avg_l", false) + ); } public void testSingleValuedFieldOrderedBySingleValueSubAggregationAscAsCompound() throws Exception { @@ -857,13 +863,14 @@ public void testSingleValuedFieldOrderedBySingleValueSubAggregationAscAsCompound private void assertMultiSortResponse(long[] expectedKeys, BucketOrder... order) { SearchResponse response = client().prepareSearch("sort_idx") - .addAggregation(terms("terms") - .field(SINGLE_VALUED_FIELD_NAME) - .collectMode(randomFrom(SubAggCollectionMode.values())) - .order(BucketOrder.compound(order)) - .subAggregation(avg("avg_l").field("l")) - .subAggregation(sum("sum_d").field("d")) - ).get(); + .addAggregation( + terms("terms").field(SINGLE_VALUED_FIELD_NAME) + .collectMode(randomFrom(SubAggCollectionMode.values())) + .order(BucketOrder.compound(order)) + .subAggregation(avg("avg_l").field("l")) + .subAggregation(sum("sum_d").field("d")) + ) + .get(); assertSearchResponse(response); @@ -896,47 +903,132 @@ public void testOtherDocCount() { * Ensure requests using nondeterministic scripts do not get cached. */ public void testScriptCaching() throws Exception { - assertAcked(prepareCreate("cache_test_idx").setMapping("d", "type=long") + assertAcked( + prepareCreate("cache_test_idx").setMapping("d", "type=long") .setSettings(Settings.builder().put("requests.cache.enable", true).put("number_of_shards", 1).put("number_of_replicas", 1)) - .get()); - indexRandom(true, client().prepareIndex("cache_test_idx").setId("1").setSource("s", 1), - client().prepareIndex("cache_test_idx").setId("2").setSource("s", 2)); + .get() + ); + indexRandom( + true, + client().prepareIndex("cache_test_idx").setId("1").setSource("s", 1), + client().prepareIndex("cache_test_idx").setId("2").setSource("s", 2) + ); // Make sure we are starting with a clear cache - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getHitCount(), equalTo(0L)); - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getMissCount(), equalTo(0L)); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getHitCount(), + equalTo(0L) + ); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getMissCount(), + equalTo(0L) + ); // Test that a request using a nondeterministic script does not get cached - SearchResponse r = client().prepareSearch("cache_test_idx").setSize(0).addAggregation( - terms("terms").field("d").script( - new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "Math.random()", Collections.emptyMap()))).get(); + SearchResponse r = client().prepareSearch("cache_test_idx") + .setSize(0) + .addAggregation( + terms("terms").field("d") + .script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "Math.random()", Collections.emptyMap())) + ) + .get(); assertSearchResponse(r); - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getHitCount(), equalTo(0L)); - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getMissCount(), equalTo(0L)); + assertThat( + 
client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getHitCount(), + equalTo(0L) + ); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getMissCount(), + equalTo(0L) + ); // Test that a request using a deterministic script gets cached - r = client().prepareSearch("cache_test_idx").setSize(0).addAggregation( - terms("terms").field("d").script( - new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_value + 1", Collections.emptyMap()))).get(); + r = client().prepareSearch("cache_test_idx") + .setSize(0) + .addAggregation( + terms("terms").field("d") + .script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_value + 1", Collections.emptyMap())) + ) + .get(); assertSearchResponse(r); - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getHitCount(), equalTo(0L)); - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getMissCount(), equalTo(1L)); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getHitCount(), + equalTo(0L) + ); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getMissCount(), + equalTo(1L) + ); // Ensure that non-scripted requests are cached as normal r = client().prepareSearch("cache_test_idx").setSize(0).addAggregation(terms("terms").field("d")).get(); assertSearchResponse(r); - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getHitCount(), equalTo(0L)); - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getMissCount(), equalTo(2L)); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getHitCount(), + equalTo(0L) + ); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getMissCount(), + equalTo(2L) + ); } } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/MinDocCountIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/MinDocCountIT.java index 4dd5b4ec6cefe..5018e8f300a29 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/MinDocCountIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/MinDocCountIT.java @@ -94,8 +94,7 @@ protected Map, Object>> pluginScripts() { @Override public void setupSuiteScopeCluster() throws Exception { - assertAcked(client().admin().indices().prepareCreate("idx") - .setMapping("s", "type=keyword").get()); + assertAcked(client().admin().indices().prepareCreate("idx").setMapping("s", "type=keyword").get()); cardinality = randomIntBetween(8, 30); final List indexRequests = new ArrayList<>(); @@ -116,14 +115,18 @@ public void setupSuiteScopeCluster() throws Exception { String dateTerm = DateFormatter.forPattern("yyyy-MM-dd").format(time); final int frequency = randomBoolean() ? 
1 : randomIntBetween(2, 20); for (int j = 0; j < frequency; ++j) { - indexRequests.add(client().prepareIndex("idx").setSource(jsonBuilder() - .startObject() - .field("s", stringTerm) - .field("l", longTerm) - .field("d", doubleTerm) - .field("date", dateTerm) - .field("match", randomBoolean()) - .endObject())); + indexRequests.add( + client().prepareIndex("idx") + .setSource( + jsonBuilder().startObject() + .field("s", stringTerm) + .field("l", longTerm) + .field("d", doubleTerm) + .field("date", dateTerm) + .field("match", randomBoolean()) + .endObject() + ) + ); } } cardinality = stringTerms.size(); @@ -142,10 +145,17 @@ TermsAggregationBuilder apply(TermsAggregationBuilder builder, String field) { YES { @Override TermsAggregationBuilder apply(TermsAggregationBuilder builder, String field) { - return builder.script(new org.elasticsearch.script.Script(ScriptType.INLINE, - CustomScriptPlugin.NAME, "doc['" + field + "']", Collections.emptyMap())); + return builder.script( + new org.elasticsearch.script.Script( + ScriptType.INLINE, + CustomScriptPlugin.NAME, + "doc['" + field + "']", + Collections.emptyMap() + ) + ); } }; + abstract TermsAggregationBuilder apply(TermsAggregationBuilder builder, String field); } @@ -299,15 +309,17 @@ private void testMinDocCountOnTerms(String field, Script script, BucketOrder ord private void testMinDocCountOnTerms(String field, Script script, BucketOrder order, String include, boolean retry) throws Exception { // all terms final SearchResponse allTermsResponse = client().prepareSearch("idx") - .setSize(0) - .setQuery(QUERY) - .addAggregation(script.apply(terms("terms"), field) - .collectMode(randomFrom(SubAggCollectionMode.values())) - .executionHint(randomExecutionHint()) - .order(order) - .size(cardinality + randomInt(10)) - .minDocCount(0)) - .get(); + .setSize(0) + .setQuery(QUERY) + .addAggregation( + script.apply(terms("terms"), field) + .collectMode(randomFrom(SubAggCollectionMode.values())) + .executionHint(randomExecutionHint()) + .order(order) + .size(cardinality + randomInt(10)) + .minDocCount(0) + ) + .get(); assertAllSuccessful(allTermsResponse); final Terms allTerms = allTermsResponse.getAggregations().get("terms"); @@ -316,16 +328,19 @@ private void testMinDocCountOnTerms(String field, Script script, BucketOrder ord for (long minDocCount = 0; minDocCount < 20; ++minDocCount) { final int size = randomIntBetween(1, cardinality + 2); final SearchRequest request = client().prepareSearch("idx") - .setSize(0) - .setQuery(QUERY) - .addAggregation(script.apply(terms("terms"), field) - .collectMode(randomFrom(SubAggCollectionMode.values())) - .executionHint(randomExecutionHint()) - .order(order) - .size(size) - .includeExclude(include == null ? null : new IncludeExclude(include, null)) - .shardSize(cardinality + randomInt(10)) - .minDocCount(minDocCount)).request(); + .setSize(0) + .setQuery(QUERY) + .addAggregation( + script.apply(terms("terms"), field) + .collectMode(randomFrom(SubAggCollectionMode.values())) + .executionHint(randomExecutionHint()) + .order(order) + .size(size) + .includeExclude(include == null ? 
null : new IncludeExclude(include, null)) + .shardSize(cardinality + randomInt(10)) + .minDocCount(minDocCount) + ) + .request(); final SearchResponse response = client().search(request).get(); assertAllSuccessful(response); assertSubset(allTerms, (Terms) response.getAggregations().get("terms"), minDocCount, size, include); @@ -367,48 +382,40 @@ public void testDateHistogramKeyDesc() throws Exception { private void testMinDocCountOnHistogram(BucketOrder order) throws Exception { final int interval = randomIntBetween(1, 3); final SearchResponse allResponse = client().prepareSearch("idx") - .setSize(0) - .setQuery(QUERY) - .addAggregation(histogram("histo").field("d").interval(interval).order(order).minDocCount(0)) - .get(); + .setSize(0) + .setQuery(QUERY) + .addAggregation(histogram("histo").field("d").interval(interval).order(order).minDocCount(0)) + .get(); final Histogram allHisto = allResponse.getAggregations().get("histo"); for (long minDocCount = 0; minDocCount < 50; ++minDocCount) { final SearchResponse response = client().prepareSearch("idx") - .setSize(0) - .setQuery(QUERY) - .addAggregation(histogram("histo").field("d").interval(interval).order(order).minDocCount(minDocCount)) - .get(); + .setSize(0) + .setQuery(QUERY) + .addAggregation(histogram("histo").field("d").interval(interval).order(order).minDocCount(minDocCount)) + .get(); assertSubset(allHisto, (Histogram) response.getAggregations().get("histo"), minDocCount); } } private void testMinDocCountOnDateHistogram(BucketOrder order) throws Exception { final SearchResponse allResponse = client().prepareSearch("idx") - .setSize(0) - .setQuery(QUERY) - .addAggregation( - dateHistogram("histo") - .field("date") - .fixedInterval(DateHistogramInterval.DAY) - .order(order) - .minDocCount(0)) - .get(); + .setSize(0) + .setQuery(QUERY) + .addAggregation(dateHistogram("histo").field("date").fixedInterval(DateHistogramInterval.DAY).order(order).minDocCount(0)) + .get(); final Histogram allHisto = allResponse.getAggregations().get("histo"); for (long minDocCount = 0; minDocCount < 50; ++minDocCount) { final SearchResponse response = client().prepareSearch("idx") - .setSize(0) - .setQuery(QUERY) - .addAggregation( - dateHistogram("histo") - .field("date") - .fixedInterval(DateHistogramInterval.DAY) - .order(order) - .minDocCount(minDocCount)) - .get(); + .setSize(0) + .setQuery(QUERY) + .addAggregation( + dateHistogram("histo").field("date").fixedInterval(DateHistogramInterval.DAY).order(order).minDocCount(minDocCount) + ) + .get(); assertSubset(allHisto, response.getAggregations().get("histo"), minDocCount); } } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/NaNSortingIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/NaNSortingIT.java index e2d5bcf41c09a..a072d2ae4a10b 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/NaNSortingIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/NaNSortingIT.java @@ -13,13 +13,13 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.search.aggregations.Aggregation; import org.elasticsearch.search.aggregations.Aggregator.SubAggCollectionMode; +import org.elasticsearch.search.aggregations.BucketOrder; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; import org.elasticsearch.search.aggregations.bucket.terms.Terms; import org.elasticsearch.search.aggregations.metrics.Avg; import 
org.elasticsearch.search.aggregations.metrics.AvgAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.ExtendedStats; import org.elasticsearch.search.aggregations.metrics.ExtendedStatsAggregationBuilder; -import org.elasticsearch.search.aggregations.BucketOrder; import org.elasticsearch.search.aggregations.support.ValuesSource; import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder; import org.elasticsearch.test.ESIntegTestCase; @@ -44,6 +44,7 @@ public AvgAggregationBuilder builder() { factory.field("numeric_field"); return factory; } + @Override public double getValue(Aggregation aggregation) { return ((Avg) aggregation).getValue(); @@ -56,26 +57,30 @@ public ExtendedStatsAggregationBuilder builder() { factory.field("numeric_field"); return factory; } + @Override public String sortKey() { return name + ".variance"; } + @Override public double getValue(Aggregation aggregation) { return ((ExtendedStats) aggregation).getVariance(); } }, - STD_DEVIATION("std_deviation"){ + STD_DEVIATION("std_deviation") { @Override public ExtendedStatsAggregationBuilder builder() { ExtendedStatsAggregationBuilder factory = extendedStats(name); factory.field("numeric_field"); return factory; } + @Override public String sortKey() { return name + ".std_deviation"; } + @Override public double getValue(Aggregation aggregation) { return ((ExtendedStats) aggregation).getStdDeviation(); @@ -88,8 +93,11 @@ public double getValue(Aggregation aggregation) { public String name; - public abstract ValuesSourceAggregationBuilder.LeafOnly> builder(); + public abstract + ValuesSourceAggregationBuilder.LeafOnly< + ValuesSource.Numeric, + ? extends ValuesSourceAggregationBuilder.LeafOnly> + builder(); public String sortKey() { return name; @@ -100,13 +108,14 @@ public String sortKey() { @Override public void setupSuiteScopeCluster() throws Exception { - assertAcked(client().admin().indices().prepareCreate("idx") - .setMapping("string_value", "type=keyword").get()); + assertAcked(client().admin().indices().prepareCreate("idx").setMapping("string_value", "type=keyword").get()); final int numDocs = randomIntBetween(2, 10); for (int i = 0; i < numDocs; ++i) { final long value = randomInt(5); - XContentBuilder source = jsonBuilder().startObject().field("long_value", value).field("double_value", value + 0.05) - .field("string_value", "str_" + value); + XContentBuilder source = jsonBuilder().startObject() + .field("long_value", value) + .field("double_value", value + 0.05) + .field("string_value", "str_" + value); if (randomBoolean()) { source.field("numeric_value", randomDouble()); } @@ -142,9 +151,13 @@ public void testTerms(String fieldName) { final boolean asc = randomBoolean(); SubAggregation agg = randomFrom(SubAggregation.values()); SearchResponse response = client().prepareSearch("idx") - .addAggregation(terms("terms").field(fieldName).collectMode(randomFrom(SubAggCollectionMode.values())) - .subAggregation(agg.builder()).order(BucketOrder.aggregation(agg.sortKey(), asc))) - .get(); + .addAggregation( + terms("terms").field(fieldName) + .collectMode(randomFrom(SubAggCollectionMode.values())) + .subAggregation(agg.builder()) + .order(BucketOrder.aggregation(agg.sortKey(), asc)) + ) + .get(); assertSearchResponse(response); final Terms terms = response.getAggregations().get("terms"); @@ -167,10 +180,13 @@ public void testLongHistogram() { final boolean asc = randomBoolean(); SubAggregation agg = randomFrom(SubAggregation.values()); SearchResponse response = 
client().prepareSearch("idx") - .addAggregation(histogram("histo") - .field("long_value").interval(randomIntBetween(1, 2)) - .subAggregation(agg.builder()).order(BucketOrder.aggregation(agg.sortKey(), asc))) - .get(); + .addAggregation( + histogram("histo").field("long_value") + .interval(randomIntBetween(1, 2)) + .subAggregation(agg.builder()) + .order(BucketOrder.aggregation(agg.sortKey(), asc)) + ) + .get(); assertSearchResponse(response); final Histogram histo = response.getAggregations().get("histo"); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/NestedIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/NestedIT.java index 35c0889896ce7..d12e572c9da6c 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/NestedIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/NestedIT.java @@ -70,8 +70,7 @@ public class NestedIT extends ESIntegTestCase { @Override public void setupSuiteScopeCluster() throws Exception { - assertAcked(prepareCreate("idx") - .setMapping("nested", "type=nested", "incorrect", "type=object")); + assertAcked(prepareCreate("idx").setMapping("nested", "type=nested", "incorrect", "type=object")); ensureGreen("idx"); List builders = new ArrayList<>(); @@ -93,68 +92,90 @@ public void setupSuiteScopeCluster() throws Exception { assertTrue(totalChildren > 0); for (int i = 0; i < numParents; i++) { - XContentBuilder source = jsonBuilder() - .startObject() - .field("value", i + 1) - .startArray("nested"); + XContentBuilder source = jsonBuilder().startObject().field("value", i + 1).startArray("nested"); for (int j = 0; j < numChildren[i]; ++j) { source = source.startObject().field("value", i + 1 + j).endObject(); } source = source.endArray().endObject(); - builders.add(client().prepareIndex("idx").setId(""+i+1).setSource(source)); + builders.add(client().prepareIndex("idx").setId("" + i + 1).setSource(source)); } prepareCreate("empty_bucket_idx").setMapping("value", "type=integer", "nested", "type=nested").get(); ensureGreen("empty_bucket_idx"); for (int i = 0; i < 2; i++) { - builders.add(client().prepareIndex("empty_bucket_idx").setId(""+i).setSource(jsonBuilder() - .startObject() - .field("value", i*2) - .startArray("nested") - .startObject().field("value", i + 1).endObject() - .startObject().field("value", i + 2).endObject() - .startObject().field("value", i + 3).endObject() - .startObject().field("value", i + 4).endObject() - .startObject().field("value", i + 5).endObject() - .endArray() - .endObject())); + builders.add( + client().prepareIndex("empty_bucket_idx") + .setId("" + i) + .setSource( + jsonBuilder().startObject() + .field("value", i * 2) + .startArray("nested") + .startObject() + .field("value", i + 1) + .endObject() + .startObject() + .field("value", i + 2) + .endObject() + .startObject() + .field("value", i + 3) + .endObject() + .startObject() + .field("value", i + 4) + .endObject() + .startObject() + .field("value", i + 5) + .endObject() + .endArray() + .endObject() + ) + ); } - assertAcked(prepareCreate("idx_nested_nested_aggs") - .setMapping(jsonBuilder().startObject().startObject("_doc").startObject("properties") - .startObject("nested1") - .field("type", "nested") - .startObject("properties") - .startObject("nested2") - .field("type", "nested") - .endObject() - .endObject() - .endObject() - .endObject().endObject().endObject())); + assertAcked( + 
prepareCreate("idx_nested_nested_aggs").setMapping( + jsonBuilder().startObject() + .startObject("_doc") + .startObject("properties") + .startObject("nested1") + .field("type", "nested") + .startObject("properties") + .startObject("nested2") + .field("type", "nested") + .endObject() + .endObject() + .endObject() + .endObject() + .endObject() + .endObject() + ) + ); ensureGreen("idx_nested_nested_aggs"); builders.add( - client().prepareIndex("idx_nested_nested_aggs").setId("1") - .setSource(jsonBuilder().startObject() - .startArray("nested1") - .startObject() - .field("a", "a") - .startArray("nested2") - .startObject() - .field("b", 2) - .endObject() - .endArray() - .endObject() - .startObject() - .field("a", "b") - .startArray("nested2") - .startObject() - .field("b", 2) - .endObject() - .endArray() - .endObject() - .endArray() - .endObject()) + client().prepareIndex("idx_nested_nested_aggs") + .setId("1") + .setSource( + jsonBuilder().startObject() + .startArray("nested1") + .startObject() + .field("a", "a") + .startArray("nested2") + .startObject() + .field("b", 2) + .endObject() + .endArray() + .endObject() + .startObject() + .field("a", "b") + .startArray("nested2") + .startObject() + .field("b", 2) + .endObject() + .endArray() + .endObject() + .endArray() + .endObject() + ) ); indexRandom(true, builders); ensureSearchable(); @@ -162,13 +183,11 @@ public void setupSuiteScopeCluster() throws Exception { public void testSimple() throws Exception { SearchResponse response = client().prepareSearch("idx") - .addAggregation(nested("nested", "nested") - .subAggregation(stats("nested_value_stats").field("nested.value"))) - .get(); + .addAggregation(nested("nested", "nested").subAggregation(stats("nested_value_stats").field("nested.value"))) + .get(); assertSearchResponse(response); - double min = Double.POSITIVE_INFINITY; double max = Double.NEGATIVE_INFINITY; long sum = 0; @@ -200,9 +219,8 @@ public void testSimple() throws Exception { public void testNonExistingNestedField() throws Exception { SearchResponse searchResponse = client().prepareSearch("idx") - .addAggregation(nested("nested", "value") - .subAggregation(stats("nested_value_stats").field("nested.value"))) - .get(); + .addAggregation(nested("nested", "value").subAggregation(stats("nested_value_stats").field("nested.value"))) + .get(); Nested nested = searchResponse.getAggregations().get("nested"); assertThat(nested, Matchers.notNullValue()); @@ -212,14 +230,13 @@ public void testNonExistingNestedField() throws Exception { public void testNestedWithSubTermsAgg() throws Exception { SearchResponse response = client().prepareSearch("idx") - .addAggregation(nested("nested", "nested") - .subAggregation(terms("values").field("nested.value").size(100) - .collectMode(aggCollectionMode))) - .get(); + .addAggregation( + nested("nested", "nested").subAggregation(terms("values").field("nested.value").size(100).collectMode(aggCollectionMode)) + ) + .get(); assertSearchResponse(response); - long docCount = 0; long[] counts = new long[numParents + 6]; for (int i = 0; i < numParents; ++i) { @@ -240,7 +257,7 @@ public void testNestedWithSubTermsAgg() throws Exception { assertThat(nested, notNullValue()); assertThat(nested.getName(), equalTo("nested")); assertThat(nested.getDocCount(), equalTo(docCount)); - assertThat(((InternalAggregation)nested).getProperty("_count"), equalTo(docCount)); + assertThat(((InternalAggregation) nested).getProperty("_count"), equalTo(docCount)); assertThat(nested.getAggregations().asList().isEmpty(), is(false)); 
LongTerms values = nested.getAggregations().get("values"); @@ -258,20 +275,21 @@ public void testNestedWithSubTermsAgg() throws Exception { assertEquals(counts[i], bucket.getDocCount()); } } - assertThat(((InternalAggregation)nested).getProperty("values"), sameInstance(values)); + assertThat(((InternalAggregation) nested).getProperty("values"), sameInstance(values)); } public void testNestedAsSubAggregation() throws Exception { SearchResponse response = client().prepareSearch("idx") - .addAggregation(terms("top_values").field("value").size(100) - .collectMode(aggCollectionMode) - .subAggregation(nested("nested", "nested") - .subAggregation(max("max_value").field("nested.value")))) - .get(); + .addAggregation( + terms("top_values").field("value") + .size(100) + .collectMode(aggCollectionMode) + .subAggregation(nested("nested", "nested").subAggregation(max("max_value").field("nested.value"))) + ) + .get(); assertSearchResponse(response); - LongTerms values = response.getAggregations().get("top_values"); assertThat(values, notNullValue()); assertThat(values.getName(), equalTo("top_values")); @@ -291,15 +309,16 @@ public void testNestedAsSubAggregation() throws Exception { public void testNestNestedAggs() throws Exception { SearchResponse response = client().prepareSearch("idx_nested_nested_aggs") - .addAggregation(nested("level1", "nested1") - .subAggregation(terms("a").field("nested1.a.keyword") - .collectMode(aggCollectionMode) - .subAggregation(nested("level2", "nested1.nested2") - .subAggregation(sum("sum").field("nested1.nested2.b"))))) - .get(); + .addAggregation( + nested("level1", "nested1").subAggregation( + terms("a").field("nested1.a.keyword") + .collectMode(aggCollectionMode) + .subAggregation(nested("level2", "nested1.nested2").subAggregation(sum("sum").field("nested1.nested2.b"))) + ) + ) + .get(); assertSearchResponse(response); - Nested level1 = response.getAggregations().get("level1"); assertThat(level1, notNullValue()); assertThat(level1.getName(), equalTo("level1")); @@ -326,10 +345,9 @@ public void testNestNestedAggs() throws Exception { public void testEmptyAggregation() throws Exception { SearchResponse searchResponse = client().prepareSearch("empty_bucket_idx") - .setQuery(matchAllQuery()) - .addAggregation(histogram("histo").field("value").interval(1L).minDocCount(0) - .subAggregation(nested("nested", "nested"))) - .get(); + .setQuery(matchAllQuery()) + .addAggregation(histogram("histo").field("value").interval(1L).minDocCount(0).subAggregation(nested("nested", "nested"))) + .get(); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(2L)); Histogram histo = searchResponse.getAggregations().get("histo"); @@ -345,10 +363,7 @@ public void testEmptyAggregation() throws Exception { public void testNestedOnObjectField() throws Exception { try { - client().prepareSearch("idx") - .setQuery(matchAllQuery()) - .addAggregation(nested("object_field", "incorrect")) - .get(); + client().prepareSearch("idx").setQuery(matchAllQuery()).addAggregation(nested("object_field", "incorrect")).get(); fail(); } catch (SearchPhaseExecutionException e) { assertThat(e.toString(), containsString("[nested] nested path [incorrect] is not nested")); @@ -357,69 +372,108 @@ public void testNestedOnObjectField() throws Exception { // Test based on: https://github.com/elastic/elasticsearch/issues/9280 public void testParentFilterResolvedCorrectly() throws Exception { - XContentBuilder mapping = jsonBuilder().startObject().startObject("_doc").startObject("properties") - 
.startObject("comments") - .field("type", "nested") - .startObject("properties") - .startObject("cid").field("type", "long").endObject() - .startObject("identifier").field("type", "keyword").endObject() - .startObject("tags") - .field("type", "nested") - .startObject("properties") - .startObject("tid").field("type", "long").endObject() - .startObject("name").field("type", "keyword").endObject() - .endObject() - .endObject() - .endObject() - .endObject() - .startObject("dates") - .field("type", "object") - .startObject("properties") - .startObject("day").field("type", "date").field("format", "date_optional_time").endObject() - .startObject("month") - .field("type", "object") - .startObject("properties") - .startObject("end").field("type", "date").field("format", "date_optional_time").endObject() - .startObject("start").field("type", "date").field("format", "date_optional_time").endObject() - .startObject("label").field("type", "keyword").endObject() - .endObject() - .endObject() - .endObject() - .endObject() - .endObject().endObject().endObject(); - assertAcked(prepareCreate("idx2") - .setSettings(Settings.builder().put(SETTING_NUMBER_OF_SHARDS, 1).put(SETTING_NUMBER_OF_REPLICAS, 0)) - .setMapping(mapping)); + XContentBuilder mapping = jsonBuilder().startObject() + .startObject("_doc") + .startObject("properties") + .startObject("comments") + .field("type", "nested") + .startObject("properties") + .startObject("cid") + .field("type", "long") + .endObject() + .startObject("identifier") + .field("type", "keyword") + .endObject() + .startObject("tags") + .field("type", "nested") + .startObject("properties") + .startObject("tid") + .field("type", "long") + .endObject() + .startObject("name") + .field("type", "keyword") + .endObject() + .endObject() + .endObject() + .endObject() + .endObject() + .startObject("dates") + .field("type", "object") + .startObject("properties") + .startObject("day") + .field("type", "date") + .field("format", "date_optional_time") + .endObject() + .startObject("month") + .field("type", "object") + .startObject("properties") + .startObject("end") + .field("type", "date") + .field("format", "date_optional_time") + .endObject() + .startObject("start") + .field("type", "date") + .field("format", "date_optional_time") + .endObject() + .startObject("label") + .field("type", "keyword") + .endObject() + .endObject() + .endObject() + .endObject() + .endObject() + .endObject() + .endObject() + .endObject(); + assertAcked( + prepareCreate("idx2").setSettings(Settings.builder().put(SETTING_NUMBER_OF_SHARDS, 1).put(SETTING_NUMBER_OF_REPLICAS, 0)) + .setMapping(mapping) + ); ensureGreen("idx2"); List indexRequests = new ArrayList<>(2); - indexRequests.add(client().prepareIndex("idx2").setId("1") - .setSource("{\"dates\": {\"month\": {\"label\": \"2014-11\", \"end\": \"2014-11-30\", \"start\": \"2014-11-01\"}, " + - "\"day\": \"2014-11-30\"}, \"comments\": [{\"cid\": 3,\"identifier\": \"29111\"}, {\"cid\": 4,\"tags\": [" + - "{\"tid\" :44,\"name\": \"Roles\"}], \"identifier\": \"29101\"}]}", XContentType.JSON)); - indexRequests.add(client().prepareIndex("idx2").setId("2") - .setSource("{\"dates\": {\"month\": {\"label\": \"2014-12\", \"end\": \"2014-12-31\", \"start\": \"2014-12-01\"}, " + - "\"day\": \"2014-12-03\"}, \"comments\": [{\"cid\": 1, \"identifier\": \"29111\"}, {\"cid\": 2,\"tags\": [" + - "{\"tid\" : 22, \"name\": \"DataChannels\"}], \"identifier\": \"29101\"}]}", XContentType.JSON)); + indexRequests.add( + client().prepareIndex("idx2") + .setId("1") + .setSource( + 
"{\"dates\": {\"month\": {\"label\": \"2014-11\", \"end\": \"2014-11-30\", \"start\": \"2014-11-01\"}, " + + "\"day\": \"2014-11-30\"}, \"comments\": [{\"cid\": 3,\"identifier\": \"29111\"}, {\"cid\": 4,\"tags\": [" + + "{\"tid\" :44,\"name\": \"Roles\"}], \"identifier\": \"29101\"}]}", + XContentType.JSON + ) + ); + indexRequests.add( + client().prepareIndex("idx2") + .setId("2") + .setSource( + "{\"dates\": {\"month\": {\"label\": \"2014-12\", \"end\": \"2014-12-31\", \"start\": \"2014-12-01\"}, " + + "\"day\": \"2014-12-03\"}, \"comments\": [{\"cid\": 1, \"identifier\": \"29111\"}, {\"cid\": 2,\"tags\": [" + + "{\"tid\" : 22, \"name\": \"DataChannels\"}], \"identifier\": \"29101\"}]}", + XContentType.JSON + ) + ); indexRandom(true, indexRequests); SearchResponse response = client().prepareSearch("idx2") - .addAggregation( - terms("startDate").field("dates.month.start").subAggregation( - terms("endDate").field("dates.month.end").subAggregation( - terms("period").field("dates.month.label").subAggregation( - nested("ctxt_idfier_nested", "comments") - .subAggregation(filter("comment_filter", termQuery("comments.identifier", "29111")) - .subAggregation(nested("nested_tags", "comments.tags") - .subAggregation( - terms("tag").field("comments.tags.name") - ) - ) + .addAggregation( + terms("startDate").field("dates.month.start") + .subAggregation( + terms("endDate").field("dates.month.end") + .subAggregation( + terms("period").field("dates.month.label") + .subAggregation( + nested("ctxt_idfier_nested", "comments").subAggregation( + filter("comment_filter", termQuery("comments.identifier", "29111")).subAggregation( + nested("nested_tags", "comments.tags").subAggregation( + terms("tag").field("comments.tags.name") ) + ) ) - ) - ) - ).get(); + ) + ) + ) + ) + .get(); assertNoFailures(response); assertHitCount(response, 2); @@ -462,39 +516,59 @@ public void testParentFilterResolvedCorrectly() throws Exception { public void testNestedSameDocIdProcessedMultipleTime() throws Exception { assertAcked( - prepareCreate("idx4") - .setSettings(Settings.builder().put(SETTING_NUMBER_OF_SHARDS, 1).put(SETTING_NUMBER_OF_REPLICAS, 0)) - .setMapping("categories", "type=keyword", "name", "type=text", "property", "type=nested") + prepareCreate("idx4").setSettings(Settings.builder().put(SETTING_NUMBER_OF_SHARDS, 1).put(SETTING_NUMBER_OF_REPLICAS, 0)) + .setMapping("categories", "type=keyword", "name", "type=text", "property", "type=nested") ); ensureGreen("idx4"); - client().prepareIndex("idx4").setId("1").setSource(jsonBuilder().startObject() + client().prepareIndex("idx4") + .setId("1") + .setSource( + jsonBuilder().startObject() .field("name", "product1") .array("categories", "1", "2", "3", "4") .startArray("property") - .startObject().field("id", 1).endObject() - .startObject().field("id", 2).endObject() - .startObject().field("id", 3).endObject() + .startObject() + .field("id", 1) + .endObject() + .startObject() + .field("id", 2) + .endObject() + .startObject() + .field("id", 3) + .endObject() + .endArray() + .endObject() + ) + .get(); + client().prepareIndex("idx4") + .setId("2") + .setSource( + jsonBuilder().startObject() + .field("name", "product2") + .array("categories", "1", "2") + .startArray("property") + .startObject() + .field("id", 1) + .endObject() + .startObject() + .field("id", 5) + .endObject() + .startObject() + .field("id", 4) + .endObject() .endArray() - .endObject()).get(); - client().prepareIndex("idx4").setId("2").setSource(jsonBuilder().startObject() - .field("name", "product2") - 
.array("categories", "1", "2") - .startArray("property") - .startObject().field("id", 1).endObject() - .startObject().field("id", 5).endObject() - .startObject().field("id", 4).endObject() - .endArray() - .endObject()).get(); + .endObject() + ) + .get(); refresh(); SearchResponse response = client().prepareSearch("idx4") - .addAggregation(terms("category").field("categories").subAggregation( - nested("property", "property").subAggregation( - terms("property_id").field("property.id") - ) - )) - .get(); + .addAggregation( + terms("category").field("categories") + .subAggregation(nested("property", "property").subAggregation(terms("property_id").field("property.id"))) + ) + .get(); assertNoFailures(response); assertHitCount(response, 2); @@ -547,110 +621,151 @@ public void testNestedSameDocIdProcessedMultipleTime() throws Exception { } public void testFilterAggInsideNestedAgg() throws Exception { - assertAcked(prepareCreate("classes") - .setMapping(jsonBuilder().startObject().startObject("_doc").startObject("properties") - .startObject("name").field("type", "text").endObject() - .startObject("methods") - .field("type", "nested") - .startObject("properties") - .startObject("name").field("type", "text").endObject() - .startObject("return_type").field("type", "keyword").endObject() - .startObject("parameters") - .field("type", "nested") - .startObject("properties") - .startObject("name").field("type", "text").endObject() - .startObject("type").field("type", "keyword").endObject() - .endObject() - .endObject() - .endObject() - .endObject().endObject().endObject().endObject())); + assertAcked( + prepareCreate("classes").setMapping( + jsonBuilder().startObject() + .startObject("_doc") + .startObject("properties") + .startObject("name") + .field("type", "text") + .endObject() + .startObject("methods") + .field("type", "nested") + .startObject("properties") + .startObject("name") + .field("type", "text") + .endObject() + .startObject("return_type") + .field("type", "keyword") + .endObject() + .startObject("parameters") + .field("type", "nested") + .startObject("properties") + .startObject("name") + .field("type", "text") + .endObject() + .startObject("type") + .field("type", "keyword") + .endObject() + .endObject() + .endObject() + .endObject() + .endObject() + .endObject() + .endObject() + .endObject() + ) + ); - client().prepareIndex("classes").setId("1").setSource(jsonBuilder().startObject() + client().prepareIndex("classes") + .setId("1") + .setSource( + jsonBuilder().startObject() .field("name", "QueryBuilder") .startArray("methods") - .startObject() - .field("name", "toQuery") - .field("return_type", "Query") - .startArray("parameters") - .startObject() - .field("name", "context") - .field("type", "SearchExecutionContext") - .endObject() - .endArray() - .endObject() - .startObject() - .field("name", "queryName") - .field("return_type", "QueryBuilder") - .startArray("parameters") - .startObject() - .field("name", "queryName") - .field("type", "String") - .endObject() - .endArray() - .endObject() - .startObject() - .field("name", "boost") - .field("return_type", "QueryBuilder") - .startArray("parameters") - .startObject() - .field("name", "boost") - .field("type", "float") - .endObject() - .endArray() - .endObject() + .startObject() + .field("name", "toQuery") + .field("return_type", "Query") + .startArray("parameters") + .startObject() + .field("name", "context") + .field("type", "SearchExecutionContext") + .endObject() + .endArray() + .endObject() + .startObject() + .field("name", "queryName") 
+ .field("return_type", "QueryBuilder") + .startArray("parameters") + .startObject() + .field("name", "queryName") + .field("type", "String") + .endObject() + .endArray() + .endObject() + .startObject() + .field("name", "boost") + .field("return_type", "QueryBuilder") + .startArray("parameters") + .startObject() + .field("name", "boost") + .field("type", "float") + .endObject() + .endArray() + .endObject() .endArray() - .endObject()).get(); - client().prepareIndex("classes").setId("2").setSource(jsonBuilder().startObject() + .endObject() + ) + .get(); + client().prepareIndex("classes") + .setId("2") + .setSource( + jsonBuilder().startObject() .field("name", "Document") .startArray("methods") - .startObject() - .field("name", "add") - .field("return_type", "void") - .startArray("parameters") - .startObject() - .field("name", "field") - .field("type", "IndexableField") - .endObject() - .endArray() - .endObject() - .startObject() - .field("name", "removeField") - .field("return_type", "void") - .startArray("parameters") - .startObject() - .field("name", "name") - .field("type", "String") - .endObject() - .endArray() - .endObject() - .startObject() - .field("name", "removeFields") - .field("return_type", "void") - .startArray("parameters") - .startObject() - .field("name", "name") - .field("type", "String") - .endObject() - .endArray() - .endObject() + .startObject() + .field("name", "add") + .field("return_type", "void") + .startArray("parameters") + .startObject() + .field("name", "field") + .field("type", "IndexableField") + .endObject() .endArray() - .endObject()).get(); + .endObject() + .startObject() + .field("name", "removeField") + .field("return_type", "void") + .startArray("parameters") + .startObject() + .field("name", "name") + .field("type", "String") + .endObject() + .endArray() + .endObject() + .startObject() + .field("name", "removeFields") + .field("return_type", "void") + .startArray("parameters") + .startObject() + .field("name", "name") + .field("type", "String") + .endObject() + .endArray() + .endObject() + .endArray() + .endObject() + ) + .get(); refresh(); - SearchResponse response = client().prepareSearch("classes").addAggregation(nested("to_method", "methods") - .subAggregation(filter("num_string_params", - nestedQuery("methods.parameters", termQuery("methods.parameters.type", "String"), ScoreMode.None))) - ).get(); + SearchResponse response = client().prepareSearch("classes") + .addAggregation( + nested("to_method", "methods").subAggregation( + filter( + "num_string_params", + nestedQuery("methods.parameters", termQuery("methods.parameters.type", "String"), ScoreMode.None) + ) + ) + ) + .get(); Nested toMethods = response.getAggregations().get("to_method"); Filter numStringParams = toMethods.getAggregations().get("num_string_params"); assertThat(numStringParams.getDocCount(), equalTo(3L)); - response = client().prepareSearch("classes").addAggregation(nested("to_method", "methods") - .subAggregation(terms("return_type").field("methods.return_type").subAggregation( - filter("num_string_params", nestedQuery("methods.parameters", - termQuery("methods.parameters.type", "String"), ScoreMode.None)) + response = client().prepareSearch("classes") + .addAggregation( + nested("to_method", "methods").subAggregation( + terms("return_type").field("methods.return_type") + .subAggregation( + filter( + "num_string_params", + nestedQuery("methods.parameters", termQuery("methods.parameters.type", "String"), ScoreMode.None) + ) ) - )).get(); + ) + ) + .get(); toMethods = 
response.getAggregations().get("to_method"); Terms terms = toMethods.getAggregations().get("return_type"); Bucket bucket = terms.getBucketByKey("void"); @@ -671,43 +786,47 @@ public void testFilterAggInsideNestedAgg() throws Exception { public void testExtractInnerHitBuildersWithDuplicateHitName() throws Exception { assertAcked( - prepareCreate("idxduplicatehitnames") - .setSettings(Settings.builder().put(SETTING_NUMBER_OF_SHARDS, 1).put(SETTING_NUMBER_OF_REPLICAS, 0)) - .setMapping("categories", "type=keyword", "name", "type=text", "property", "type=nested") + prepareCreate("idxduplicatehitnames").setSettings( + Settings.builder().put(SETTING_NUMBER_OF_SHARDS, 1).put(SETTING_NUMBER_OF_REPLICAS, 0) + ).setMapping("categories", "type=keyword", "name", "type=text", "property", "type=nested") ); ensureGreen("idxduplicatehitnames"); - SearchRequestBuilder searchRequestBuilder = client() - .prepareSearch("idxduplicatehitnames") - .setQuery(boolQuery() - .should(nestedQuery("property", termQuery("property.id", 1D), ScoreMode.None).innerHit(new InnerHitBuilder("ih1"))) - .should(nestedQuery("property", termQuery("property.id", 1D), ScoreMode.None).innerHit(new InnerHitBuilder("ih2"))) - .should(nestedQuery("property", termQuery("property.id", 1D), ScoreMode.None).innerHit(new InnerHitBuilder("ih1")))); + SearchRequestBuilder searchRequestBuilder = client().prepareSearch("idxduplicatehitnames") + .setQuery( + boolQuery().should( + nestedQuery("property", termQuery("property.id", 1D), ScoreMode.None).innerHit(new InnerHitBuilder("ih1")) + ) + .should(nestedQuery("property", termQuery("property.id", 1D), ScoreMode.None).innerHit(new InnerHitBuilder("ih2"))) + .should(nestedQuery("property", termQuery("property.id", 1D), ScoreMode.None).innerHit(new InnerHitBuilder("ih1"))) + ); assertFailures( searchRequestBuilder, RestStatus.BAD_REQUEST, - containsString("[inner_hits] already contains an entry for key [ih1]")); + containsString("[inner_hits] already contains an entry for key [ih1]") + ); } public void testExtractInnerHitBuildersWithDuplicatePath() throws Exception { assertAcked( - prepareCreate("idxnullhitnames") - .setSettings(Settings.builder().put(SETTING_NUMBER_OF_SHARDS, 1).put(SETTING_NUMBER_OF_REPLICAS, 0)) - .setMapping("categories", "type=keyword", "name", "type=text", "property", "type=nested") + prepareCreate("idxnullhitnames").setSettings( + Settings.builder().put(SETTING_NUMBER_OF_SHARDS, 1).put(SETTING_NUMBER_OF_REPLICAS, 0) + ).setMapping("categories", "type=keyword", "name", "type=text", "property", "type=nested") ); ensureGreen("idxnullhitnames"); - SearchRequestBuilder searchRequestBuilder = client() - .prepareSearch("idxnullhitnames") - .setQuery(boolQuery() - .should(nestedQuery("property", termQuery("property.id", 1D), ScoreMode.None).innerHit(new InnerHitBuilder())) - .should(nestedQuery("property", termQuery("property.id", 1D), ScoreMode.None).innerHit(new InnerHitBuilder())) - .should(nestedQuery("property", termQuery("property.id", 1D), ScoreMode.None).innerHit(new InnerHitBuilder()))); + SearchRequestBuilder searchRequestBuilder = client().prepareSearch("idxnullhitnames") + .setQuery( + boolQuery().should(nestedQuery("property", termQuery("property.id", 1D), ScoreMode.None).innerHit(new InnerHitBuilder())) + .should(nestedQuery("property", termQuery("property.id", 1D), ScoreMode.None).innerHit(new InnerHitBuilder())) + .should(nestedQuery("property", termQuery("property.id", 1D), ScoreMode.None).innerHit(new InnerHitBuilder())) + ); assertFailures( searchRequestBuilder, 
RestStatus.BAD_REQUEST, - containsString("[inner_hits] already contains an entry for key [property]")); + containsString("[inner_hits] already contains an entry for key [property]") + ); } } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/RangeIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/RangeIT.java index fafc30d20d0cd..b5aba9e4113c0 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/RangeIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/RangeIT.java @@ -98,30 +98,38 @@ public void setupSuiteScopeCluster() throws Exception { numDocs = randomIntBetween(10, 20); List builders = new ArrayList<>(); for (int i = 0; i < numDocs; i++) { - builders.add(client().prepareIndex("idx").setSource(jsonBuilder() - .startObject() - .field(SINGLE_VALUED_FIELD_NAME, i+1) - .startArray(MULTI_VALUED_FIELD_NAME).value(i+1).value(i+2).endArray() - .endObject())); + builders.add( + client().prepareIndex("idx") + .setSource( + jsonBuilder().startObject() + .field(SINGLE_VALUED_FIELD_NAME, i + 1) + .startArray(MULTI_VALUED_FIELD_NAME) + .value(i + 1) + .value(i + 2) + .endArray() + .endObject() + ) + ); } createIndex("idx_unmapped"); prepareCreate("empty_bucket_idx").setMapping(SINGLE_VALUED_FIELD_NAME, "type=integer").get(); for (int i = 0; i < 2; i++) { - builders.add(client().prepareIndex("empty_bucket_idx").setId("" + i).setSource(jsonBuilder() - .startObject() - // shift sequence by 1, to ensure we have negative values, and value 3 on the edge of the tested ranges - .field(SINGLE_VALUED_FIELD_NAME, i * 2 - 1) - .endObject())); + builders.add( + client().prepareIndex("empty_bucket_idx") + .setId("" + i) + .setSource( + jsonBuilder().startObject() + // shift sequence by 1, to ensure we have negative values, and value 3 on the edge of the tested ranges + .field(SINGLE_VALUED_FIELD_NAME, i * 2 - 1) + .endObject() + ) + ); } // Create two indices and add the field 'route_length_miles' as an alias in // one, and a concrete field in the other. 
- prepareCreate("old_index") - .setMapping("distance", "type=double", "route_length_miles", "type=alias,path=distance") - .get(); - prepareCreate("new_index") - .setMapping("route_length_miles", "type=double") - .get(); + prepareCreate("old_index").setMapping("distance", "type=double", "route_length_miles", "type=alias,path=distance").get(); + prepareCreate("new_index").setMapping("route_length_miles", "type=double").get(); builders.add(client().prepareIndex("old_index").setSource("distance", 42.0)); builders.add(client().prepareIndex("old_index").setSource("distance", 50.5)); @@ -134,13 +142,13 @@ public void setupSuiteScopeCluster() throws Exception { public void testRangeAsSubAggregation() throws Exception { SearchResponse response = client().prepareSearch("idx") - .addAggregation(terms("terms").field(MULTI_VALUED_FIELD_NAME).size(100) - .collectMode(randomFrom(SubAggCollectionMode.values())).subAggregation( - range("range").field(SINGLE_VALUED_FIELD_NAME) - .addUnboundedTo(3) - .addRange(3, 6) - .addUnboundedFrom(6))) - .get(); + .addAggregation( + terms("terms").field(MULTI_VALUED_FIELD_NAME) + .size(100) + .collectMode(randomFrom(SubAggCollectionMode.values())) + .subAggregation(range("range").field(SINGLE_VALUED_FIELD_NAME).addUnboundedTo(3).addRange(3, 6).addUnboundedFrom(6)) + ) + .get(); assertSearchResponse(response); Terms terms = response.getAggregations().get("terms"); @@ -197,16 +205,11 @@ public void testRangeAsSubAggregation() throws Exception { public void testSingleValueField() throws Exception { SearchResponse response = client().prepareSearch("idx") - .addAggregation(range("range") - .field(SINGLE_VALUED_FIELD_NAME) - .addUnboundedTo(3) - .addRange(3, 6) - .addUnboundedFrom(6)) - .get(); + .addAggregation(range("range").field(SINGLE_VALUED_FIELD_NAME).addUnboundedTo(3).addRange(3, 6).addUnboundedFrom(6)) + .get(); assertSearchResponse(response); - Range range = response.getAggregations().get("range"); assertThat(range, notNullValue()); assertThat(range.getName(), equalTo("range")); @@ -242,15 +245,12 @@ public void testSingleValueField() throws Exception { } public void testSingleValueFieldWithFormat() throws Exception { - SearchResponse response = client() - .prepareSearch("idx") - .addAggregation( - range("range").field(SINGLE_VALUED_FIELD_NAME).addUnboundedTo(3).addRange(3, 6).addUnboundedFrom(6).format("#")) - .get(); + SearchResponse response = client().prepareSearch("idx") + .addAggregation(range("range").field(SINGLE_VALUED_FIELD_NAME).addUnboundedTo(3).addRange(3, 6).addUnboundedFrom(6).format("#")) + .get(); assertSearchResponse(response); - Range range = response.getAggregations().get("range"); assertThat(range, notNullValue()); assertThat(range.getName(), equalTo("range")); @@ -287,16 +287,13 @@ public void testSingleValueFieldWithFormat() throws Exception { public void testSingleValueFieldWithCustomKey() throws Exception { SearchResponse response = client().prepareSearch("idx") - .addAggregation(range("range") - .field(SINGLE_VALUED_FIELD_NAME) - .addUnboundedTo("r1", 3) - .addRange("r2", 3, 6) - .addUnboundedFrom("r3", 6)) - .get(); + .addAggregation( + range("range").field(SINGLE_VALUED_FIELD_NAME).addUnboundedTo("r1", 3).addRange("r2", 3, 6).addUnboundedFrom("r3", 6) + ) + .get(); assertSearchResponse(response); - Range range = response.getAggregations().get("range"); assertThat(range, notNullValue()); assertThat(range.getName(), equalTo("range")); @@ -333,25 +330,25 @@ public void testSingleValueFieldWithCustomKey() throws Exception { public void 
testSingleValuedFieldWithSubAggregation() throws Exception { SearchResponse response = client().prepareSearch("idx") - .addAggregation(range("range") - .field(SINGLE_VALUED_FIELD_NAME) - .addUnboundedTo(3) - .addRange(3, 6) - .addUnboundedFrom(6) - .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME))) - .get(); + .addAggregation( + range("range").field(SINGLE_VALUED_FIELD_NAME) + .addUnboundedTo(3) + .addRange(3, 6) + .addUnboundedFrom(6) + .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME)) + ) + .get(); assertSearchResponse(response); - Range range = response.getAggregations().get("range"); assertThat(range, notNullValue()); assertThat(range.getName(), equalTo("range")); List buckets = range.getBuckets(); assertThat(range.getBuckets().size(), equalTo(3)); - Object[] propertiesKeys = (Object[]) ((InternalAggregation)range).getProperty("_key"); - Object[] propertiesDocCounts = (Object[]) ((InternalAggregation)range).getProperty("_count"); - Object[] propertiesCounts = (Object[]) ((InternalAggregation)range).getProperty("sum.value"); + Object[] propertiesKeys = (Object[]) ((InternalAggregation) range).getProperty("_key"); + Object[] propertiesDocCounts = (Object[]) ((InternalAggregation) range).getProperty("_count"); + Object[] propertiesCounts = (Object[]) ((InternalAggregation) range).getProperty("sum.value"); Range.Bucket bucket = buckets.get(0); assertThat(bucket, notNullValue()); @@ -404,16 +401,15 @@ public void testSingleValuedFieldWithSubAggregation() throws Exception { } public void testSingleValuedFieldWithValueScript() throws Exception { - SearchResponse response = client() - .prepareSearch("idx") - .addAggregation( - range("range") - .field(SINGLE_VALUED_FIELD_NAME) - .script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_value + 1", Collections.emptyMap())) - .addUnboundedTo(3) - .addRange(3, 6) - .addUnboundedFrom(6)) - .get(); + SearchResponse response = client().prepareSearch("idx") + .addAggregation( + range("range").field(SINGLE_VALUED_FIELD_NAME) + .script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_value + 1", Collections.emptyMap())) + .addUnboundedTo(3) + .addRange(3, 6) + .addUnboundedFrom(6) + ) + .get(); assertSearchResponse(response); @@ -466,16 +462,11 @@ public void testSingleValuedFieldWithValueScript() throws Exception { public void testMultiValuedField() throws Exception { SearchResponse response = client().prepareSearch("idx") - .addAggregation(range("range") - .field(MULTI_VALUED_FIELD_NAME) - .addUnboundedTo(3) - .addRange(3, 6) - .addUnboundedFrom(6)) - .get(); + .addAggregation(range("range").field(MULTI_VALUED_FIELD_NAME).addUnboundedTo(3).addRange(3, 6).addUnboundedFrom(6)) + .get(); assertSearchResponse(response); - Range range = response.getAggregations().get("range"); assertThat(range, notNullValue()); assertThat(range.getName(), equalTo("range")); @@ -524,20 +515,18 @@ public void testMultiValuedField() throws Exception { */ public void testMultiValuedFieldWithValueScript() throws Exception { - SearchResponse response = client() - .prepareSearch("idx") - .addAggregation( - range("range") - .field(MULTI_VALUED_FIELD_NAME) - .script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_value + 1", Collections.emptyMap())) - .addUnboundedTo(3) - .addRange(3, 6) - .addUnboundedFrom(6)) - .get(); + SearchResponse response = client().prepareSearch("idx") + .addAggregation( + range("range").field(MULTI_VALUED_FIELD_NAME) + .script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_value + 1", 
Collections.emptyMap())) + .addUnboundedTo(3) + .addRange(3, 6) + .addUnboundedFrom(6) + ) + .get(); assertSearchResponse(response); - Range range = response.getAggregations().get("range"); assertThat(range, notNullValue()); assertThat(range.getName(), equalTo("range")); @@ -590,21 +579,18 @@ public void testMultiValuedFieldWithValueScript() throws Exception { */ public void testScriptSingleValue() throws Exception { - Script script = - new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['" + SINGLE_VALUED_FIELD_NAME + "'].value", Collections.emptyMap()); - SearchResponse response = client() - .prepareSearch("idx") - .addAggregation( - range("range") - .script(script) - .addUnboundedTo(3) - .addRange(3, 6) - .addUnboundedFrom(6)) - .get(); + Script script = new Script( + ScriptType.INLINE, + CustomScriptPlugin.NAME, + "doc['" + SINGLE_VALUED_FIELD_NAME + "'].value", + Collections.emptyMap() + ); + SearchResponse response = client().prepareSearch("idx") + .addAggregation(range("range").script(script).addUnboundedTo(3).addRange(3, 6).addUnboundedFrom(6)) + .get(); assertSearchResponse(response); - Range range = response.getAggregations().get("range"); assertThat(range, notNullValue()); assertThat(range.getName(), equalTo("range")); @@ -641,15 +627,11 @@ public void testScriptSingleValue() throws Exception { public void testEmptyRange() throws Exception { SearchResponse response = client().prepareSearch("idx") - .addAggregation(range("range") - .field(MULTI_VALUED_FIELD_NAME) - .addUnboundedTo(-1) - .addUnboundedFrom(1000)) - .get(); + .addAggregation(range("range").field(MULTI_VALUED_FIELD_NAME).addUnboundedTo(-1).addUnboundedFrom(1000)) + .get(); assertSearchResponse(response); - Range range = response.getAggregations().get("range"); assertThat(range, notNullValue()); assertThat(range.getName(), equalTo("range")); @@ -675,14 +657,11 @@ public void testEmptyRange() throws Exception { assertThat(bucket.getDocCount(), equalTo(0L)); } - public void testNoRangesInQuery() { + public void testNoRangesInQuery() { try { - client().prepareSearch("idx") - .addAggregation(range("foobar") - .field(SINGLE_VALUED_FIELD_NAME)) - .get(); + client().prepareSearch("idx").addAggregation(range("foobar").field(SINGLE_VALUED_FIELD_NAME)).get(); fail(); - } catch (SearchPhaseExecutionException spee){ + } catch (SearchPhaseExecutionException spee) { Throwable rootCause = spee.getCause().getCause(); assertThat(rootCause, instanceOf(IllegalArgumentException.class)); assertEquals(rootCause.getMessage(), "No [ranges] specified for the [foobar] aggregation"); @@ -690,21 +669,18 @@ public void testNoRangesInQuery() { } public void testScriptMultiValued() throws Exception { - Script script = - new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['" + MULTI_VALUED_FIELD_NAME + "']", Collections.emptyMap()); - - SearchResponse response = client() - .prepareSearch("idx") - .addAggregation( - range("range") - .script(script) - .addUnboundedTo(3) - .addRange(3, 6) - .addUnboundedFrom(6)) - .get(); + Script script = new Script( + ScriptType.INLINE, + CustomScriptPlugin.NAME, + "doc['" + MULTI_VALUED_FIELD_NAME + "']", + Collections.emptyMap() + ); - assertSearchResponse(response); + SearchResponse response = client().prepareSearch("idx") + .addAggregation(range("range").script(script).addUnboundedTo(3).addRange(3, 6).addUnboundedFrom(6)) + .get(); + assertSearchResponse(response); Range range = response.getAggregations().get("range"); assertThat(range, notNullValue()); @@ -759,16 +735,11 @@ public void 
testScriptMultiValued() throws Exception { public void testUnmapped() throws Exception { SearchResponse response = client().prepareSearch("idx_unmapped") - .addAggregation(range("range") - .field(SINGLE_VALUED_FIELD_NAME) - .addUnboundedTo(3) - .addRange(3, 6) - .addUnboundedFrom(6)) - .get(); + .addAggregation(range("range").field(SINGLE_VALUED_FIELD_NAME).addUnboundedTo(3).addRange(3, 6).addUnboundedFrom(6)) + .get(); assertSearchResponse(response); - Range range = response.getAggregations().get("range"); assertThat(range, notNullValue()); assertThat(range.getName(), equalTo("range")); @@ -807,16 +778,11 @@ public void testPartiallyUnmapped() throws Exception { client().admin().cluster().prepareHealth("idx_unmapped").setWaitForYellowStatus().get(); SearchResponse response = client().prepareSearch("idx", "idx_unmapped") - .addAggregation(range("range") - .field(SINGLE_VALUED_FIELD_NAME) - .addUnboundedTo(3) - .addRange(3, 6) - .addUnboundedFrom(6)) - .get(); + .addAggregation(range("range").field(SINGLE_VALUED_FIELD_NAME).addUnboundedTo(3).addRange(3, 6).addUnboundedFrom(6)) + .get(); assertSearchResponse(response); - Range range = response.getAggregations().get("range"); assertThat(range, notNullValue()); assertThat(range.getName(), equalTo("range")); @@ -853,17 +819,13 @@ public void testPartiallyUnmapped() throws Exception { public void testOverlappingRanges() throws Exception { SearchResponse response = client().prepareSearch("idx") - .addAggregation(range("range") - .field(MULTI_VALUED_FIELD_NAME) - .addUnboundedTo(5) - .addRange(3, 6) - .addRange(4, 5) - .addUnboundedFrom(4)) - .get(); + .addAggregation( + range("range").field(MULTI_VALUED_FIELD_NAME).addUnboundedTo(5).addRange(3, 6).addRange(4, 5).addUnboundedFrom(4) + ) + .get(); assertSearchResponse(response); - Range range = response.getAggregations().get("range"); assertThat(range, notNullValue()); assertThat(range.getName(), equalTo("range")); @@ -909,17 +871,14 @@ public void testOverlappingRanges() throws Exception { public void testEmptyAggregation() throws Exception { SearchResponse searchResponse = client().prepareSearch("empty_bucket_idx") - .setQuery(matchAllQuery()) - .addAggregation( - histogram("histo") - .field(SINGLE_VALUED_FIELD_NAME) - .interval(1L) - .minDocCount(0) - .subAggregation( - range("range") - .field(SINGLE_VALUED_FIELD_NAME) - .addRange("0-2", 0.0, 2.0))) - .get(); + .setQuery(matchAllQuery()) + .addAggregation( + histogram("histo").field(SINGLE_VALUED_FIELD_NAME) + .interval(1L) + .minDocCount(0) + .subAggregation(range("range").field(SINGLE_VALUED_FIELD_NAME).addRange("0-2", 0.0, 2.0)) + ) + .get(); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(2L)); Histogram histo = searchResponse.getAggregations().get("histo"); @@ -947,62 +906,142 @@ public void testEmptyAggregation() throws Exception { * Ensure requests using nondeterministic scripts do not get cached. 
*/ public void testScriptCaching() throws Exception { - assertAcked(prepareCreate("cache_test_idx").setMapping("i", "type=integer") + assertAcked( + prepareCreate("cache_test_idx").setMapping("i", "type=integer") .setSettings(Settings.builder().put("requests.cache.enable", true).put("number_of_shards", 1).put("number_of_replicas", 1)) - .get()); - indexRandom(true, - client().prepareIndex("cache_test_idx").setId("1").setSource(jsonBuilder().startObject().field("i", 1).endObject()), - client().prepareIndex("cache_test_idx").setId("2").setSource(jsonBuilder().startObject().field("i", 2).endObject())); + .get() + ); + indexRandom( + true, + client().prepareIndex("cache_test_idx").setId("1").setSource(jsonBuilder().startObject().field("i", 1).endObject()), + client().prepareIndex("cache_test_idx").setId("2").setSource(jsonBuilder().startObject().field("i", 2).endObject()) + ); // Make sure we are starting with a clear cache - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getHitCount(), equalTo(0L)); - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getMissCount(), equalTo(0L)); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getHitCount(), + equalTo(0L) + ); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getMissCount(), + equalTo(0L) + ); // Test that a request using a nondeterministic script does not get cached Map params = new HashMap<>(); params.put("fieldname", "date"); - SearchResponse r = client().prepareSearch("cache_test_idx").setSize(0).addAggregation( - range("foo").field("i").script( - new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "Math.random()", Collections.emptyMap())).addRange(0, 10)) - .get(); + SearchResponse r = client().prepareSearch("cache_test_idx") + .setSize(0) + .addAggregation( + range("foo").field("i") + .script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "Math.random()", Collections.emptyMap())) + .addRange(0, 10) + ) + .get(); assertSearchResponse(r); - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getHitCount(), equalTo(0L)); - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getMissCount(), equalTo(0L)); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getHitCount(), + equalTo(0L) + ); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getMissCount(), + equalTo(0L) + ); // Test that a request using a deterministic script gets cached - r = client().prepareSearch("cache_test_idx").setSize(0).addAggregation( - range("foo").field("i").script( - new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_value + 1", Collections.emptyMap())).addRange(0, 10)) - .get(); + r = client().prepareSearch("cache_test_idx") + .setSize(0) + .addAggregation( + range("foo").field("i") + .script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_value + 1", Collections.emptyMap())) + .addRange(0, 10) + ) + .get(); 
assertSearchResponse(r); - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getHitCount(), equalTo(0L)); - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getMissCount(), equalTo(1L)); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getHitCount(), + equalTo(0L) + ); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getMissCount(), + equalTo(1L) + ); // Ensure that non-scripted requests are cached as normal r = client().prepareSearch("cache_test_idx").setSize(0).addAggregation(range("foo").field("i").addRange(0, 10)).get(); assertSearchResponse(r); - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getHitCount(), equalTo(0L)); - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getMissCount(), equalTo(2L)); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getHitCount(), + equalTo(0L) + ); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getMissCount(), + equalTo(2L) + ); } public void testFieldAlias() { SearchResponse response = client().prepareSearch("old_index", "new_index") - .addAggregation(range("range") - .field("route_length_miles") - .addUnboundedTo(50.0) - .addRange(50.0, 150.0) - .addUnboundedFrom(150.0)) + .addAggregation(range("range").field("route_length_miles").addUnboundedTo(50.0).addRange(50.0, 150.0).addUnboundedFrom(150.0)) .get(); assertSearchResponse(response); @@ -1029,15 +1068,11 @@ public void testFieldAlias() { assertThat(bucket.getDocCount(), equalTo(0L)); } - public void testFieldAliasWithMissingValue() { SearchResponse response = client().prepareSearch("old_index", "new_index") - .addAggregation(range("range") - .field("route_length_miles") - .missing(0.0) - .addUnboundedTo(50.0) - .addRange(50.0, 150.0) - .addUnboundedFrom(150.0)) + .addAggregation( + range("range").field("route_length_miles").missing(0.0).addUnboundedTo(50.0).addRange(50.0, 150.0).addUnboundedFrom(150.0) + ) .get(); assertSearchResponse(response); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/ReverseNestedIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/ReverseNestedIT.java index cfc333b87acd2..20b6f761b0ec4 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/ReverseNestedIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/ReverseNestedIT.java @@ -12,13 +12,13 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.search.aggregations.Aggregator.SubAggCollectionMode; +import org.elasticsearch.search.aggregations.BucketOrder; import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.bucket.filter.Filter; import org.elasticsearch.search.aggregations.bucket.nested.Nested; 
import org.elasticsearch.search.aggregations.bucket.nested.ReverseNested; import org.elasticsearch.search.aggregations.bucket.terms.Terms; import org.elasticsearch.search.aggregations.metrics.ValueCount; -import org.elasticsearch.search.aggregations.BucketOrder; import org.elasticsearch.test.ESIntegTestCase; import java.util.ArrayList; @@ -50,30 +50,52 @@ public class ReverseNestedIT extends ESIntegTestCase { @Override public void setupSuiteScopeCluster() throws Exception { - assertAcked(prepareCreate("idx1") - .setMapping( - jsonBuilder().startObject().startObject("properties") - .startObject("field1").field("type", "keyword").endObject() - .startObject("alias") - .field("type", "alias") - .field("path", "field1") - .endObject() - .startObject("nested1").field("type", "nested").startObject("properties") - .startObject("field2").field("type", "keyword").endObject() - .endObject().endObject() - .endObject().endObject() - )); - assertAcked(prepareCreate("idx2") - .setMapping( - jsonBuilder().startObject().startObject("properties") - .startObject("nested1").field("type", "nested").startObject("properties") - .startObject("field1").field("type", "keyword").endObject() - .startObject("nested2").field("type", "nested").startObject("properties") - .startObject("field2").field("type", "keyword").endObject() - .endObject().endObject() - .endObject().endObject() - .endObject().endObject() - ) + assertAcked( + prepareCreate("idx1").setMapping( + jsonBuilder().startObject() + .startObject("properties") + .startObject("field1") + .field("type", "keyword") + .endObject() + .startObject("alias") + .field("type", "alias") + .field("path", "field1") + .endObject() + .startObject("nested1") + .field("type", "nested") + .startObject("properties") + .startObject("field2") + .field("type", "keyword") + .endObject() + .endObject() + .endObject() + .endObject() + .endObject() + ) + ); + assertAcked( + prepareCreate("idx2").setMapping( + jsonBuilder().startObject() + .startObject("properties") + .startObject("nested1") + .field("type", "nested") + .startObject("properties") + .startObject("field1") + .field("type", "keyword") + .endObject() + .startObject("nested2") + .field("type", "nested") + .startObject("properties") + .startObject("field2") + .field("type", "keyword") + .endObject() + .endObject() + .endObject() + .endObject() + .endObject() + .endObject() + .endObject() + ) ); insertIdx1(Arrays.asList("a", "b", "c"), Arrays.asList("1", "2", "3", "4")); @@ -89,22 +111,21 @@ public void setupSuiteScopeCluster() throws Exception { insertIdx1(Arrays.asList("z"), Arrays.asList("5", "9")); refresh(); - insertIdx2(new String[][]{new String[]{"a", "0", "0", "1", "2"}, new String[]{"b", "0", "1", "1", "2"}, new String[]{"a", "0"}}); - insertIdx2(new String[][]{new String[]{"c", "1", "1", "2", "2"}, new String[]{"d", "3", "4"}}); + insertIdx2( + new String[][] { new String[] { "a", "0", "0", "1", "2" }, new String[] { "b", "0", "1", "1", "2" }, new String[] { "a", "0" } } + ); + insertIdx2(new String[][] { new String[] { "c", "1", "1", "2", "2" }, new String[] { "d", "3", "4" } }); refresh(); - insertIdx2(new String[][]{new String[]{"a", "0", "0", "0", "0"}, new String[]{"b", "0", "0", "0", "0"}}); - insertIdx2(new String[][]{new String[]{"e", "1", "2"}, new String[]{"f", "3", "4"}}); + insertIdx2(new String[][] { new String[] { "a", "0", "0", "0", "0" }, new String[] { "b", "0", "0", "0", "0" } }); + insertIdx2(new String[][] { new String[] { "e", "1", "2" }, new String[] { "f", "3", "4" } }); refresh(); 
ensureSearchable(); } private void insertIdx1(List values1, List values2) throws Exception { - XContentBuilder source = jsonBuilder() - .startObject() - .array("field1", values1.toArray()) - .startArray("nested1"); + XContentBuilder source = jsonBuilder().startObject().array("field1", values1.toArray()).startArray("nested1"); for (String value1 : values2) { source.startObject().field("field2", value1).endObject(); } @@ -113,9 +134,7 @@ private void insertIdx1(List values1, List values2) throws Excep } private void insertIdx2(String[][] values) throws Exception { - XContentBuilder source = jsonBuilder() - .startObject() - .startArray("nested1"); + XContentBuilder source = jsonBuilder().startObject().startArray("nested1"); for (String[] value : values) { source.startObject().field("field1", value[0]).startArray("nested2"); for (int i = 1; i < value.length; i++) { @@ -129,18 +148,17 @@ private void insertIdx2(String[][] values) throws Exception { public void testSimpleReverseNestedToRoot() throws Exception { SearchResponse response = client().prepareSearch("idx1") - .addAggregation(nested("nested1", "nested1") + .addAggregation( + nested("nested1", "nested1").subAggregation( + terms("field2").field("nested1.field2") .subAggregation( - terms("field2").field("nested1.field2") - .subAggregation( - reverseNested("nested1_to_field1") - .subAggregation( - terms("field1").field("field1") - .collectMode(randomFrom(SubAggCollectionMode.values())) - ) - ) + reverseNested("nested1_to_field1").subAggregation( + terms("field1").field("field1").collectMode(randomFrom(SubAggCollectionMode.values())) + ) ) - ).get(); + ) + ) + .get(); assertSearchResponse(response); @@ -160,9 +178,9 @@ public void testSimpleReverseNestedToRoot() throws Exception { assertThat(bucket.getKeyAsString(), equalTo("1")); assertThat(bucket.getDocCount(), equalTo(6L)); ReverseNested reverseNested = bucket.getAggregations().get("nested1_to_field1"); - assertThat(((InternalAggregation)reverseNested).getProperty("_count"), equalTo(5L)); + assertThat(((InternalAggregation) reverseNested).getProperty("_count"), equalTo(5L)); Terms tags = reverseNested.getAggregations().get("field1"); - assertThat(((InternalAggregation)reverseNested).getProperty("field1"), sameInstance(tags)); + assertThat(((InternalAggregation) reverseNested).getProperty("field1"), sameInstance(tags)); List tagsBuckets = new ArrayList<>(tags.getBuckets()); assertThat(tagsBuckets.size(), equalTo(6)); assertThat(tagsBuckets.get(0).getKeyAsString(), equalTo("c")); @@ -317,13 +335,12 @@ public void testSimpleReverseNestedToRoot() throws Exception { public void testSimpleNested1ToRootToNested2() throws Exception { SearchResponse response = client().prepareSearch("idx2") - .addAggregation(nested("nested1", "nested1") - .subAggregation( - reverseNested("nested1_to_root") - .subAggregation(nested("root_to_nested2", "nested1.nested2")) - ) - ) - .get(); + .addAggregation( + nested("nested1", "nested1").subAggregation( + reverseNested("nested1_to_root").subAggregation(nested("root_to_nested2", "nested1.nested2")) + ) + ) + .get(); assertSearchResponse(response); Nested nested = response.getAggregations().get("nested1"); @@ -339,20 +356,23 @@ public void testSimpleNested1ToRootToNested2() throws Exception { public void testSimpleReverseNestedToNested1() throws Exception { SearchResponse response = client().prepareSearch("idx2") - .addAggregation(nested("nested1", "nested1.nested2") + .addAggregation( + nested("nested1", "nested1.nested2").subAggregation( + 
terms("field2").field("nested1.nested2.field2") + .order(BucketOrder.key(true)) + .collectMode(randomFrom(SubAggCollectionMode.values())) + .size(10000) + .subAggregation( + reverseNested("nested1_to_field1").path("nested1") .subAggregation( - terms("field2").field("nested1.nested2.field2").order(BucketOrder.key(true)) - .collectMode(randomFrom(SubAggCollectionMode.values())) - .size(10000) - .subAggregation( - reverseNested("nested1_to_field1").path("nested1") - .subAggregation( - terms("field1").field("nested1.field1").order(BucketOrder.key(true)) - .collectMode(randomFrom(SubAggCollectionMode.values())) - ) - ) + terms("field1").field("nested1.field1") + .order(BucketOrder.key(true)) + .collectMode(randomFrom(SubAggCollectionMode.values())) ) - ).get(); + ) + ) + ) + .get(); assertSearchResponse(response); @@ -442,16 +462,16 @@ public void testSimpleReverseNestedToNested1() throws Exception { public void testReverseNestedAggWithoutNestedAgg() { try { client().prepareSearch("idx2") - .addAggregation(terms("field2").field("nested1.nested2.field2") - .collectMode(randomFrom(SubAggCollectionMode.values())) - .subAggregation( - reverseNested("nested1_to_field1") - .subAggregation( - terms("field1").field("nested1.field1") - .collectMode(randomFrom(SubAggCollectionMode.values())) - ) - ) - ).get(); + .addAggregation( + terms("field2").field("nested1.nested2.field2") + .collectMode(randomFrom(SubAggCollectionMode.values())) + .subAggregation( + reverseNested("nested1_to_field1").subAggregation( + terms("field1").field("nested1.field1").collectMode(randomFrom(SubAggCollectionMode.values())) + ) + ) + ) + .get(); fail("Expected SearchPhaseExecutionException"); } catch (SearchPhaseExecutionException e) { assertThat(e.getMessage(), is("all shards failed")); @@ -460,9 +480,9 @@ public void testReverseNestedAggWithoutNestedAgg() { public void testNonExistingNestedField() throws Exception { SearchResponse searchResponse = client().prepareSearch("idx2") - .setQuery(matchAllQuery()) - .addAggregation(nested("nested2", "nested1.nested2").subAggregation(reverseNested("incorrect").path("nested3"))) - .get(); + .setQuery(matchAllQuery()) + .addAggregation(nested("nested2", "nested1.nested2").subAggregation(reverseNested("incorrect").path("nested3"))) + .get(); Nested nested = searchResponse.getAggregations().get("nested2"); assertThat(nested, notNullValue()); @@ -473,9 +493,9 @@ public void testNonExistingNestedField() throws Exception { // Test that parsing the reverse_nested agg doesn't fail, because the parent nested agg is unmapped: searchResponse = client().prepareSearch("idx1") - .setQuery(matchAllQuery()) - .addAggregation(nested("incorrect1", "incorrect1").subAggregation(reverseNested("incorrect2").path("incorrect2"))) - .get(); + .setQuery(matchAllQuery()) + .addAggregation(nested("incorrect1", "incorrect1").subAggregation(reverseNested("incorrect2").path("incorrect2"))) + .get(); nested = searchResponse.getAggregations().get("incorrect1"); assertThat(nested, notNullValue()); @@ -484,94 +504,138 @@ public void testNonExistingNestedField() throws Exception { } public void testSameParentDocHavingMultipleBuckets() throws Exception { - XContentBuilder mapping = jsonBuilder().startObject().startObject("_doc").field("dynamic", "strict").startObject("properties") - .startObject("id").field("type", "long").endObject() - .startObject("category") - .field("type", "nested") - .startObject("properties") - .startObject("name").field("type", "keyword").endObject() - .endObject() - .endObject() - 
.startObject("sku") - .field("type", "nested") - .startObject("properties") - .startObject("sku_type").field("type", "keyword").endObject() - .startObject("colors") - .field("type", "nested") - .startObject("properties") - .startObject("name").field("type", "keyword").endObject() - .endObject() - .endObject() - .endObject() - .endObject() - .endObject().endObject().endObject(); + XContentBuilder mapping = jsonBuilder().startObject() + .startObject("_doc") + .field("dynamic", "strict") + .startObject("properties") + .startObject("id") + .field("type", "long") + .endObject() + .startObject("category") + .field("type", "nested") + .startObject("properties") + .startObject("name") + .field("type", "keyword") + .endObject() + .endObject() + .endObject() + .startObject("sku") + .field("type", "nested") + .startObject("properties") + .startObject("sku_type") + .field("type", "keyword") + .endObject() + .startObject("colors") + .field("type", "nested") + .startObject("properties") + .startObject("name") + .field("type", "keyword") + .endObject() + .endObject() + .endObject() + .endObject() + .endObject() + .endObject() + .endObject() + .endObject(); assertAcked( - prepareCreate("idx3") - .setSettings(Settings.builder().put(SETTING_NUMBER_OF_SHARDS, 1).put(SETTING_NUMBER_OF_REPLICAS, 0)) - .setMapping(mapping) + prepareCreate("idx3").setSettings(Settings.builder().put(SETTING_NUMBER_OF_SHARDS, 1).put(SETTING_NUMBER_OF_REPLICAS, 0)) + .setMapping(mapping) ); - client().prepareIndex("idx3").setId("1").setRefreshPolicy(IMMEDIATE).setSource( + client().prepareIndex("idx3") + .setId("1") + .setRefreshPolicy(IMMEDIATE) + .setSource( jsonBuilder().startObject() - .startArray("sku") - .startObject() - .field("sku_type", "bar1") - .startArray("colors") - .startObject().field("name", "red").endObject() - .startObject().field("name", "green").endObject() - .startObject().field("name", "yellow").endObject() - .endArray() - .endObject() - .startObject() - .field("sku_type", "bar1") - .startArray("colors") - .startObject().field("name", "red").endObject() - .startObject().field("name", "blue").endObject() - .startObject().field("name", "white").endObject() - .endArray() - .endObject() - .startObject() - .field("sku_type", "bar1") - .startArray("colors") - .startObject().field("name", "black").endObject() - .startObject().field("name", "blue").endObject() - .endArray() - .endObject() - .startObject() - .field("sku_type", "bar2") - .startArray("colors") - .startObject().field("name", "orange").endObject() - .endArray() - .endObject() - .startObject() - .field("sku_type", "bar2") - .startArray("colors") - .startObject().field("name", "pink").endObject() - .endArray() - .endObject() - .endArray() - .startArray("category") - .startObject().field("name", "abc").endObject() - .startObject().field("name", "klm").endObject() - .startObject().field("name", "xyz").endObject() - .endArray() - .endObject() - ).get(); + .startArray("sku") + .startObject() + .field("sku_type", "bar1") + .startArray("colors") + .startObject() + .field("name", "red") + .endObject() + .startObject() + .field("name", "green") + .endObject() + .startObject() + .field("name", "yellow") + .endObject() + .endArray() + .endObject() + .startObject() + .field("sku_type", "bar1") + .startArray("colors") + .startObject() + .field("name", "red") + .endObject() + .startObject() + .field("name", "blue") + .endObject() + .startObject() + .field("name", "white") + .endObject() + .endArray() + .endObject() + .startObject() + .field("sku_type", "bar1") + 
.startArray("colors") + .startObject() + .field("name", "black") + .endObject() + .startObject() + .field("name", "blue") + .endObject() + .endArray() + .endObject() + .startObject() + .field("sku_type", "bar2") + .startArray("colors") + .startObject() + .field("name", "orange") + .endObject() + .endArray() + .endObject() + .startObject() + .field("sku_type", "bar2") + .startArray("colors") + .startObject() + .field("name", "pink") + .endObject() + .endArray() + .endObject() + .endArray() + .startArray("category") + .startObject() + .field("name", "abc") + .endObject() + .startObject() + .field("name", "klm") + .endObject() + .startObject() + .field("name", "xyz") + .endObject() + .endArray() + .endObject() + ) + .get(); SearchResponse response = client().prepareSearch("idx3") - .addAggregation( - nested("nested_0", "category").subAggregation( - terms("group_by_category").field("category.name").subAggregation( - reverseNested("to_root").subAggregation( - nested("nested_1", "sku").subAggregation( - filter("filter_by_sku", termQuery("sku.sku_type", "bar1")).subAggregation( - count("sku_count").field("sku.sku_type") - ) - ) - ) + .addAggregation( + nested("nested_0", "category").subAggregation( + terms("group_by_category").field("category.name") + .subAggregation( + reverseNested("to_root").subAggregation( + nested("nested_1", "sku").subAggregation( + filter("filter_by_sku", termQuery("sku.sku_type", "bar1")).subAggregation( + count("sku_count").field("sku.sku_type") + ) ) + ) ) - ).get(); + ) + ) + .get(); assertNoFailures(response); assertHitCount(response, 1); @@ -579,7 +643,7 @@ public void testSameParentDocHavingMultipleBuckets() throws Exception { assertThat(nested0.getDocCount(), equalTo(3L)); Terms terms = nested0.getAggregations().get("group_by_category"); assertThat(terms.getBuckets().size(), equalTo(3)); - for (String bucketName : new String[]{"abc", "klm", "xyz"}) { + for (String bucketName : new String[] { "abc", "klm", "xyz" }) { logger.info("Checking results for bucket {}", bucketName); Terms.Bucket bucket = terms.getBucketByKey(bucketName); assertThat(bucket.getDocCount(), equalTo(1L)); @@ -594,26 +658,26 @@ public void testSameParentDocHavingMultipleBuckets() throws Exception { } response = client().prepareSearch("idx3") - .addAggregation( - nested("nested_0", "category").subAggregation( - terms("group_by_category").field("category.name").subAggregation( - reverseNested("to_root").subAggregation( - nested("nested_1", "sku").subAggregation( - filter("filter_by_sku", termQuery("sku.sku_type", "bar1")).subAggregation( - nested("nested_2", "sku.colors").subAggregation( - filter("filter_sku_color", termQuery("sku.colors.name", "red")) - .subAggregation( - reverseNested("reverse_to_sku").path("sku").subAggregation( - count("sku_count").field("sku.sku_type") - ) - ) - ) - ) - ) + .addAggregation( + nested("nested_0", "category").subAggregation( + terms("group_by_category").field("category.name") + .subAggregation( + reverseNested("to_root").subAggregation( + nested("nested_1", "sku").subAggregation( + filter("filter_by_sku", termQuery("sku.sku_type", "bar1")).subAggregation( + nested("nested_2", "sku.colors").subAggregation( + filter("filter_sku_color", termQuery("sku.colors.name", "red")).subAggregation( + reverseNested("reverse_to_sku").path("sku") + .subAggregation(count("sku_count").field("sku.sku_type")) + ) ) + ) ) + ) ) - ).get(); + ) + ) + .get(); assertNoFailures(response); assertHitCount(response, 1); @@ -621,7 +685,7 @@ public void testSameParentDocHavingMultipleBuckets() 
throws Exception { assertThat(nested0.getDocCount(), equalTo(3L)); terms = nested0.getAggregations().get("group_by_category"); assertThat(terms.getBuckets().size(), equalTo(3)); - for (String bucketName : new String[]{"abc", "klm", "xyz"}) { + for (String bucketName : new String[] { "abc", "klm", "xyz" }) { logger.info("Checking results for bucket {}", bucketName); Terms.Bucket bucket = terms.getBucketByKey(bucketName); assertThat(bucket.getDocCount(), equalTo(1L)); @@ -644,14 +708,17 @@ public void testSameParentDocHavingMultipleBuckets() throws Exception { public void testFieldAlias() { SearchResponse response = client().prepareSearch("idx1") - .addAggregation(nested("nested1", "nested1") - .subAggregation( + .addAggregation( + nested("nested1", "nested1").subAggregation( terms("field2").field("nested1.field2") .subAggregation( - reverseNested("nested1_to_field1") - .subAggregation( - terms("field1").field("alias") - .collectMode(randomFrom(SubAggCollectionMode.values())))))).get(); + reverseNested("nested1_to_field1").subAggregation( + terms("field1").field("alias").collectMode(randomFrom(SubAggCollectionMode.values())) + ) + ) + ) + ) + .get(); assertSearchResponse(response); @@ -662,7 +729,7 @@ public void testFieldAlias() { ReverseNested reverseNested = bucket.getAggregations().get("nested1_to_field1"); Terms reverseNestedTerms = reverseNested.getAggregations().get("field1"); - assertThat(((InternalAggregation)reverseNested).getProperty("field1"), sameInstance(reverseNestedTerms)); + assertThat(((InternalAggregation) reverseNested).getProperty("field1"), sameInstance(reverseNestedTerms)); assertThat(reverseNestedTerms.getBuckets().size(), equalTo(6)); } } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/SamplerIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/SamplerIT.java index badc322ee5425..ac598d2fff0c9 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/SamplerIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/SamplerIT.java @@ -47,42 +47,47 @@ public String randomExecutionHint() { return randomBoolean() ? 
null : randomFrom(SamplerAggregator.ExecutionMode.values()).toString(); } - @Override public void setupSuiteScopeCluster() throws Exception { - assertAcked(prepareCreate("test") - .setSettings(Settings.builder().put(SETTING_NUMBER_OF_SHARDS, NUM_SHARDS).put(SETTING_NUMBER_OF_REPLICAS, 0)) - .setMapping( - "author", "type=keyword", "name", "type=text", "genre", - "type=keyword", "price", "type=float")); + assertAcked( + prepareCreate("test").setSettings( + Settings.builder().put(SETTING_NUMBER_OF_SHARDS, NUM_SHARDS).put(SETTING_NUMBER_OF_REPLICAS, 0) + ).setMapping("author", "type=keyword", "name", "type=text", "genre", "type=keyword", "price", "type=float") + ); createIndex("idx_unmapped"); // idx_unmapped_author is same as main index but missing author field - assertAcked(prepareCreate("idx_unmapped_author") - .setSettings(Settings.builder().put(SETTING_NUMBER_OF_SHARDS, NUM_SHARDS).put(SETTING_NUMBER_OF_REPLICAS, 0)) - .setMapping("name", "type=text", "genre", "type=keyword", "price", "type=float")); + assertAcked( + prepareCreate("idx_unmapped_author").setSettings( + Settings.builder().put(SETTING_NUMBER_OF_SHARDS, NUM_SHARDS).put(SETTING_NUMBER_OF_REPLICAS, 0) + ).setMapping("name", "type=text", "genre", "type=keyword", "price", "type=float") + ); ensureGreen(); String data[] = { - // "id,cat,name,price,inStock,author_t,series_t,sequence_i,genre_s", - "0553573403,book,A Game of Thrones,7.99,true,George R.R. Martin,A Song of Ice and Fire,1,fantasy", - "0553579908,book,A Clash of Kings,7.99,true,George R.R. Martin,A Song of Ice and Fire,2,fantasy", - "055357342X,book,A Storm of Swords,7.99,true,George R.R. Martin,A Song of Ice and Fire,3,fantasy", - "0553293354,book,Foundation,17.99,true,Isaac Asimov,Foundation Novels,1,scifi", - "0812521390,book,The Black Company,6.99,false,Glen Cook,The Chronicles of The Black Company,1,fantasy", - "0812550706,book,Ender's Game,6.99,true,Orson Scott Card,Ender,1,scifi", - "0441385532,book,Jhereg,7.95,false,Steven Brust,Vlad Taltos,1,fantasy", - "0380014300,book,Nine Princes In Amber,6.99,true,Roger Zelazny,the Chronicles of Amber,1,fantasy", - "0805080481,book,The Book of Three,5.99,true,Lloyd Alexander,The Chronicles of Prydain,1,fantasy", - "080508049X,book,The Black Cauldron,5.99,true,Lloyd Alexander,The Chronicles of Prydain,2,fantasy" - - }; + // "id,cat,name,price,inStock,author_t,series_t,sequence_i,genre_s", + "0553573403,book,A Game of Thrones,7.99,true,George R.R. Martin,A Song of Ice and Fire,1,fantasy", + "0553579908,book,A Clash of Kings,7.99,true,George R.R. Martin,A Song of Ice and Fire,2,fantasy", + "055357342X,book,A Storm of Swords,7.99,true,George R.R. 
Martin,A Song of Ice and Fire,3,fantasy", + "0553293354,book,Foundation,17.99,true,Isaac Asimov,Foundation Novels,1,scifi", + "0812521390,book,The Black Company,6.99,false,Glen Cook,The Chronicles of The Black Company,1,fantasy", + "0812550706,book,Ender's Game,6.99,true,Orson Scott Card,Ender,1,scifi", + "0441385532,book,Jhereg,7.95,false,Steven Brust,Vlad Taltos,1,fantasy", + "0380014300,book,Nine Princes In Amber,6.99,true,Roger Zelazny,the Chronicles of Amber,1,fantasy", + "0805080481,book,The Book of Three,5.99,true,Lloyd Alexander,The Chronicles of Prydain,1,fantasy", + "080508049X,book,The Black Cauldron,5.99,true,Lloyd Alexander,The Chronicles of Prydain,2,fantasy" + + }; for (int i = 0; i < data.length; i++) { String[] parts = data[i].split(","); - client().prepareIndex("test").setId("" + i) - .setSource("author", parts[5], "name", parts[2], "genre", parts[8], "price",Float.parseFloat(parts[3])).get(); - client().prepareIndex("idx_unmapped_author").setId("" + i) - .setSource("name", parts[2], "genre", parts[8],"price",Float.parseFloat(parts[3])).get(); + client().prepareIndex("test") + .setId("" + i) + .setSource("author", parts[5], "name", parts[2], "genre", parts[8], "price", Float.parseFloat(parts[3])) + .get(); + client().prepareIndex("idx_unmapped_author") + .setId("" + i) + .setSource("name", parts[2], "genre", parts[8], "price", Float.parseFloat(parts[3])) + .get(); } client().admin().indices().refresh(new RefreshRequest("test")).get(); } @@ -91,13 +96,14 @@ public void testIssue10719() throws Exception { // Tests that we can refer to nested elements under a sample in a path // statement boolean asc = randomBoolean(); - SearchResponse response = client().prepareSearch("test").setSearchType(SearchType.QUERY_THEN_FETCH) - .addAggregation(terms("genres") - .field("genre") - .order(BucketOrder.aggregation("sample>max_price.value", asc)) - .subAggregation(sampler("sample").shardSize(100) - .subAggregation(max("max_price").field("price"))) - ).get(); + SearchResponse response = client().prepareSearch("test") + .setSearchType(SearchType.QUERY_THEN_FETCH) + .addAggregation( + terms("genres").field("genre") + .order(BucketOrder.aggregation("sample>max_price.value", asc)) + .subAggregation(sampler("sample").shardSize(100).subAggregation(max("max_price").field("price"))) + ) + .get(); assertSearchResponse(response); Terms genres = response.getAggregations().get("genres"); List genreBuckets = genres.getBuckets(); @@ -121,8 +127,13 @@ public void testIssue10719() throws Exception { public void testSimpleSampler() throws Exception { SamplerAggregationBuilder sampleAgg = sampler("sample").shardSize(100); sampleAgg.subAggregation(terms("authors").field("author")); - SearchResponse response = client().prepareSearch("test").setSearchType(SearchType.QUERY_THEN_FETCH) - .setQuery(new TermQueryBuilder("genre", "fantasy")).setFrom(0).setSize(60).addAggregation(sampleAgg).get(); + SearchResponse response = client().prepareSearch("test") + .setSearchType(SearchType.QUERY_THEN_FETCH) + .setQuery(new TermQueryBuilder("genre", "fantasy")) + .setFrom(0) + .setSize(60) + .addAggregation(sampleAgg) + .get(); assertSearchResponse(response); Sampler sample = response.getAggregations().get("sample"); Terms authors = sample.getAggregations().get("authors"); @@ -139,11 +150,12 @@ public void testUnmappedChildAggNoDiversity() throws Exception { SamplerAggregationBuilder sampleAgg = sampler("sample").shardSize(100); sampleAgg.subAggregation(terms("authors").field("author")); SearchResponse response = 
client().prepareSearch("idx_unmapped") - .setSearchType(SearchType.QUERY_THEN_FETCH) - .setQuery(new TermQueryBuilder("genre", "fantasy")) - .setFrom(0).setSize(60) - .addAggregation(sampleAgg) - .get(); + .setSearchType(SearchType.QUERY_THEN_FETCH) + .setQuery(new TermQueryBuilder("genre", "fantasy")) + .setFrom(0) + .setSize(60) + .addAggregation(sampleAgg) + .get(); assertSearchResponse(response); Sampler sample = response.getAggregations().get("sample"); assertThat(sample.getDocCount(), equalTo(0L)); @@ -155,11 +167,13 @@ public void testPartiallyUnmappedChildAggNoDiversity() throws Exception { SamplerAggregationBuilder sampleAgg = sampler("sample").shardSize(100); sampleAgg.subAggregation(terms("authors").field("author")); SearchResponse response = client().prepareSearch("idx_unmapped", "test") - .setSearchType(SearchType.QUERY_THEN_FETCH) - .setQuery(new TermQueryBuilder("genre", "fantasy")) - .setFrom(0).setSize(60).setExplain(true) - .addAggregation(sampleAgg) - .get(); + .setSearchType(SearchType.QUERY_THEN_FETCH) + .setQuery(new TermQueryBuilder("genre", "fantasy")) + .setFrom(0) + .setSize(60) + .setExplain(true) + .addAggregation(sampleAgg) + .get(); assertSearchResponse(response); Sampler sample = response.getAggregations().get("sample"); assertThat(sample.getDocCount(), greaterThan(0L)); @@ -170,8 +184,13 @@ public void testPartiallyUnmappedChildAggNoDiversity() throws Exception { public void testRidiculousShardSizeSampler() throws Exception { SamplerAggregationBuilder sampleAgg = sampler("sample").shardSize(Integer.MAX_VALUE); sampleAgg.subAggregation(terms("authors").field("author")); - SearchResponse response = client().prepareSearch("test").setSearchType(SearchType.QUERY_THEN_FETCH) - .setQuery(new TermQueryBuilder("genre", "fantasy")).setFrom(0).setSize(60).addAggregation(sampleAgg).get(); + SearchResponse response = client().prepareSearch("test") + .setSearchType(SearchType.QUERY_THEN_FETCH) + .setQuery(new TermQueryBuilder("genre", "fantasy")) + .setFrom(0) + .setSize(60) + .addAggregation(sampleAgg) + .get(); assertSearchResponse(response); } } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/ShardReduceIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/ShardReduceIT.java index 0cb218750672c..e182852f461c3 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/ShardReduceIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/ShardReduceIT.java @@ -50,41 +50,51 @@ public class ShardReduceIT extends ESIntegTestCase { private IndexRequestBuilder indexDoc(String date, int value) throws Exception { - return client().prepareIndex("idx").setSource(jsonBuilder() - .startObject() - .field("value", value) - .field("ip", "10.0.0." + value) - .field("location", Geohash.stringEncode(5, 52, Geohash.PRECISION)) - .field("date", date) - .field("term-l", 1) - .field("term-d", 1.5) - .field("term-s", "term") - .startObject("nested") - .field("date", date) - .endObject() - .endObject()); + return client().prepareIndex("idx") + .setSource( + jsonBuilder().startObject() + .field("value", value) + .field("ip", "10.0.0." 
+ value) + .field("location", Geohash.stringEncode(5, 52, Geohash.PRECISION)) + .field("date", date) + .field("term-l", 1) + .field("term-d", 1.5) + .field("term-s", "term") + .startObject("nested") + .field("date", date) + .endObject() + .endObject() + ); } @Override public void setupSuiteScopeCluster() throws Exception { - assertAcked(prepareCreate("idx") - .setMapping("nested", "type=nested", "ip", "type=ip", - "location", "type=geo_point", "term-s", "type=keyword")); - - indexRandom(true, - indexDoc("2014-01-01", 1), - indexDoc("2014-01-02", 2), - indexDoc("2014-01-04", 3)); + assertAcked( + prepareCreate("idx").setMapping( + "nested", + "type=nested", + "ip", + "type=ip", + "location", + "type=geo_point", + "term-s", + "type=keyword" + ) + ); + + indexRandom(true, indexDoc("2014-01-01", 1), indexDoc("2014-01-02", 2), indexDoc("2014-01-04", 3)); ensureSearchable(); } public void testGlobal() throws Exception { SearchResponse response = client().prepareSearch("idx") - .setQuery(QueryBuilders.matchAllQuery()) - .addAggregation(global("global") - .subAggregation(dateHistogram("histo").field("date").fixedInterval(DateHistogramInterval.DAY) - .minDocCount(0))) - .get(); + .setQuery(QueryBuilders.matchAllQuery()) + .addAggregation( + global("global").subAggregation( + dateHistogram("histo").field("date").fixedInterval(DateHistogramInterval.DAY).minDocCount(0) + ) + ) + .get(); assertSearchResponse(response); @@ -95,11 +105,13 @@ public void testGlobal() throws Exception { public void testFilter() throws Exception { SearchResponse response = client().prepareSearch("idx") - .setQuery(QueryBuilders.matchAllQuery()) - .addAggregation(filter("filter", QueryBuilders.matchAllQuery()) - .subAggregation(dateHistogram("histo").field("date").fixedInterval(DateHistogramInterval.DAY) - .minDocCount(0))) - .get(); + .setQuery(QueryBuilders.matchAllQuery()) + .addAggregation( + filter("filter", QueryBuilders.matchAllQuery()).subAggregation( + dateHistogram("histo").field("date").fixedInterval(DateHistogramInterval.DAY).minDocCount(0) + ) + ) + .get(); assertSearchResponse(response); @@ -110,11 +122,12 @@ public void testFilter() throws Exception { public void testMissing() throws Exception { SearchResponse response = client().prepareSearch("idx") - .setQuery(QueryBuilders.matchAllQuery()) - .addAggregation(missing("missing").field("foobar") - .subAggregation(dateHistogram("histo").field("date").fixedInterval(DateHistogramInterval.DAY) - .minDocCount(0))) - .get(); + .setQuery(QueryBuilders.matchAllQuery()) + .addAggregation( + missing("missing").field("foobar") + .subAggregation(dateHistogram("histo").field("date").fixedInterval(DateHistogramInterval.DAY).minDocCount(0)) + ) + .get(); assertSearchResponse(response); @@ -125,13 +138,16 @@ public void testMissing() throws Exception { public void testGlobalWithFilterWithMissing() throws Exception { SearchResponse response = client().prepareSearch("idx") - .setQuery(QueryBuilders.matchAllQuery()) - .addAggregation(global("global") - .subAggregation(filter("filter", QueryBuilders.matchAllQuery()) - .subAggregation(missing("missing").field("foobar") - .subAggregation(dateHistogram("histo").field("date") - .fixedInterval(DateHistogramInterval.DAY).minDocCount(0))))) - .get(); + .setQuery(QueryBuilders.matchAllQuery()) + .addAggregation( + global("global").subAggregation( + filter("filter", QueryBuilders.matchAllQuery()).subAggregation( + missing("missing").field("foobar") + 
.subAggregation(dateHistogram("histo").field("date").fixedInterval(DateHistogramInterval.DAY).minDocCount(0)) + ) + ) + ) + .get(); assertSearchResponse(response); @@ -144,11 +160,13 @@ public void testGlobalWithFilterWithMissing() throws Exception { public void testNested() throws Exception { SearchResponse response = client().prepareSearch("idx") - .setQuery(QueryBuilders.matchAllQuery()) - .addAggregation(nested("nested", "nested") - .subAggregation(dateHistogram("histo").field("nested.date").fixedInterval(DateHistogramInterval.DAY) - .minDocCount(0))) - .get(); + .setQuery(QueryBuilders.matchAllQuery()) + .addAggregation( + nested("nested", "nested").subAggregation( + dateHistogram("histo").field("nested.date").fixedInterval(DateHistogramInterval.DAY).minDocCount(0) + ) + ) + .get(); assertSearchResponse(response); @@ -159,12 +177,13 @@ public void testNested() throws Exception { public void testStringTerms() throws Exception { SearchResponse response = client().prepareSearch("idx") - .setQuery(QueryBuilders.matchAllQuery()) - .addAggregation(terms("terms").field("term-s") - .collectMode(randomFrom(SubAggCollectionMode.values())) - .subAggregation(dateHistogram("histo").field("date").fixedInterval(DateHistogramInterval.DAY) - .minDocCount(0))) - .get(); + .setQuery(QueryBuilders.matchAllQuery()) + .addAggregation( + terms("terms").field("term-s") + .collectMode(randomFrom(SubAggCollectionMode.values())) + .subAggregation(dateHistogram("histo").field("date").fixedInterval(DateHistogramInterval.DAY).minDocCount(0)) + ) + .get(); assertSearchResponse(response); @@ -175,12 +194,13 @@ public void testStringTerms() throws Exception { public void testLongTerms() throws Exception { SearchResponse response = client().prepareSearch("idx") - .setQuery(QueryBuilders.matchAllQuery()) - .addAggregation(terms("terms").field("term-l") - .collectMode(randomFrom(SubAggCollectionMode.values())) - .subAggregation(dateHistogram("histo").field("date").fixedInterval(DateHistogramInterval.DAY) - .minDocCount(0))) - .get(); + .setQuery(QueryBuilders.matchAllQuery()) + .addAggregation( + terms("terms").field("term-l") + .collectMode(randomFrom(SubAggCollectionMode.values())) + .subAggregation(dateHistogram("histo").field("date").fixedInterval(DateHistogramInterval.DAY).minDocCount(0)) + ) + .get(); assertSearchResponse(response); @@ -191,12 +211,13 @@ public void testLongTerms() throws Exception { public void testDoubleTerms() throws Exception { SearchResponse response = client().prepareSearch("idx") - .setQuery(QueryBuilders.matchAllQuery()) - .addAggregation(terms("terms").field("term-d") - .collectMode(randomFrom(SubAggCollectionMode.values())) - .subAggregation(dateHistogram("histo").field("date").fixedInterval(DateHistogramInterval.DAY) - .minDocCount(0))) - .get(); + .setQuery(QueryBuilders.matchAllQuery()) + .addAggregation( + terms("terms").field("term-d") + .collectMode(randomFrom(SubAggCollectionMode.values())) + .subAggregation(dateHistogram("histo").field("date").fixedInterval(DateHistogramInterval.DAY).minDocCount(0)) + ) + .get(); assertSearchResponse(response); @@ -207,11 +228,13 @@ public void testDoubleTerms() throws Exception { public void testRange() throws Exception { SearchResponse response = client().prepareSearch("idx") - .setQuery(QueryBuilders.matchAllQuery()) - .addAggregation(range("range").field("value").addRange("r1", 0, 10) - .subAggregation(dateHistogram("histo").field("date").fixedInterval(DateHistogramInterval.DAY) - .minDocCount(0))) - .get(); + 
.setQuery(QueryBuilders.matchAllQuery()) + .addAggregation( + range("range").field("value") + .addRange("r1", 0, 10) + .subAggregation(dateHistogram("histo").field("date").fixedInterval(DateHistogramInterval.DAY).minDocCount(0)) + ) + .get(); assertSearchResponse(response); @@ -222,11 +245,13 @@ public void testRange() throws Exception { public void testDateRange() throws Exception { SearchResponse response = client().prepareSearch("idx") - .setQuery(QueryBuilders.matchAllQuery()) - .addAggregation(dateRange("range").field("date").addRange("r1", "2014-01-01", "2014-01-10") - .subAggregation(dateHistogram("histo").field("date").fixedInterval(DateHistogramInterval.DAY) - .minDocCount(0))) - .get(); + .setQuery(QueryBuilders.matchAllQuery()) + .addAggregation( + dateRange("range").field("date") + .addRange("r1", "2014-01-01", "2014-01-10") + .subAggregation(dateHistogram("histo").field("date").fixedInterval(DateHistogramInterval.DAY).minDocCount(0)) + ) + .get(); assertSearchResponse(response); @@ -237,11 +262,13 @@ public void testDateRange() throws Exception { public void testIpRange() throws Exception { SearchResponse response = client().prepareSearch("idx") - .setQuery(QueryBuilders.matchAllQuery()) - .addAggregation(ipRange("range").field("ip").addRange("r1", "10.0.0.1", "10.0.0.10") - .subAggregation(dateHistogram("histo").field("date").fixedInterval(DateHistogramInterval.DAY) - .minDocCount(0))) - .get(); + .setQuery(QueryBuilders.matchAllQuery()) + .addAggregation( + ipRange("range").field("ip") + .addRange("r1", "10.0.0.1", "10.0.0.10") + .subAggregation(dateHistogram("histo").field("date").fixedInterval(DateHistogramInterval.DAY).minDocCount(0)) + ) + .get(); assertSearchResponse(response); @@ -252,11 +279,13 @@ public void testIpRange() throws Exception { public void testHistogram() throws Exception { SearchResponse response = client().prepareSearch("idx") - .setQuery(QueryBuilders.matchAllQuery()) - .addAggregation(histogram("topHisto").field("value").interval(5) - .subAggregation(dateHistogram("histo").field("date").fixedInterval(DateHistogramInterval.DAY) - .minDocCount(0))) - .get(); + .setQuery(QueryBuilders.matchAllQuery()) + .addAggregation( + histogram("topHisto").field("value") + .interval(5) + .subAggregation(dateHistogram("histo").field("date").fixedInterval(DateHistogramInterval.DAY).minDocCount(0)) + ) + .get(); assertSearchResponse(response); @@ -267,11 +296,13 @@ public void testHistogram() throws Exception { public void testDateHistogram() throws Exception { SearchResponse response = client().prepareSearch("idx") - .setQuery(QueryBuilders.matchAllQuery()) - .addAggregation(dateHistogram("topHisto").field("date").calendarInterval(DateHistogramInterval.MONTH) - .subAggregation(dateHistogram("histo").field("date").fixedInterval(DateHistogramInterval.DAY) - .minDocCount(0))) - .get(); + .setQuery(QueryBuilders.matchAllQuery()) + .addAggregation( + dateHistogram("topHisto").field("date") + .calendarInterval(DateHistogramInterval.MONTH) + .subAggregation(dateHistogram("histo").field("date").fixedInterval(DateHistogramInterval.DAY).minDocCount(0)) + ) + .get(); assertSearchResponse(response); @@ -283,11 +314,12 @@ public void testDateHistogram() throws Exception { public void testGeoHashGrid() throws Exception { SearchResponse response = client().prepareSearch("idx") - .setQuery(QueryBuilders.matchAllQuery()) - .addAggregation(geohashGrid("grid").field("location") - .subAggregation(dateHistogram("histo").field("date").fixedInterval(DateHistogramInterval.DAY) - 
.minDocCount(0))) - .get(); + .setQuery(QueryBuilders.matchAllQuery()) + .addAggregation( + geohashGrid("grid").field("location") + .subAggregation(dateHistogram("histo").field("date").fixedInterval(DateHistogramInterval.DAY).minDocCount(0)) + ) + .get(); assertSearchResponse(response); @@ -298,11 +330,12 @@ public void testGeoHashGrid() throws Exception { public void testGeoTileGrid() throws Exception { SearchResponse response = client().prepareSearch("idx") - .setQuery(QueryBuilders.matchAllQuery()) - .addAggregation(geotileGrid("grid").field("location") - .subAggregation(dateHistogram("histo").field("date").fixedInterval(DateHistogramInterval.DAY) - .minDocCount(0))) - .get(); + .setQuery(QueryBuilders.matchAllQuery()) + .addAggregation( + geotileGrid("grid").field("location") + .subAggregation(dateHistogram("histo").field("date").fixedInterval(DateHistogramInterval.DAY).minDocCount(0)) + ) + .get(); assertSearchResponse(response); @@ -311,5 +344,4 @@ public void testGeoTileGrid() throws Exception { assertThat(histo.getBuckets().size(), equalTo(4)); } - } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/ShardSizeTermsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/ShardSizeTermsIT.java index 3ae1f3ff4d283..dc3cd1c897780 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/ShardSizeTermsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/ShardSizeTermsIT.java @@ -27,12 +27,13 @@ public void testNoShardSizeString() throws Exception { indexData(); SearchResponse response = client().prepareSearch("idx") - .setQuery(matchAllQuery()) - .addAggregation(terms("keys").field("key").size(3) - .collectMode(randomFrom(SubAggCollectionMode.values())).order(BucketOrder.count(false))) - .get(); + .setQuery(matchAllQuery()) + .addAggregation( + terms("keys").field("key").size(3).collectMode(randomFrom(SubAggCollectionMode.values())).order(BucketOrder.count(false)) + ) + .get(); - Terms terms = response.getAggregations().get("keys"); + Terms terms = response.getAggregations().get("keys"); List buckets = terms.getBuckets(); assertThat(buckets.size(), equalTo(3)); Map expected = new HashMap<>(); @@ -50,12 +51,17 @@ public void testShardSizeEqualsSizeString() throws Exception { indexData(); SearchResponse response = client().prepareSearch("idx") - .setQuery(matchAllQuery()) - .addAggregation(terms("keys").field("key").size(3).shardSize(3) - .collectMode(randomFrom(SubAggCollectionMode.values())).order(BucketOrder.count(false))) - .get(); + .setQuery(matchAllQuery()) + .addAggregation( + terms("keys").field("key") + .size(3) + .shardSize(3) + .collectMode(randomFrom(SubAggCollectionMode.values())) + .order(BucketOrder.count(false)) + ) + .get(); - Terms terms = response.getAggregations().get("keys"); + Terms terms = response.getAggregations().get("keys"); List buckets = terms.getBuckets(); assertThat(buckets.size(), equalTo(3)); Map expected = new HashMap<>(); @@ -74,10 +80,15 @@ public void testWithShardSizeString() throws Exception { indexData(); SearchResponse response = client().prepareSearch("idx") - .setQuery(matchAllQuery()) - .addAggregation(terms("keys").field("key").size(3) - .collectMode(randomFrom(SubAggCollectionMode.values())).shardSize(5).order(BucketOrder.count(false))) - .get(); + .setQuery(matchAllQuery()) + .addAggregation( + terms("keys").field("key") + .size(3) + .collectMode(randomFrom(SubAggCollectionMode.values())) 
+ .shardSize(5) + .order(BucketOrder.count(false)) + ) + .get(); Terms terms = response.getAggregations().get("keys"); List buckets = terms.getBuckets(); @@ -97,11 +108,17 @@ public void testWithShardSizeStringSingleShard() throws Exception { indexData(); - SearchResponse response = client().prepareSearch("idx").setRouting(routing1) - .setQuery(matchAllQuery()) - .addAggregation(terms("keys").field("key").size(3) - .collectMode(randomFrom(SubAggCollectionMode.values())).shardSize(5).order(BucketOrder.count(false))) - .get(); + SearchResponse response = client().prepareSearch("idx") + .setRouting(routing1) + .setQuery(matchAllQuery()) + .addAggregation( + terms("keys").field("key") + .size(3) + .collectMode(randomFrom(SubAggCollectionMode.values())) + .shardSize(5) + .order(BucketOrder.count(false)) + ) + .get(); Terms terms = response.getAggregations().get("keys"); List buckets = terms.getBuckets(); @@ -110,7 +127,7 @@ public void testWithShardSizeStringSingleShard() throws Exception { expected.put("1", 5L); expected.put("2", 4L); expected.put("3", 3L); // <-- count is now fixed - for (Terms.Bucket bucket: buckets) { + for (Terms.Bucket bucket : buckets) { assertThat(bucket.getDocCount(), equalTo(expected.get(bucket.getKey()))); } } @@ -121,12 +138,13 @@ public void testNoShardSizeTermOrderString() throws Exception { indexData(); SearchResponse response = client().prepareSearch("idx") - .setQuery(matchAllQuery()) - .addAggregation(terms("keys").field("key").size(3) - .collectMode(randomFrom(SubAggCollectionMode.values())).order(BucketOrder.key(true))) - .get(); + .setQuery(matchAllQuery()) + .addAggregation( + terms("keys").field("key").size(3).collectMode(randomFrom(SubAggCollectionMode.values())).order(BucketOrder.key(true)) + ) + .get(); - Terms terms = response.getAggregations().get("keys"); + Terms terms = response.getAggregations().get("keys"); List buckets = terms.getBuckets(); assertThat(buckets.size(), equalTo(3)); Map expected = new HashMap<>(); @@ -144,10 +162,11 @@ public void testNoShardSizeLong() throws Exception { indexData(); SearchResponse response = client().prepareSearch("idx") - .setQuery(matchAllQuery()) - .addAggregation(terms("keys").field("key").size(3) - .collectMode(randomFrom(SubAggCollectionMode.values())).order(BucketOrder.count(false))) - .get(); + .setQuery(matchAllQuery()) + .addAggregation( + terms("keys").field("key").size(3).collectMode(randomFrom(SubAggCollectionMode.values())).order(BucketOrder.count(false)) + ) + .get(); Terms terms = response.getAggregations().get("keys"); List buckets = terms.getBuckets(); @@ -167,10 +186,15 @@ public void testShardSizeEqualsSizeLong() throws Exception { indexData(); SearchResponse response = client().prepareSearch("idx") - .setQuery(matchAllQuery()) - .addAggregation(terms("keys").field("key").size(3).shardSize(3) - .collectMode(randomFrom(SubAggCollectionMode.values())).order(BucketOrder.count(false))) - .get(); + .setQuery(matchAllQuery()) + .addAggregation( + terms("keys").field("key") + .size(3) + .shardSize(3) + .collectMode(randomFrom(SubAggCollectionMode.values())) + .order(BucketOrder.count(false)) + ) + .get(); Terms terms = response.getAggregations().get("keys"); List buckets = terms.getBuckets(); @@ -190,10 +214,15 @@ public void testWithShardSizeLong() throws Exception { indexData(); SearchResponse response = client().prepareSearch("idx") - .setQuery(matchAllQuery()) - .addAggregation(terms("keys").field("key").size(3) - 
.collectMode(randomFrom(SubAggCollectionMode.values())).shardSize(5).order(BucketOrder.count(false))) - .get(); + .setQuery(matchAllQuery()) + .addAggregation( + terms("keys").field("key") + .size(3) + .collectMode(randomFrom(SubAggCollectionMode.values())) + .shardSize(5) + .order(BucketOrder.count(false)) + ) + .get(); Terms terms = response.getAggregations().get("keys"); List buckets = terms.getBuckets(); @@ -213,11 +242,17 @@ public void testWithShardSizeLongSingleShard() throws Exception { indexData(); - SearchResponse response = client().prepareSearch("idx").setRouting(routing1) - .setQuery(matchAllQuery()) - .addAggregation(terms("keys").field("key").size(3) - .collectMode(randomFrom(SubAggCollectionMode.values())).shardSize(5).order(BucketOrder.count(false))) - .get(); + SearchResponse response = client().prepareSearch("idx") + .setRouting(routing1) + .setQuery(matchAllQuery()) + .addAggregation( + terms("keys").field("key") + .size(3) + .collectMode(randomFrom(SubAggCollectionMode.values())) + .shardSize(5) + .order(BucketOrder.count(false)) + ) + .get(); Terms terms = response.getAggregations().get("keys"); List buckets = terms.getBuckets(); @@ -237,10 +272,11 @@ public void testNoShardSizeTermOrderLong() throws Exception { indexData(); SearchResponse response = client().prepareSearch("idx") - .setQuery(matchAllQuery()) - .addAggregation(terms("keys").field("key").size(3) - .collectMode(randomFrom(SubAggCollectionMode.values())).order(BucketOrder.key(true))) - .get(); + .setQuery(matchAllQuery()) + .addAggregation( + terms("keys").field("key").size(3).collectMode(randomFrom(SubAggCollectionMode.values())).order(BucketOrder.key(true)) + ) + .get(); Terms terms = response.getAggregations().get("keys"); List buckets = terms.getBuckets(); @@ -260,10 +296,11 @@ public void testNoShardSizeDouble() throws Exception { indexData(); SearchResponse response = client().prepareSearch("idx") - .setQuery(matchAllQuery()) - .addAggregation(terms("keys").field("key").size(3) - .collectMode(randomFrom(SubAggCollectionMode.values())).order(BucketOrder.count(false))) - .get(); + .setQuery(matchAllQuery()) + .addAggregation( + terms("keys").field("key").size(3).collectMode(randomFrom(SubAggCollectionMode.values())).order(BucketOrder.count(false)) + ) + .get(); Terms terms = response.getAggregations().get("keys"); List buckets = terms.getBuckets(); @@ -283,10 +320,15 @@ public void testShardSizeEqualsSizeDouble() throws Exception { indexData(); SearchResponse response = client().prepareSearch("idx") - .setQuery(matchAllQuery()) - .addAggregation(terms("keys").field("key").size(3).shardSize(3) - .collectMode(randomFrom(SubAggCollectionMode.values())).order(BucketOrder.count(false))) - .get(); + .setQuery(matchAllQuery()) + .addAggregation( + terms("keys").field("key") + .size(3) + .shardSize(3) + .collectMode(randomFrom(SubAggCollectionMode.values())) + .order(BucketOrder.count(false)) + ) + .get(); Terms terms = response.getAggregations().get("keys"); List buckets = terms.getBuckets(); @@ -306,10 +348,15 @@ public void testWithShardSizeDouble() throws Exception { indexData(); SearchResponse response = client().prepareSearch("idx") - .setQuery(matchAllQuery()) - .addAggregation(terms("keys").field("key").size(3) - .collectMode(randomFrom(SubAggCollectionMode.values())).shardSize(5).order(BucketOrder.count(false))) - .get(); + .setQuery(matchAllQuery()) + .addAggregation( + terms("keys").field("key") + .size(3) + .collectMode(randomFrom(SubAggCollectionMode.values())) + .shardSize(5) + 
.order(BucketOrder.count(false)) + ) + .get(); Terms terms = response.getAggregations().get("keys"); List buckets = terms.getBuckets(); @@ -328,11 +375,17 @@ public void testWithShardSizeDoubleSingleShard() throws Exception { indexData(); - SearchResponse response = client().prepareSearch("idx").setRouting(routing1) - .setQuery(matchAllQuery()) - .addAggregation(terms("keys").field("key").size(3) - .collectMode(randomFrom(SubAggCollectionMode.values())).shardSize(5).order(BucketOrder.count(false))) - .get(); + SearchResponse response = client().prepareSearch("idx") + .setRouting(routing1) + .setQuery(matchAllQuery()) + .addAggregation( + terms("keys").field("key") + .size(3) + .collectMode(randomFrom(SubAggCollectionMode.values())) + .shardSize(5) + .order(BucketOrder.count(false)) + ) + .get(); Terms terms = response.getAggregations().get("keys"); List buckets = terms.getBuckets(); @@ -352,10 +405,11 @@ public void testNoShardSizeTermOrderDouble() throws Exception { indexData(); SearchResponse response = client().prepareSearch("idx") - .setQuery(matchAllQuery()) - .addAggregation(terms("keys").field("key").size(3) - .collectMode(randomFrom(SubAggCollectionMode.values())).order(BucketOrder.key(true))) - .get(); + .setQuery(matchAllQuery()) + .addAggregation( + terms("keys").field("key").size(3).collectMode(randomFrom(SubAggCollectionMode.values())).order(BucketOrder.key(true)) + ) + .get(); Terms terms = response.getAggregations().get("keys"); List buckets = terms.getBuckets(); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/SignificantTermsSignificanceScoreIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/SignificantTermsSignificanceScoreIT.java index 4395f34758208..1aa729a7119c8 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/SignificantTermsSignificanceScoreIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/SignificantTermsSignificanceScoreIT.java @@ -86,12 +86,15 @@ public Map, Object>> pluginScripts() { Map, Object>> scripts = new HashMap<>(); scripts.put("script_with_params", params -> { double factor = ((Number) params.get("param")).doubleValue(); - return factor * (longValue(params.get("_subset_freq")) + longValue(params.get("_subset_size")) + - longValue(params.get("_superset_freq")) + longValue(params.get("_superset_size"))) / factor; + return factor * (longValue(params.get("_subset_freq")) + longValue(params.get("_subset_size")) + longValue( + params.get("_superset_freq") + ) + longValue(params.get("_superset_size"))) / factor; }); - scripts.put("script_no_params", params -> - longValue(params.get("_subset_freq")) + longValue(params.get("_subset_size")) + - longValue(params.get("_superset_freq")) + longValue(params.get("_superset_size")) + scripts.put( + "script_no_params", + params -> longValue(params.get("_subset_freq")) + longValue(params.get("_subset_size")) + longValue( + params.get("_superset_freq") + ) + longValue(params.get("_superset_size")) ); return scripts; } @@ -116,21 +119,18 @@ public void testXContentResponse() throws Exception { SharedSignificantTermsTestMethods.index01Docs(type, settings, this); SearchRequestBuilder request; - if ("text".equals(type) && randomBoolean() ) { + if ("text".equals(type) && randomBoolean()) { // Use significant_text on text fields but occasionally run with alternative of // significant_terms on legacy fieldData=true too. 
request = client().prepareSearch(INDEX_NAME) - .addAggregation(terms("class").field(CLASS_FIELD) - .subAggregation(significantText("sig_terms", TEXT_FIELD))); + .addAggregation(terms("class").field(CLASS_FIELD).subAggregation(significantText("sig_terms", TEXT_FIELD))); } else { request = client().prepareSearch(INDEX_NAME) - .addAggregation(terms("class").field(CLASS_FIELD) - .subAggregation(significantTerms("sig_terms").field(TEXT_FIELD))); + .addAggregation(terms("class").field(CLASS_FIELD).subAggregation(significantTerms("sig_terms").field(TEXT_FIELD))); } SearchResponse response = request.get(); - assertSearchResponse(response); StringTerms classes = response.getAggregations().get("class"); assertThat(classes.getBuckets().size(), equalTo(2)); @@ -150,57 +150,57 @@ public void testXContentResponse() throws Exception { responseBuilder.endObject(); String result = "{\"class\":{\"doc_count_error_upper_bound\":0,\"sum_other_doc_count\":0," - + "\"buckets\":[" - + "{" - + "\"key\":\"0\"," - + "\"doc_count\":4," - + "\"sig_terms\":{" - + "\"doc_count\":4," - + "\"bg_count\":7," - + "\"buckets\":[" - + "{" - + "\"key\":" + (type.equals("long") ? "0," : "\"0\",") - + "\"doc_count\":4," - + "\"score\":0.39999999999999997," - + "\"bg_count\":5" - + "}" - + "]" - + "}" - + "}," - + "{" - + "\"key\":\"1\"," - + "\"doc_count\":3," - + "\"sig_terms\":{" - + "\"doc_count\":3," - + "\"bg_count\":7," - + "\"buckets\":[" - + "{" - + "\"key\":" + (type.equals("long") ? "1," : "\"1\",") - + "\"doc_count\":3," - + "\"score\":0.75," - + "\"bg_count\":4" - + "}]}}]}}"; + + "\"buckets\":[" + + "{" + + "\"key\":\"0\"," + + "\"doc_count\":4," + + "\"sig_terms\":{" + + "\"doc_count\":4," + + "\"bg_count\":7," + + "\"buckets\":[" + + "{" + + "\"key\":" + + (type.equals("long") ? "0," : "\"0\",") + + "\"doc_count\":4," + + "\"score\":0.39999999999999997," + + "\"bg_count\":5" + + "}" + + "]" + + "}" + + "}," + + "{" + + "\"key\":\"1\"," + + "\"doc_count\":3," + + "\"sig_terms\":{" + + "\"doc_count\":3," + + "\"bg_count\":7," + + "\"buckets\":[" + + "{" + + "\"key\":" + + (type.equals("long") ? 
"1," : "\"1\",") + + "\"doc_count\":3," + + "\"score\":0.75," + + "\"bg_count\":4" + + "}]}}]}}"; assertThat(Strings.toString(responseBuilder), equalTo(result)); } public void testPopularTermManyDeletedDocs() throws Exception { String settings = "{\"index.number_of_shards\": 1, \"index.number_of_replicas\": 0}"; - assertAcked(prepareCreate(INDEX_NAME).setSettings(settings, XContentType.JSON) - .setMapping("text", "type=keyword", CLASS_FIELD, "type=keyword")); - String[] cat1v1 = {"constant", "one"}; - String[] cat1v2 = {"constant", "uno"}; - String[] cat2v1 = {"constant", "two"}; - String[] cat2v2 = {"constant", "duo"}; + assertAcked( + prepareCreate(INDEX_NAME).setSettings(settings, XContentType.JSON) + .setMapping("text", "type=keyword", CLASS_FIELD, "type=keyword") + ); + String[] cat1v1 = { "constant", "one" }; + String[] cat1v2 = { "constant", "uno" }; + String[] cat2v1 = { "constant", "two" }; + String[] cat2v2 = { "constant", "duo" }; List indexRequestBuilderList = new ArrayList<>(); - indexRequestBuilderList.add(client().prepareIndex(INDEX_NAME).setId("1") - .setSource(TEXT_FIELD, cat1v1, CLASS_FIELD, "1")); - indexRequestBuilderList.add(client().prepareIndex(INDEX_NAME).setId("2") - .setSource(TEXT_FIELD, cat1v2, CLASS_FIELD, "1")); - indexRequestBuilderList.add(client().prepareIndex(INDEX_NAME).setId("3") - .setSource(TEXT_FIELD, cat2v1, CLASS_FIELD, "2")); - indexRequestBuilderList.add(client().prepareIndex(INDEX_NAME).setId("4") - .setSource(TEXT_FIELD, cat2v2, CLASS_FIELD, "2")); + indexRequestBuilderList.add(client().prepareIndex(INDEX_NAME).setId("1").setSource(TEXT_FIELD, cat1v1, CLASS_FIELD, "1")); + indexRequestBuilderList.add(client().prepareIndex(INDEX_NAME).setId("2").setSource(TEXT_FIELD, cat1v2, CLASS_FIELD, "1")); + indexRequestBuilderList.add(client().prepareIndex(INDEX_NAME).setId("3").setSource(TEXT_FIELD, cat2v1, CLASS_FIELD, "2")); + indexRequestBuilderList.add(client().prepareIndex(INDEX_NAME).setId("4").setSource(TEXT_FIELD, cat2v2, CLASS_FIELD, "2")); indexRandom(true, false, indexRequestBuilderList); // Now create some holes in the index with selective deletes caused by updates. @@ -215,26 +215,15 @@ public void testPopularTermManyDeletedDocs() throws Exception { } indexRandom(true, false, indexRequestBuilderList); - SearchRequestBuilder request; - if (randomBoolean() ) { + if (randomBoolean()) { request = client().prepareSearch(INDEX_NAME) .addAggregation( - terms("class") - .field(CLASS_FIELD) - .subAggregation( - significantTerms("sig_terms") - .field(TEXT_FIELD) - .minDocCount(1))); - } else - { + terms("class").field(CLASS_FIELD).subAggregation(significantTerms("sig_terms").field(TEXT_FIELD).minDocCount(1)) + ); + } else { request = client().prepareSearch(INDEX_NAME) - .addAggregation( - terms("class") - .field(CLASS_FIELD) - .subAggregation( - significantText("sig_terms", TEXT_FIELD) - .minDocCount(1))); + .addAggregation(terms("class").field(CLASS_FIELD).subAggregation(significantText("sig_terms", TEXT_FIELD).minDocCount(1))); } request.get(); @@ -254,32 +243,33 @@ public void testBackgroundVsSeparateSet() throws Exception { // 1. terms agg on class and significant terms // 2. 
filter buckets and set the background to the other class and set is_background false // both should yield exact same result - public void testBackgroundVsSeparateSet(SignificanceHeuristic significanceHeuristicExpectingSuperset, - SignificanceHeuristic significanceHeuristicExpectingSeparateSets, - String type) throws Exception { + public void testBackgroundVsSeparateSet( + SignificanceHeuristic significanceHeuristicExpectingSuperset, + SignificanceHeuristic significanceHeuristicExpectingSeparateSets, + String type + ) throws Exception { final boolean useSigText = randomBoolean() && type.equals("text"); SearchRequestBuilder request1; if (useSigText) { request1 = client().prepareSearch(INDEX_NAME) - .addAggregation(terms("class") - .field(CLASS_FIELD) - .subAggregation( - significantText("sig_terms", TEXT_FIELD) - .minDocCount(1) - .significanceHeuristic( - significanceHeuristicExpectingSuperset))); - }else - { + .addAggregation( + terms("class").field(CLASS_FIELD) + .subAggregation( + significantText("sig_terms", TEXT_FIELD).minDocCount(1) + .significanceHeuristic(significanceHeuristicExpectingSuperset) + ) + ); + } else { request1 = client().prepareSearch(INDEX_NAME) - .addAggregation(terms("class") - .field(CLASS_FIELD) - .subAggregation( - significantTerms("sig_terms") - .field(TEXT_FIELD) - .minDocCount(1) - .significanceHeuristic( - significanceHeuristicExpectingSuperset))); + .addAggregation( + terms("class").field(CLASS_FIELD) + .subAggregation( + significantTerms("sig_terms").field(TEXT_FIELD) + .minDocCount(1) + .significanceHeuristic(significanceHeuristicExpectingSuperset) + ) + ); } SearchResponse response1 = request1.get(); @@ -288,31 +278,38 @@ public void testBackgroundVsSeparateSet(SignificanceHeuristic significanceHeuris SearchRequestBuilder request2; if (useSigText) { request2 = client().prepareSearch(INDEX_NAME) - .addAggregation(filter("0", QueryBuilders.termQuery(CLASS_FIELD, "0")) - .subAggregation(significantText("sig_terms", TEXT_FIELD) - .minDocCount(1) - .backgroundFilter(QueryBuilders.termQuery(CLASS_FIELD, "1")) - .significanceHeuristic(significanceHeuristicExpectingSeparateSets))) - .addAggregation(filter("1", QueryBuilders.termQuery(CLASS_FIELD, "1")) - .subAggregation(significantText("sig_terms", TEXT_FIELD) - .minDocCount(1) - .backgroundFilter(QueryBuilders.termQuery(CLASS_FIELD, "0")) - .significanceHeuristic(significanceHeuristicExpectingSeparateSets))); - }else - { + .addAggregation( + filter("0", QueryBuilders.termQuery(CLASS_FIELD, "0")).subAggregation( + significantText("sig_terms", TEXT_FIELD).minDocCount(1) + .backgroundFilter(QueryBuilders.termQuery(CLASS_FIELD, "1")) + .significanceHeuristic(significanceHeuristicExpectingSeparateSets) + ) + ) + .addAggregation( + filter("1", QueryBuilders.termQuery(CLASS_FIELD, "1")).subAggregation( + significantText("sig_terms", TEXT_FIELD).minDocCount(1) + .backgroundFilter(QueryBuilders.termQuery(CLASS_FIELD, "0")) + .significanceHeuristic(significanceHeuristicExpectingSeparateSets) + ) + ); + } else { request2 = client().prepareSearch(INDEX_NAME) - .addAggregation(filter("0", QueryBuilders.termQuery(CLASS_FIELD, "0")) - .subAggregation(significantTerms("sig_terms") - .field(TEXT_FIELD) - .minDocCount(1) - .backgroundFilter(QueryBuilders.termQuery(CLASS_FIELD, "1")) - .significanceHeuristic(significanceHeuristicExpectingSeparateSets))) - .addAggregation(filter("1", QueryBuilders.termQuery(CLASS_FIELD, "1")) - .subAggregation(significantTerms("sig_terms") - .field(TEXT_FIELD) - .minDocCount(1) - 
.backgroundFilter(QueryBuilders.termQuery(CLASS_FIELD, "0")) - .significanceHeuristic(significanceHeuristicExpectingSeparateSets))); + .addAggregation( + filter("0", QueryBuilders.termQuery(CLASS_FIELD, "0")).subAggregation( + significantTerms("sig_terms").field(TEXT_FIELD) + .minDocCount(1) + .backgroundFilter(QueryBuilders.termQuery(CLASS_FIELD, "1")) + .significanceHeuristic(significanceHeuristicExpectingSeparateSets) + ) + ) + .addAggregation( + filter("1", QueryBuilders.termQuery(CLASS_FIELD, "1")).subAggregation( + significantTerms("sig_terms").field(TEXT_FIELD) + .minDocCount(1) + .backgroundFilter(QueryBuilders.termQuery(CLASS_FIELD, "0")) + .significanceHeuristic(significanceHeuristicExpectingSeparateSets) + ) + ); } SearchResponse response2 = request2.get(); @@ -351,21 +348,32 @@ public void testScoresEqualForPositiveAndNegative() throws Exception { public void testScoresEqualForPositiveAndNegative(SignificanceHeuristic heuristic) throws Exception { - //check that results for both classes are the same with exclude negatives = false and classes are routing ids + // check that results for both classes are the same with exclude negatives = false and classes are routing ids SearchRequestBuilder request; if (randomBoolean()) { request = client().prepareSearch("test") - .addAggregation(terms("class").field("class").subAggregation(significantTerms("mySignificantTerms") - .field("text") - .executionHint(randomExecutionHint()) - .significanceHeuristic(heuristic) - .minDocCount(1).shardSize(1000).size(1000))); - }else - { + .addAggregation( + terms("class").field("class") + .subAggregation( + significantTerms("mySignificantTerms").field("text") + .executionHint(randomExecutionHint()) + .significanceHeuristic(heuristic) + .minDocCount(1) + .shardSize(1000) + .size(1000) + ) + ); + } else { request = client().prepareSearch("test") - .addAggregation(terms("class").field("class").subAggregation(significantText("mySignificantTerms", "text") - .significanceHeuristic(heuristic) - .minDocCount(1).shardSize(1000).size(1000))); + .addAggregation( + terms("class").field("class") + .subAggregation( + significantText("mySignificantTerms", "text").significanceHeuristic(heuristic) + .minDocCount(1) + .shardSize(1000) + .size(1000) + ) + ); } SearchResponse response = request.get(); assertSearchResponse(response); @@ -397,17 +405,15 @@ public void testSubAggregations() throws Exception { QueryBuilder query = QueryBuilders.termsQuery(TEXT_FIELD, "a", "b"); AggregationBuilder subAgg = terms("class").field(CLASS_FIELD); - AggregationBuilder agg = significantTerms("significant_terms") - .field(TEXT_FIELD) + AggregationBuilder agg = significantTerms("significant_terms").field(TEXT_FIELD) .executionHint(randomExecutionHint()) .significanceHeuristic(new ChiSquare(true, true)) - .minDocCount(1).shardSize(1000).size(1000) + .minDocCount(1) + .shardSize(1000) + .size(1000) .subAggregation(subAgg); - SearchResponse response = client().prepareSearch("test") - .setQuery(query) - .addAggregation(agg) - .get(); + SearchResponse response = client().prepareSearch("test").setQuery(query).addAggregation(agg).get(); assertSearchResponse(response); SignificantTerms sigTerms = response.getAggregations().get("significant_terms"); @@ -420,36 +426,35 @@ public void testSubAggregations() throws Exception { } private void indexEqualTestData() throws ExecutionException, InterruptedException { - assertAcked(prepareCreate("test") - .setSettings(Settings.builder().put(SETTING_NUMBER_OF_SHARDS, 1).put(SETTING_NUMBER_OF_REPLICAS, 0)) - 
.setMapping("text", "type=text,fielddata=true", "class", "type=keyword")); + assertAcked( + prepareCreate("test").setSettings(Settings.builder().put(SETTING_NUMBER_OF_SHARDS, 1).put(SETTING_NUMBER_OF_REPLICAS, 0)) + .setMapping("text", "type=text,fielddata=true", "class", "type=keyword") + ); createIndex("idx_unmapped"); ensureGreen(); String data[] = { - "A\ta", - "A\ta", - "A\tb", - "A\tb", - "A\tb", - "B\tc", - "B\tc", - "B\tc", - "B\tc", - "B\td", - "B\td", - "B\td", - "B\td", - "B\td", - "A\tc d", - "B\ta b" - }; + "A\ta", + "A\ta", + "A\tb", + "A\tb", + "A\tb", + "B\tc", + "B\tc", + "B\tc", + "B\tc", + "B\td", + "B\td", + "B\td", + "B\td", + "B\td", + "A\tc d", + "B\ta b" }; List indexRequestBuilders = new ArrayList<>(); for (int i = 0; i < data.length; i++) { String[] parts = data[i].split("\t"); - indexRequestBuilders.add(client().prepareIndex("test").setId("" + i) - .setSource("class", parts[0], "text", parts[1])); + indexRequestBuilders.add(client().prepareIndex("test").setId("" + i).setSource("class", parts[0], "text", parts[1])); } indexRandom(true, false, indexRequestBuilders); } @@ -462,26 +467,38 @@ public void testScriptScore() throws ExecutionException, InterruptedException, I SearchRequestBuilder request; if ("text".equals(type) && randomBoolean()) { request = client().prepareSearch(INDEX_NAME) - .addAggregation(terms("class").field(CLASS_FIELD) - .subAggregation(significantText("mySignificantTerms", TEXT_FIELD) - .significanceHeuristic(scriptHeuristic) - .minDocCount(1).shardSize(2).size(2))); + .addAggregation( + terms("class").field(CLASS_FIELD) + .subAggregation( + significantText("mySignificantTerms", TEXT_FIELD).significanceHeuristic(scriptHeuristic) + .minDocCount(1) + .shardSize(2) + .size(2) + ) + ); } else { request = client().prepareSearch(INDEX_NAME) - .addAggregation(terms("class").field(CLASS_FIELD) - .subAggregation(significantTerms("mySignificantTerms") - .field(TEXT_FIELD) - .executionHint(randomExecutionHint()) - .significanceHeuristic(scriptHeuristic) - .minDocCount(1).shardSize(2).size(2))); + .addAggregation( + terms("class").field(CLASS_FIELD) + .subAggregation( + significantTerms("mySignificantTerms").field(TEXT_FIELD) + .executionHint(randomExecutionHint()) + .significanceHeuristic(scriptHeuristic) + .minDocCount(1) + .shardSize(2) + .size(2) + ) + ); } SearchResponse response = request.get(); assertSearchResponse(response); for (Terms.Bucket classBucket : ((Terms) response.getAggregations().get("class")).getBuckets()) { SignificantTerms sigTerms = classBucket.getAggregations().get("mySignificantTerms"); for (SignificantTerms.Bucket bucket : sigTerms.getBuckets()) { - assertThat(bucket.getSignificanceScore(), - is((double) bucket.getSubsetDf() + bucket.getSubsetSize() + bucket.getSupersetDf() + bucket.getSupersetSize())); + assertThat( + bucket.getSignificanceScore(), + is((double) bucket.getSubsetDf() + bucket.getSubsetSize() + bucket.getSupersetDf() + bucket.getSupersetSize()) + ); } } } @@ -504,7 +521,7 @@ private void indexRandomFrequencies01(String type) throws ExecutionException, In textMappings += ",fielddata=true"; } assertAcked(prepareCreate(INDEX_NAME).setMapping(TEXT_FIELD, textMappings, CLASS_FIELD, "type=keyword")); - String[] gb = {"0", "1"}; + String[] gb = { "0", "1" }; List indexRequestBuilderList = new ArrayList<>(); for (int i = 0; i < randomInt(20); i++) { int randNum = randomInt(2); @@ -514,8 +531,9 @@ private void indexRandomFrequencies01(String type) throws ExecutionException, In } else { text[0] = gb[randNum]; } - 
indexRequestBuilderList.add(client().prepareIndex(INDEX_NAME) - .setSource(TEXT_FIELD, text, CLASS_FIELD, randomBoolean() ? "one" : "zero")); + indexRequestBuilderList.add( + client().prepareIndex(INDEX_NAME).setSource(TEXT_FIELD, text, CLASS_FIELD, randomBoolean() ? "one" : "zero") + ); } indexRandom(true, indexRequestBuilderList); } @@ -529,17 +547,40 @@ public void testReduceFromSeveralShards() throws IOException, ExecutionException * Ensure requests using nondeterministic scripts do not get cached. */ public void testScriptCaching() throws Exception { - assertAcked(prepareCreate("cache_test_idx").setMapping("s", "type=long", "t", "type=text") + assertAcked( + prepareCreate("cache_test_idx").setMapping("s", "type=long", "t", "type=text") .setSettings(Settings.builder().put("requests.cache.enable", true).put("number_of_shards", 1).put("number_of_replicas", 1)) - .get()); - indexRandom(true, client().prepareIndex("cache_test_idx").setId("1").setSource("s", 1, "t", "foo"), - client().prepareIndex("cache_test_idx").setId("2").setSource("s", 2, "t", "bar")); + .get() + ); + indexRandom( + true, + client().prepareIndex("cache_test_idx").setId("1").setSource("s", 1, "t", "foo"), + client().prepareIndex("cache_test_idx").setId("2").setSource("s", 2, "t", "bar") + ); // Make sure we are starting with a clear cache - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getHitCount(), equalTo(0L)); - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getMissCount(), equalTo(0L)); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getHitCount(), + equalTo(0L) + ); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getMissCount(), + equalTo(0L) + ); // Test that a request using a nondeterministic script does not get cached ScriptHeuristic scriptHeuristic = new ScriptHeuristic( @@ -548,35 +589,79 @@ public void testScriptCaching() throws Exception { boolean useSigText = randomBoolean(); SearchResponse r; if (useSigText) { - r = client().prepareSearch("cache_test_idx").setSize(0) - .addAggregation(significantText("foo", "t").significanceHeuristic(scriptHeuristic)).get(); + r = client().prepareSearch("cache_test_idx") + .setSize(0) + .addAggregation(significantText("foo", "t").significanceHeuristic(scriptHeuristic)) + .get(); } else { - r = client().prepareSearch("cache_test_idx").setSize(0) - .addAggregation(significantTerms("foo").field("s").significanceHeuristic(scriptHeuristic)).get(); + r = client().prepareSearch("cache_test_idx") + .setSize(0) + .addAggregation(significantTerms("foo").field("s").significanceHeuristic(scriptHeuristic)) + .get(); } assertSearchResponse(r); - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getHitCount(), equalTo(0L)); - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getMissCount(), equalTo(0L)); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getHitCount(), + equalTo(0L) + ); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") 
+ .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getMissCount(), + equalTo(0L) + ); // Test that a request using a deterministic script gets cached scriptHeuristic = getScriptSignificanceHeuristic(); useSigText = randomBoolean(); if (useSigText) { - r = client().prepareSearch("cache_test_idx").setSize(0) - .addAggregation(significantText("foo", "t").significanceHeuristic(scriptHeuristic)).get(); + r = client().prepareSearch("cache_test_idx") + .setSize(0) + .addAggregation(significantText("foo", "t").significanceHeuristic(scriptHeuristic)) + .get(); } else { - r = client().prepareSearch("cache_test_idx").setSize(0) - .addAggregation(significantTerms("foo").field("s").significanceHeuristic(scriptHeuristic)).get(); + r = client().prepareSearch("cache_test_idx") + .setSize(0) + .addAggregation(significantTerms("foo").field("s").significanceHeuristic(scriptHeuristic)) + .get(); } assertSearchResponse(r); - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getHitCount(), equalTo(0L)); - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getMissCount(), equalTo(1L)); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getHitCount(), + equalTo(0L) + ); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getMissCount(), + equalTo(1L) + ); // Ensure that non-scripted requests are cached as normal if (useSigText) { @@ -586,9 +671,27 @@ public void testScriptCaching() throws Exception { } assertSearchResponse(r); - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getHitCount(), equalTo(0L)); - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getMissCount(), equalTo(2L)); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getHitCount(), + equalTo(0L) + ); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getMissCount(), + equalTo(2L) + ); } } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/TermsDocCountErrorIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/TermsDocCountErrorIT.java index a2e76892342ba..c22d38e3f79e8 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/TermsDocCountErrorIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/TermsDocCountErrorIT.java @@ -43,7 +43,6 @@ public class TermsDocCountErrorIT extends ESIntegTestCase { private static final String LONG_FIELD_NAME = "l_value"; private static final String DOUBLE_FIELD_NAME = "d_value"; - public static String randomExecutionHint() { return randomBoolean() ? 
null : randomFrom(ExecutionMode.values()).toString(); } @@ -52,42 +51,55 @@ public static String randomExecutionHint() { @Override public void setupSuiteScopeCluster() throws Exception { - assertAcked(client().admin().indices().prepareCreate("idx") - .setMapping(STRING_FIELD_NAME, "type=keyword").get()); + assertAcked(client().admin().indices().prepareCreate("idx").setMapping(STRING_FIELD_NAME, "type=keyword").get()); List builders = new ArrayList<>(); int numDocs = between(10, 200); - int numUniqueTerms = between(2,numDocs/2); + int numUniqueTerms = between(2, numDocs / 2); for (int i = 0; i < numDocs; i++) { - builders.add(client().prepareIndex("idx").setId(""+i).setSource(jsonBuilder() - .startObject() - .field(STRING_FIELD_NAME, "val" + randomInt(numUniqueTerms)) - .field(LONG_FIELD_NAME, randomInt(numUniqueTerms)) - .field(DOUBLE_FIELD_NAME, 1.0 * randomInt(numUniqueTerms)) - .endObject())); + builders.add( + client().prepareIndex("idx") + .setId("" + i) + .setSource( + jsonBuilder().startObject() + .field(STRING_FIELD_NAME, "val" + randomInt(numUniqueTerms)) + .field(LONG_FIELD_NAME, randomInt(numUniqueTerms)) + .field(DOUBLE_FIELD_NAME, 1.0 * randomInt(numUniqueTerms)) + .endObject() + ) + ); } - assertAcked(prepareCreate("idx_single_shard") - .setMapping(STRING_FIELD_NAME, "type=keyword") - .setSettings(Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1))); + assertAcked( + prepareCreate("idx_single_shard").setMapping(STRING_FIELD_NAME, "type=keyword") + .setSettings(Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1)) + ); for (int i = 0; i < numDocs; i++) { - builders.add(client().prepareIndex("idx_single_shard").setId(""+i).setSource(jsonBuilder() - .startObject() - .field(STRING_FIELD_NAME, "val" + randomInt(numUniqueTerms)) - .field(LONG_FIELD_NAME, randomInt(numUniqueTerms)) - .field(DOUBLE_FIELD_NAME, 1.0 * randomInt(numUniqueTerms)) - .endObject())); + builders.add( + client().prepareIndex("idx_single_shard") + .setId("" + i) + .setSource( + jsonBuilder().startObject() + .field(STRING_FIELD_NAME, "val" + randomInt(numUniqueTerms)) + .field(LONG_FIELD_NAME, randomInt(numUniqueTerms)) + .field(DOUBLE_FIELD_NAME, 1.0 * randomInt(numUniqueTerms)) + .endObject() + ) + ); } - numRoutingValues = between(1,40); - assertAcked(prepareCreate("idx_with_routing") - .setMapping("{ \"_routing\" : { \"required\" : true } }")); + numRoutingValues = between(1, 40); + assertAcked(prepareCreate("idx_with_routing").setMapping("{ \"_routing\" : { \"required\" : true } }")); for (int i = 0; i < numDocs; i++) { - builders.add(client().prepareIndex("idx_single_shard").setId("" + i) - .setRouting(String.valueOf(randomInt(numRoutingValues))) - .setSource(jsonBuilder() - .startObject() - .field(STRING_FIELD_NAME, "val" + randomInt(numUniqueTerms)) - .field(LONG_FIELD_NAME, randomInt(numUniqueTerms)) - .field(DOUBLE_FIELD_NAME, 1.0 * randomInt(numUniqueTerms)) - .endObject())); + builders.add( + client().prepareIndex("idx_single_shard") + .setId("" + i) + .setRouting(String.valueOf(randomInt(numRoutingValues))) + .setSource( + jsonBuilder().startObject() + .field(STRING_FIELD_NAME, "val" + randomInt(numUniqueTerms)) + .field(LONG_FIELD_NAME, randomInt(numUniqueTerms)) + .field(DOUBLE_FIELD_NAME, 1.0 * randomInt(numUniqueTerms)) + .endObject() + ) + ); } Map shard0DocsPerTerm = new HashMap<>(); @@ -127,25 +139,13 @@ public void setupSuiteScopeCluster() throws Exception { shard2DocsPerTerm.put("D", 1); buildIndex(shard2DocsPerTerm, "idx_fixed_docs_2", 2, builders); - Map 
shard3DocsPerTerm = Map.of( - "A", 1, - "B", 1, - "C", 1 - ); + Map shard3DocsPerTerm = Map.of("A", 1, "B", 1, "C", 1); buildIndex(shard3DocsPerTerm, "idx_fixed_docs_3", 3, builders); - Map shard4DocsPerTerm = Map.of( - "K", 1, - "L", 1, - "M", 1 - ); + Map shard4DocsPerTerm = Map.of("K", 1, "L", 1, "M", 1); buildIndex(shard4DocsPerTerm, "idx_fixed_docs_4", 4, builders); - Map shard5DocsPerTerm = Map.of( - "X", 1, - "Y", 1, - "Z", 1 - ); + Map shard5DocsPerTerm = Map.of("X", 1, "Y", 1, "Z", 1); buildIndex(shard5DocsPerTerm, "idx_fixed_docs_5", 5, builders); indexRandom(true, builders); @@ -194,7 +194,7 @@ private void assertDocCountErrorWithinBounds(int size, SearchResponse accurateRe assertThat(testBucket.getDocCount() - testBucket.getDocCountError(), lessThanOrEqualTo(accurateBucket.getDocCount())); } - for (Terms.Bucket accurateBucket: accurateTerms.getBuckets()) { + for (Terms.Bucket accurateBucket : accurateTerms.getBuckets()) { assertThat(accurateBucket, notNullValue()); Terms.Bucket testBucket = accurateTerms.getBucketByKey(accurateBucket.getKeyAsString()); if (testBucket == null) { @@ -250,7 +250,7 @@ private void assertUnboundedDocCountError(int size, SearchResponse accurateRespo Terms testTerms = testResponse.getAggregations().get("terms"); assertThat(testTerms, notNullValue()); assertThat(testTerms.getName(), equalTo("terms")); - assertThat(testTerms.getDocCountError(),anyOf(equalTo(-1L), equalTo(0L))); + assertThat(testTerms.getDocCountError(), anyOf(equalTo(-1L), equalTo(0L))); List testBuckets = testTerms.getBuckets(); assertThat(testBuckets.size(), lessThanOrEqualTo(size)); assertThat(accurateTerms.getBuckets().size(), greaterThanOrEqualTo(testBuckets.size())); @@ -268,25 +268,28 @@ public void testStringValueField() throws Exception { int size = randomIntBetween(1, 20); int shardSize = randomIntBetween(size, size * 2); SearchResponse accurateResponse = client().prepareSearch("idx") - .addAggregation(terms("terms") - .executionHint(randomExecutionHint()) - .field(STRING_FIELD_NAME) - .showTermDocCountError(true) - .size(10000).shardSize(10000) - .collectMode(randomFrom(SubAggCollectionMode.values()))) - .get(); + .addAggregation( + terms("terms").executionHint(randomExecutionHint()) + .field(STRING_FIELD_NAME) + .showTermDocCountError(true) + .size(10000) + .shardSize(10000) + .collectMode(randomFrom(SubAggCollectionMode.values())) + ) + .get(); assertSearchResponse(accurateResponse); SearchResponse testResponse = client().prepareSearch("idx") - .addAggregation(terms("terms") - .executionHint(randomExecutionHint()) - .field(STRING_FIELD_NAME) - .showTermDocCountError(true) - .size(size) - .shardSize(shardSize) - .collectMode(randomFrom(SubAggCollectionMode.values()))) - .get(); + .addAggregation( + terms("terms").executionHint(randomExecutionHint()) + .field(STRING_FIELD_NAME) + .showTermDocCountError(true) + .size(size) + .shardSize(shardSize) + .collectMode(randomFrom(SubAggCollectionMode.values())) + ) + .get(); assertSearchResponse(testResponse); @@ -297,25 +300,28 @@ public void testStringValueFieldSingleShard() throws Exception { int size = randomIntBetween(1, 20); int shardSize = randomIntBetween(size, size * 2); SearchResponse accurateResponse = client().prepareSearch("idx_single_shard") - .addAggregation(terms("terms") - .executionHint(randomExecutionHint()) - .field(STRING_FIELD_NAME) - .showTermDocCountError(true) - .size(10000).shardSize(10000) - .collectMode(randomFrom(SubAggCollectionMode.values()))) - .get(); + .addAggregation( + 
terms("terms").executionHint(randomExecutionHint()) + .field(STRING_FIELD_NAME) + .showTermDocCountError(true) + .size(10000) + .shardSize(10000) + .collectMode(randomFrom(SubAggCollectionMode.values())) + ) + .get(); assertSearchResponse(accurateResponse); SearchResponse testResponse = client().prepareSearch("idx_single_shard") - .addAggregation(terms("terms") - .executionHint(randomExecutionHint()) - .field(STRING_FIELD_NAME) - .showTermDocCountError(true) - .size(size) - .shardSize(shardSize) - .collectMode(randomFrom(SubAggCollectionMode.values()))) - .get(); + .addAggregation( + terms("terms").executionHint(randomExecutionHint()) + .field(STRING_FIELD_NAME) + .showTermDocCountError(true) + .size(size) + .shardSize(shardSize) + .collectMode(randomFrom(SubAggCollectionMode.values())) + ) + .get(); assertSearchResponse(testResponse); @@ -327,15 +333,16 @@ public void testStringValueFieldWithRouting() throws Exception { int shardSize = randomIntBetween(size, size * 2); SearchResponse testResponse = client().prepareSearch("idx_with_routing") - .setRouting(String.valueOf(between(1, numRoutingValues))) - .addAggregation(terms("terms") - .executionHint(randomExecutionHint()) - .field(STRING_FIELD_NAME) - .showTermDocCountError(true) - .size(size) - .shardSize(shardSize) - .collectMode(randomFrom(SubAggCollectionMode.values()))) - .get(); + .setRouting(String.valueOf(between(1, numRoutingValues))) + .addAggregation( + terms("terms").executionHint(randomExecutionHint()) + .field(STRING_FIELD_NAME) + .showTermDocCountError(true) + .size(size) + .shardSize(shardSize) + .collectMode(randomFrom(SubAggCollectionMode.values())) + ) + .get(); assertSearchResponse(testResponse); @@ -346,27 +353,30 @@ public void testStringValueFieldDocCountAsc() throws Exception { int size = randomIntBetween(1, 20); int shardSize = randomIntBetween(size, size * 2); SearchResponse accurateResponse = client().prepareSearch("idx_single_shard") - .addAggregation(terms("terms") - .executionHint(randomExecutionHint()) - .field(STRING_FIELD_NAME) - .showTermDocCountError(true) - .size(10000).shardSize(10000) - .order(BucketOrder.count(true)) - .collectMode(randomFrom(SubAggCollectionMode.values()))) - .get(); + .addAggregation( + terms("terms").executionHint(randomExecutionHint()) + .field(STRING_FIELD_NAME) + .showTermDocCountError(true) + .size(10000) + .shardSize(10000) + .order(BucketOrder.count(true)) + .collectMode(randomFrom(SubAggCollectionMode.values())) + ) + .get(); assertSearchResponse(accurateResponse); SearchResponse testResponse = client().prepareSearch("idx_single_shard") - .addAggregation(terms("terms") - .executionHint(randomExecutionHint()) - .field(STRING_FIELD_NAME) - .showTermDocCountError(true) - .size(size) - .shardSize(shardSize) - .order(BucketOrder.count(true)) - .collectMode(randomFrom(SubAggCollectionMode.values()))) - .get(); + .addAggregation( + terms("terms").executionHint(randomExecutionHint()) + .field(STRING_FIELD_NAME) + .showTermDocCountError(true) + .size(size) + .shardSize(shardSize) + .order(BucketOrder.count(true)) + .collectMode(randomFrom(SubAggCollectionMode.values())) + ) + .get(); assertSearchResponse(testResponse); @@ -377,27 +387,30 @@ public void testStringValueFieldTermSortAsc() throws Exception { int size = randomIntBetween(1, 20); int shardSize = randomIntBetween(size, size * 2); SearchResponse accurateResponse = client().prepareSearch("idx_single_shard") - .addAggregation(terms("terms") - .executionHint(randomExecutionHint()) - .field(STRING_FIELD_NAME) - 
.showTermDocCountError(true) - .size(10000).shardSize(10000) - .order(BucketOrder.key(true)) - .collectMode(randomFrom(SubAggCollectionMode.values()))) - .get(); + .addAggregation( + terms("terms").executionHint(randomExecutionHint()) + .field(STRING_FIELD_NAME) + .showTermDocCountError(true) + .size(10000) + .shardSize(10000) + .order(BucketOrder.key(true)) + .collectMode(randomFrom(SubAggCollectionMode.values())) + ) + .get(); assertSearchResponse(accurateResponse); SearchResponse testResponse = client().prepareSearch("idx_single_shard") - .addAggregation(terms("terms") - .executionHint(randomExecutionHint()) - .field(STRING_FIELD_NAME) - .showTermDocCountError(true) - .size(size) - .shardSize(shardSize) - .order(BucketOrder.key(true)) - .collectMode(randomFrom(SubAggCollectionMode.values()))) - .get(); + .addAggregation( + terms("terms").executionHint(randomExecutionHint()) + .field(STRING_FIELD_NAME) + .showTermDocCountError(true) + .size(size) + .shardSize(shardSize) + .order(BucketOrder.key(true)) + .collectMode(randomFrom(SubAggCollectionMode.values())) + ) + .get(); assertSearchResponse(testResponse); @@ -408,27 +421,30 @@ public void testStringValueFieldTermSortDesc() throws Exception { int size = randomIntBetween(1, 20); int shardSize = randomIntBetween(size, size * 2); SearchResponse accurateResponse = client().prepareSearch("idx_single_shard") - .addAggregation(terms("terms") - .executionHint(randomExecutionHint()) - .field(STRING_FIELD_NAME) - .showTermDocCountError(true) - .size(10000).shardSize(10000) - .order(BucketOrder.key(false)) - .collectMode(randomFrom(SubAggCollectionMode.values()))) - .get(); + .addAggregation( + terms("terms").executionHint(randomExecutionHint()) + .field(STRING_FIELD_NAME) + .showTermDocCountError(true) + .size(10000) + .shardSize(10000) + .order(BucketOrder.key(false)) + .collectMode(randomFrom(SubAggCollectionMode.values())) + ) + .get(); assertSearchResponse(accurateResponse); SearchResponse testResponse = client().prepareSearch("idx_single_shard") - .addAggregation(terms("terms") - .executionHint(randomExecutionHint()) - .field(STRING_FIELD_NAME) - .showTermDocCountError(true) - .size(size) - .shardSize(shardSize) - .order(BucketOrder.key(false)) - .collectMode(randomFrom(SubAggCollectionMode.values()))) - .get(); + .addAggregation( + terms("terms").executionHint(randomExecutionHint()) + .field(STRING_FIELD_NAME) + .showTermDocCountError(true) + .size(size) + .shardSize(shardSize) + .order(BucketOrder.key(false)) + .collectMode(randomFrom(SubAggCollectionMode.values())) + ) + .get(); assertSearchResponse(testResponse); @@ -439,29 +455,32 @@ public void testStringValueFieldSubAggAsc() throws Exception { int size = randomIntBetween(1, 20); int shardSize = randomIntBetween(size, size * 2); SearchResponse accurateResponse = client().prepareSearch("idx_single_shard") - .addAggregation(terms("terms") - .executionHint(randomExecutionHint()) - .field(STRING_FIELD_NAME) - .showTermDocCountError(true) - .size(10000).shardSize(10000) - .order(BucketOrder.aggregation("sortAgg", true)) - .collectMode(randomFrom(SubAggCollectionMode.values())) - .subAggregation(sum("sortAgg").field(LONG_FIELD_NAME))) - .get(); + .addAggregation( + terms("terms").executionHint(randomExecutionHint()) + .field(STRING_FIELD_NAME) + .showTermDocCountError(true) + .size(10000) + .shardSize(10000) + .order(BucketOrder.aggregation("sortAgg", true)) + .collectMode(randomFrom(SubAggCollectionMode.values())) + .subAggregation(sum("sortAgg").field(LONG_FIELD_NAME)) + ) + .get(); 
assertSearchResponse(accurateResponse); SearchResponse testResponse = client().prepareSearch("idx_single_shard") - .addAggregation(terms("terms") - .executionHint(randomExecutionHint()) - .field(STRING_FIELD_NAME) - .showTermDocCountError(true) - .size(size) - .shardSize(shardSize) - .order(BucketOrder.aggregation("sortAgg", true)) - .collectMode(randomFrom(SubAggCollectionMode.values())) - .subAggregation(sum("sortAgg").field(LONG_FIELD_NAME))) - .get(); + .addAggregation( + terms("terms").executionHint(randomExecutionHint()) + .field(STRING_FIELD_NAME) + .showTermDocCountError(true) + .size(size) + .shardSize(shardSize) + .order(BucketOrder.aggregation("sortAgg", true)) + .collectMode(randomFrom(SubAggCollectionMode.values())) + .subAggregation(sum("sortAgg").field(LONG_FIELD_NAME)) + ) + .get(); assertSearchResponse(testResponse); @@ -472,29 +491,32 @@ public void testStringValueFieldSubAggDesc() throws Exception { int size = randomIntBetween(1, 20); int shardSize = randomIntBetween(size, size * 2); SearchResponse accurateResponse = client().prepareSearch("idx_single_shard") - .addAggregation(terms("terms") - .executionHint(randomExecutionHint()) - .field(STRING_FIELD_NAME) - .showTermDocCountError(true) - .size(10000).shardSize(10000) - .order(BucketOrder.aggregation("sortAgg", false)) - .collectMode(randomFrom(SubAggCollectionMode.values())) - .subAggregation(sum("sortAgg").field(LONG_FIELD_NAME))) - .get(); + .addAggregation( + terms("terms").executionHint(randomExecutionHint()) + .field(STRING_FIELD_NAME) + .showTermDocCountError(true) + .size(10000) + .shardSize(10000) + .order(BucketOrder.aggregation("sortAgg", false)) + .collectMode(randomFrom(SubAggCollectionMode.values())) + .subAggregation(sum("sortAgg").field(LONG_FIELD_NAME)) + ) + .get(); assertSearchResponse(accurateResponse); SearchResponse testResponse = client().prepareSearch("idx_single_shard") - .addAggregation(terms("terms") - .executionHint(randomExecutionHint()) - .field(STRING_FIELD_NAME) - .showTermDocCountError(true) - .size(size) - .shardSize(shardSize) - .order(BucketOrder.aggregation("sortAgg", false)) - .collectMode(randomFrom(SubAggCollectionMode.values())) - .subAggregation(sum("sortAgg").field(LONG_FIELD_NAME))) - .get(); + .addAggregation( + terms("terms").executionHint(randomExecutionHint()) + .field(STRING_FIELD_NAME) + .showTermDocCountError(true) + .size(size) + .shardSize(shardSize) + .order(BucketOrder.aggregation("sortAgg", false)) + .collectMode(randomFrom(SubAggCollectionMode.values())) + .subAggregation(sum("sortAgg").field(LONG_FIELD_NAME)) + ) + .get(); assertSearchResponse(testResponse); @@ -505,25 +527,28 @@ public void testLongValueField() throws Exception { int size = randomIntBetween(1, 20); int shardSize = randomIntBetween(size, size * 2); SearchResponse accurateResponse = client().prepareSearch("idx") - .addAggregation(terms("terms") - .executionHint(randomExecutionHint()) - .field(LONG_FIELD_NAME) - .showTermDocCountError(true) - .size(10000).shardSize(10000) - .collectMode(randomFrom(SubAggCollectionMode.values()))) - .get(); + .addAggregation( + terms("terms").executionHint(randomExecutionHint()) + .field(LONG_FIELD_NAME) + .showTermDocCountError(true) + .size(10000) + .shardSize(10000) + .collectMode(randomFrom(SubAggCollectionMode.values())) + ) + .get(); assertSearchResponse(accurateResponse); SearchResponse testResponse = client().prepareSearch("idx") - .addAggregation(terms("terms") - .executionHint(randomExecutionHint()) - .field(LONG_FIELD_NAME) - .showTermDocCountError(true) 
- .size(size) - .shardSize(shardSize) - .collectMode(randomFrom(SubAggCollectionMode.values()))) - .get(); + .addAggregation( + terms("terms").executionHint(randomExecutionHint()) + .field(LONG_FIELD_NAME) + .showTermDocCountError(true) + .size(size) + .shardSize(shardSize) + .collectMode(randomFrom(SubAggCollectionMode.values())) + ) + .get(); assertSearchResponse(testResponse); @@ -534,25 +559,28 @@ public void testLongValueFieldSingleShard() throws Exception { int size = randomIntBetween(1, 20); int shardSize = randomIntBetween(size, size * 2); SearchResponse accurateResponse = client().prepareSearch("idx_single_shard") - .addAggregation(terms("terms") - .executionHint(randomExecutionHint()) - .field(LONG_FIELD_NAME) - .showTermDocCountError(true) - .size(10000).shardSize(10000) - .collectMode(randomFrom(SubAggCollectionMode.values()))) - .get(); + .addAggregation( + terms("terms").executionHint(randomExecutionHint()) + .field(LONG_FIELD_NAME) + .showTermDocCountError(true) + .size(10000) + .shardSize(10000) + .collectMode(randomFrom(SubAggCollectionMode.values())) + ) + .get(); assertSearchResponse(accurateResponse); SearchResponse testResponse = client().prepareSearch("idx_single_shard") - .addAggregation(terms("terms") - .executionHint(randomExecutionHint()) - .field(LONG_FIELD_NAME) - .showTermDocCountError(true) - .size(size) - .shardSize(shardSize) - .collectMode(randomFrom(SubAggCollectionMode.values()))) - .get(); + .addAggregation( + terms("terms").executionHint(randomExecutionHint()) + .field(LONG_FIELD_NAME) + .showTermDocCountError(true) + .size(size) + .shardSize(shardSize) + .collectMode(randomFrom(SubAggCollectionMode.values())) + ) + .get(); assertSearchResponse(testResponse); @@ -564,15 +592,16 @@ public void testLongValueFieldWithRouting() throws Exception { int shardSize = randomIntBetween(size, size * 2); SearchResponse testResponse = client().prepareSearch("idx_with_routing") - .setRouting(String.valueOf(between(1, numRoutingValues))) - .addAggregation(terms("terms") - .executionHint(randomExecutionHint()) - .field(LONG_FIELD_NAME) - .showTermDocCountError(true) - .size(size) - .shardSize(shardSize) - .collectMode(randomFrom(SubAggCollectionMode.values()))) - .get(); + .setRouting(String.valueOf(between(1, numRoutingValues))) + .addAggregation( + terms("terms").executionHint(randomExecutionHint()) + .field(LONG_FIELD_NAME) + .showTermDocCountError(true) + .size(size) + .shardSize(shardSize) + .collectMode(randomFrom(SubAggCollectionMode.values())) + ) + .get(); assertSearchResponse(testResponse); @@ -583,27 +612,30 @@ public void testLongValueFieldDocCountAsc() throws Exception { int size = randomIntBetween(1, 20); int shardSize = randomIntBetween(size, size * 2); SearchResponse accurateResponse = client().prepareSearch("idx_single_shard") - .addAggregation(terms("terms") - .executionHint(randomExecutionHint()) - .field(LONG_FIELD_NAME) - .showTermDocCountError(true) - .size(10000).shardSize(10000) - .order(BucketOrder.count(true)) - .collectMode(randomFrom(SubAggCollectionMode.values()))) - .get(); + .addAggregation( + terms("terms").executionHint(randomExecutionHint()) + .field(LONG_FIELD_NAME) + .showTermDocCountError(true) + .size(10000) + .shardSize(10000) + .order(BucketOrder.count(true)) + .collectMode(randomFrom(SubAggCollectionMode.values())) + ) + .get(); assertSearchResponse(accurateResponse); SearchResponse testResponse = client().prepareSearch("idx_single_shard") - .addAggregation(terms("terms") - .executionHint(randomExecutionHint()) - 
.field(LONG_FIELD_NAME) - .showTermDocCountError(true) - .size(size) - .shardSize(shardSize) - .order(BucketOrder.count(true)) - .collectMode(randomFrom(SubAggCollectionMode.values()))) - .get(); + .addAggregation( + terms("terms").executionHint(randomExecutionHint()) + .field(LONG_FIELD_NAME) + .showTermDocCountError(true) + .size(size) + .shardSize(shardSize) + .order(BucketOrder.count(true)) + .collectMode(randomFrom(SubAggCollectionMode.values())) + ) + .get(); assertSearchResponse(testResponse); @@ -614,27 +646,30 @@ public void testLongValueFieldTermSortAsc() throws Exception { int size = randomIntBetween(1, 20); int shardSize = randomIntBetween(size, size * 2); SearchResponse accurateResponse = client().prepareSearch("idx_single_shard") - .addAggregation(terms("terms") - .executionHint(randomExecutionHint()) - .field(LONG_FIELD_NAME) - .showTermDocCountError(true) - .size(10000).shardSize(10000) - .order(BucketOrder.key(true)) - .collectMode(randomFrom(SubAggCollectionMode.values()))) - .get(); + .addAggregation( + terms("terms").executionHint(randomExecutionHint()) + .field(LONG_FIELD_NAME) + .showTermDocCountError(true) + .size(10000) + .shardSize(10000) + .order(BucketOrder.key(true)) + .collectMode(randomFrom(SubAggCollectionMode.values())) + ) + .get(); assertSearchResponse(accurateResponse); SearchResponse testResponse = client().prepareSearch("idx_single_shard") - .addAggregation(terms("terms") - .executionHint(randomExecutionHint()) - .field(LONG_FIELD_NAME) - .showTermDocCountError(true) - .size(size) - .shardSize(shardSize) - .order(BucketOrder.key(true)) - .collectMode(randomFrom(SubAggCollectionMode.values()))) - .get(); + .addAggregation( + terms("terms").executionHint(randomExecutionHint()) + .field(LONG_FIELD_NAME) + .showTermDocCountError(true) + .size(size) + .shardSize(shardSize) + .order(BucketOrder.key(true)) + .collectMode(randomFrom(SubAggCollectionMode.values())) + ) + .get(); assertSearchResponse(testResponse); @@ -645,27 +680,30 @@ public void testLongValueFieldTermSortDesc() throws Exception { int size = randomIntBetween(1, 20); int shardSize = randomIntBetween(size, size * 2); SearchResponse accurateResponse = client().prepareSearch("idx_single_shard") - .addAggregation(terms("terms") - .executionHint(randomExecutionHint()) - .field(LONG_FIELD_NAME) - .showTermDocCountError(true) - .size(10000).shardSize(10000) - .order(BucketOrder.key(false)) - .collectMode(randomFrom(SubAggCollectionMode.values()))) - .get(); + .addAggregation( + terms("terms").executionHint(randomExecutionHint()) + .field(LONG_FIELD_NAME) + .showTermDocCountError(true) + .size(10000) + .shardSize(10000) + .order(BucketOrder.key(false)) + .collectMode(randomFrom(SubAggCollectionMode.values())) + ) + .get(); assertSearchResponse(accurateResponse); SearchResponse testResponse = client().prepareSearch("idx_single_shard") - .addAggregation(terms("terms") - .executionHint(randomExecutionHint()) - .field(LONG_FIELD_NAME) - .showTermDocCountError(true) - .size(size) - .shardSize(shardSize) - .order(BucketOrder.key(false)) - .collectMode(randomFrom(SubAggCollectionMode.values()))) - .get(); + .addAggregation( + terms("terms").executionHint(randomExecutionHint()) + .field(LONG_FIELD_NAME) + .showTermDocCountError(true) + .size(size) + .shardSize(shardSize) + .order(BucketOrder.key(false)) + .collectMode(randomFrom(SubAggCollectionMode.values())) + ) + .get(); assertSearchResponse(testResponse); @@ -676,29 +714,32 @@ public void testLongValueFieldSubAggAsc() throws Exception { int size = 
randomIntBetween(1, 20); int shardSize = randomIntBetween(size, size * 2); SearchResponse accurateResponse = client().prepareSearch("idx_single_shard") - .addAggregation(terms("terms") - .executionHint(randomExecutionHint()) - .field(LONG_FIELD_NAME) - .showTermDocCountError(true) - .size(10000).shardSize(10000) - .order(BucketOrder.aggregation("sortAgg", true)) - .collectMode(randomFrom(SubAggCollectionMode.values())) - .subAggregation(sum("sortAgg").field(LONG_FIELD_NAME))) - .get(); + .addAggregation( + terms("terms").executionHint(randomExecutionHint()) + .field(LONG_FIELD_NAME) + .showTermDocCountError(true) + .size(10000) + .shardSize(10000) + .order(BucketOrder.aggregation("sortAgg", true)) + .collectMode(randomFrom(SubAggCollectionMode.values())) + .subAggregation(sum("sortAgg").field(LONG_FIELD_NAME)) + ) + .get(); assertSearchResponse(accurateResponse); SearchResponse testResponse = client().prepareSearch("idx_single_shard") - .addAggregation(terms("terms") - .executionHint(randomExecutionHint()) - .field(LONG_FIELD_NAME) - .showTermDocCountError(true) - .size(size) - .shardSize(shardSize) - .order(BucketOrder.aggregation("sortAgg", true)) - .collectMode(randomFrom(SubAggCollectionMode.values())) - .subAggregation(sum("sortAgg").field(LONG_FIELD_NAME))) - .get(); + .addAggregation( + terms("terms").executionHint(randomExecutionHint()) + .field(LONG_FIELD_NAME) + .showTermDocCountError(true) + .size(size) + .shardSize(shardSize) + .order(BucketOrder.aggregation("sortAgg", true)) + .collectMode(randomFrom(SubAggCollectionMode.values())) + .subAggregation(sum("sortAgg").field(LONG_FIELD_NAME)) + ) + .get(); assertSearchResponse(testResponse); @@ -709,29 +750,32 @@ public void testLongValueFieldSubAggDesc() throws Exception { int size = randomIntBetween(1, 20); int shardSize = randomIntBetween(size, size * 2); SearchResponse accurateResponse = client().prepareSearch("idx_single_shard") - .addAggregation(terms("terms") - .executionHint(randomExecutionHint()) - .field(LONG_FIELD_NAME) - .showTermDocCountError(true) - .size(10000).shardSize(10000) - .order(BucketOrder.aggregation("sortAgg", false)) - .collectMode(randomFrom(SubAggCollectionMode.values())) - .subAggregation(sum("sortAgg").field(DOUBLE_FIELD_NAME))) - .get(); + .addAggregation( + terms("terms").executionHint(randomExecutionHint()) + .field(LONG_FIELD_NAME) + .showTermDocCountError(true) + .size(10000) + .shardSize(10000) + .order(BucketOrder.aggregation("sortAgg", false)) + .collectMode(randomFrom(SubAggCollectionMode.values())) + .subAggregation(sum("sortAgg").field(DOUBLE_FIELD_NAME)) + ) + .get(); assertSearchResponse(accurateResponse); SearchResponse testResponse = client().prepareSearch("idx_single_shard") - .addAggregation(terms("terms") - .executionHint(randomExecutionHint()) - .field(LONG_FIELD_NAME) - .showTermDocCountError(true) - .size(size) - .shardSize(shardSize) - .order(BucketOrder.aggregation("sortAgg", false)) - .collectMode(randomFrom(SubAggCollectionMode.values())) - .subAggregation(sum("sortAgg").field(DOUBLE_FIELD_NAME))) - .get(); + .addAggregation( + terms("terms").executionHint(randomExecutionHint()) + .field(LONG_FIELD_NAME) + .showTermDocCountError(true) + .size(size) + .shardSize(shardSize) + .order(BucketOrder.aggregation("sortAgg", false)) + .collectMode(randomFrom(SubAggCollectionMode.values())) + .subAggregation(sum("sortAgg").field(DOUBLE_FIELD_NAME)) + ) + .get(); assertSearchResponse(testResponse); @@ -742,25 +786,28 @@ public void testDoubleValueField() throws Exception { int size = 
randomIntBetween(1, 20); int shardSize = randomIntBetween(size, size * 2); SearchResponse accurateResponse = client().prepareSearch("idx") - .addAggregation(terms("terms") - .executionHint(randomExecutionHint()) - .field(DOUBLE_FIELD_NAME) - .showTermDocCountError(true) - .size(10000).shardSize(10000) - .collectMode(randomFrom(SubAggCollectionMode.values()))) - .get(); + .addAggregation( + terms("terms").executionHint(randomExecutionHint()) + .field(DOUBLE_FIELD_NAME) + .showTermDocCountError(true) + .size(10000) + .shardSize(10000) + .collectMode(randomFrom(SubAggCollectionMode.values())) + ) + .get(); assertSearchResponse(accurateResponse); SearchResponse testResponse = client().prepareSearch("idx") - .addAggregation(terms("terms") - .executionHint(randomExecutionHint()) - .field(DOUBLE_FIELD_NAME) - .showTermDocCountError(true) - .size(size) - .shardSize(shardSize) - .collectMode(randomFrom(SubAggCollectionMode.values()))) - .get(); + .addAggregation( + terms("terms").executionHint(randomExecutionHint()) + .field(DOUBLE_FIELD_NAME) + .showTermDocCountError(true) + .size(size) + .shardSize(shardSize) + .collectMode(randomFrom(SubAggCollectionMode.values())) + ) + .get(); assertSearchResponse(testResponse); @@ -771,25 +818,28 @@ public void testDoubleValueFieldSingleShard() throws Exception { int size = randomIntBetween(1, 20); int shardSize = randomIntBetween(size, size * 2); SearchResponse accurateResponse = client().prepareSearch("idx_single_shard") - .addAggregation(terms("terms") - .executionHint(randomExecutionHint()) - .field(DOUBLE_FIELD_NAME) - .showTermDocCountError(true) - .size(10000).shardSize(10000) - .collectMode(randomFrom(SubAggCollectionMode.values()))) - .get(); + .addAggregation( + terms("terms").executionHint(randomExecutionHint()) + .field(DOUBLE_FIELD_NAME) + .showTermDocCountError(true) + .size(10000) + .shardSize(10000) + .collectMode(randomFrom(SubAggCollectionMode.values())) + ) + .get(); assertSearchResponse(accurateResponse); SearchResponse testResponse = client().prepareSearch("idx_single_shard") - .addAggregation(terms("terms") - .executionHint(randomExecutionHint()) - .field(DOUBLE_FIELD_NAME) - .showTermDocCountError(true) - .size(size) - .shardSize(shardSize) - .collectMode(randomFrom(SubAggCollectionMode.values()))) - .get(); + .addAggregation( + terms("terms").executionHint(randomExecutionHint()) + .field(DOUBLE_FIELD_NAME) + .showTermDocCountError(true) + .size(size) + .shardSize(shardSize) + .collectMode(randomFrom(SubAggCollectionMode.values())) + ) + .get(); assertSearchResponse(testResponse); @@ -801,15 +851,16 @@ public void testDoubleValueFieldWithRouting() throws Exception { int shardSize = randomIntBetween(size, size * 2); SearchResponse testResponse = client().prepareSearch("idx_with_routing") - .setRouting(String.valueOf(between(1, numRoutingValues))) - .addAggregation(terms("terms") - .executionHint(randomExecutionHint()) - .field(DOUBLE_FIELD_NAME) - .showTermDocCountError(true) - .size(size) - .shardSize(shardSize) - .collectMode(randomFrom(SubAggCollectionMode.values()))) - .get(); + .setRouting(String.valueOf(between(1, numRoutingValues))) + .addAggregation( + terms("terms").executionHint(randomExecutionHint()) + .field(DOUBLE_FIELD_NAME) + .showTermDocCountError(true) + .size(size) + .shardSize(shardSize) + .collectMode(randomFrom(SubAggCollectionMode.values())) + ) + .get(); assertSearchResponse(testResponse); @@ -820,27 +871,30 @@ public void testDoubleValueFieldDocCountAsc() throws Exception { int size = randomIntBetween(1, 20); int 
shardSize = randomIntBetween(size, size * 2); SearchResponse accurateResponse = client().prepareSearch("idx_single_shard") - .addAggregation(terms("terms") - .executionHint(randomExecutionHint()) - .field(DOUBLE_FIELD_NAME) - .showTermDocCountError(true) - .size(10000).shardSize(10000) - .order(BucketOrder.count(true)) - .collectMode(randomFrom(SubAggCollectionMode.values()))) - .get(); + .addAggregation( + terms("terms").executionHint(randomExecutionHint()) + .field(DOUBLE_FIELD_NAME) + .showTermDocCountError(true) + .size(10000) + .shardSize(10000) + .order(BucketOrder.count(true)) + .collectMode(randomFrom(SubAggCollectionMode.values())) + ) + .get(); assertSearchResponse(accurateResponse); SearchResponse testResponse = client().prepareSearch("idx_single_shard") - .addAggregation(terms("terms") - .executionHint(randomExecutionHint()) - .field(DOUBLE_FIELD_NAME) - .showTermDocCountError(true) - .size(size) - .shardSize(shardSize) - .order(BucketOrder.count(true)) - .collectMode(randomFrom(SubAggCollectionMode.values()))) - .get(); + .addAggregation( + terms("terms").executionHint(randomExecutionHint()) + .field(DOUBLE_FIELD_NAME) + .showTermDocCountError(true) + .size(size) + .shardSize(shardSize) + .order(BucketOrder.count(true)) + .collectMode(randomFrom(SubAggCollectionMode.values())) + ) + .get(); assertSearchResponse(testResponse); @@ -851,27 +905,30 @@ public void testDoubleValueFieldTermSortAsc() throws Exception { int size = randomIntBetween(1, 20); int shardSize = randomIntBetween(size, size * 2); SearchResponse accurateResponse = client().prepareSearch("idx_single_shard") - .addAggregation(terms("terms") - .executionHint(randomExecutionHint()) - .field(DOUBLE_FIELD_NAME) - .showTermDocCountError(true) - .size(10000).shardSize(10000) - .order(BucketOrder.key(true)) - .collectMode(randomFrom(SubAggCollectionMode.values()))) - .get(); + .addAggregation( + terms("terms").executionHint(randomExecutionHint()) + .field(DOUBLE_FIELD_NAME) + .showTermDocCountError(true) + .size(10000) + .shardSize(10000) + .order(BucketOrder.key(true)) + .collectMode(randomFrom(SubAggCollectionMode.values())) + ) + .get(); assertSearchResponse(accurateResponse); SearchResponse testResponse = client().prepareSearch("idx_single_shard") - .addAggregation(terms("terms") - .executionHint(randomExecutionHint()) - .field(DOUBLE_FIELD_NAME) - .showTermDocCountError(true) - .size(size) - .shardSize(shardSize) - .order(BucketOrder.key(true)) - .collectMode(randomFrom(SubAggCollectionMode.values()))) - .get(); + .addAggregation( + terms("terms").executionHint(randomExecutionHint()) + .field(DOUBLE_FIELD_NAME) + .showTermDocCountError(true) + .size(size) + .shardSize(shardSize) + .order(BucketOrder.key(true)) + .collectMode(randomFrom(SubAggCollectionMode.values())) + ) + .get(); assertSearchResponse(testResponse); @@ -882,27 +939,30 @@ public void testDoubleValueFieldTermSortDesc() throws Exception { int size = randomIntBetween(1, 20); int shardSize = randomIntBetween(size, size * 2); SearchResponse accurateResponse = client().prepareSearch("idx_single_shard") - .addAggregation(terms("terms") - .executionHint(randomExecutionHint()) - .field(DOUBLE_FIELD_NAME) - .showTermDocCountError(true) - .size(10000).shardSize(10000) - .order(BucketOrder.key(false)) - .collectMode(randomFrom(SubAggCollectionMode.values()))) - .get(); + .addAggregation( + terms("terms").executionHint(randomExecutionHint()) + .field(DOUBLE_FIELD_NAME) + .showTermDocCountError(true) + .size(10000) + .shardSize(10000) + 
.order(BucketOrder.key(false)) + .collectMode(randomFrom(SubAggCollectionMode.values())) + ) + .get(); assertSearchResponse(accurateResponse); SearchResponse testResponse = client().prepareSearch("idx_single_shard") - .addAggregation(terms("terms") - .executionHint(randomExecutionHint()) - .field(DOUBLE_FIELD_NAME) - .showTermDocCountError(true) - .size(size) - .shardSize(shardSize) - .order(BucketOrder.key(false)) - .collectMode(randomFrom(SubAggCollectionMode.values()))) - .get(); + .addAggregation( + terms("terms").executionHint(randomExecutionHint()) + .field(DOUBLE_FIELD_NAME) + .showTermDocCountError(true) + .size(size) + .shardSize(shardSize) + .order(BucketOrder.key(false)) + .collectMode(randomFrom(SubAggCollectionMode.values())) + ) + .get(); assertSearchResponse(testResponse); @@ -913,29 +973,32 @@ public void testDoubleValueFieldSubAggAsc() throws Exception { int size = randomIntBetween(1, 20); int shardSize = randomIntBetween(size, size * 2); SearchResponse accurateResponse = client().prepareSearch("idx_single_shard") - .addAggregation(terms("terms") - .executionHint(randomExecutionHint()) - .field(DOUBLE_FIELD_NAME) - .showTermDocCountError(true) - .size(10000).shardSize(10000) - .order(BucketOrder.aggregation("sortAgg", true)) - .collectMode(randomFrom(SubAggCollectionMode.values())) - .subAggregation(sum("sortAgg").field(LONG_FIELD_NAME))) - .get(); + .addAggregation( + terms("terms").executionHint(randomExecutionHint()) + .field(DOUBLE_FIELD_NAME) + .showTermDocCountError(true) + .size(10000) + .shardSize(10000) + .order(BucketOrder.aggregation("sortAgg", true)) + .collectMode(randomFrom(SubAggCollectionMode.values())) + .subAggregation(sum("sortAgg").field(LONG_FIELD_NAME)) + ) + .get(); assertSearchResponse(accurateResponse); SearchResponse testResponse = client().prepareSearch("idx_single_shard") - .addAggregation(terms("terms") - .executionHint(randomExecutionHint()) - .field(DOUBLE_FIELD_NAME) - .showTermDocCountError(true) - .size(size) - .shardSize(shardSize) - .order(BucketOrder.aggregation("sortAgg", true)) - .collectMode(randomFrom(SubAggCollectionMode.values())) - .subAggregation(sum("sortAgg").field(LONG_FIELD_NAME))) - .get(); + .addAggregation( + terms("terms").executionHint(randomExecutionHint()) + .field(DOUBLE_FIELD_NAME) + .showTermDocCountError(true) + .size(size) + .shardSize(shardSize) + .order(BucketOrder.aggregation("sortAgg", true)) + .collectMode(randomFrom(SubAggCollectionMode.values())) + .subAggregation(sum("sortAgg").field(LONG_FIELD_NAME)) + ) + .get(); assertSearchResponse(testResponse); @@ -946,29 +1009,32 @@ public void testDoubleValueFieldSubAggDesc() throws Exception { int size = randomIntBetween(1, 20); int shardSize = randomIntBetween(size, size * 2); SearchResponse accurateResponse = client().prepareSearch("idx_single_shard") - .addAggregation(terms("terms") - .executionHint(randomExecutionHint()) - .field(DOUBLE_FIELD_NAME) - .showTermDocCountError(true) - .size(10000).shardSize(10000) - .order(BucketOrder.aggregation("sortAgg", false)) - .collectMode(randomFrom(SubAggCollectionMode.values())) - .subAggregation(sum("sortAgg").field(LONG_FIELD_NAME))) - .get(); + .addAggregation( + terms("terms").executionHint(randomExecutionHint()) + .field(DOUBLE_FIELD_NAME) + .showTermDocCountError(true) + .size(10000) + .shardSize(10000) + .order(BucketOrder.aggregation("sortAgg", false)) + .collectMode(randomFrom(SubAggCollectionMode.values())) + .subAggregation(sum("sortAgg").field(LONG_FIELD_NAME)) + ) + .get(); 
assertSearchResponse(accurateResponse); SearchResponse testResponse = client().prepareSearch("idx_single_shard") - .addAggregation(terms("terms") - .executionHint(randomExecutionHint()) - .field(DOUBLE_FIELD_NAME) - .showTermDocCountError(true) - .size(size) - .shardSize(shardSize) - .order(BucketOrder.aggregation("sortAgg", false)) - .collectMode(randomFrom(SubAggCollectionMode.values())) - .subAggregation(sum("sortAgg").field(LONG_FIELD_NAME))) - .get(); + .addAggregation( + terms("terms").executionHint(randomExecutionHint()) + .field(DOUBLE_FIELD_NAME) + .showTermDocCountError(true) + .size(size) + .shardSize(shardSize) + .order(BucketOrder.aggregation("sortAgg", false)) + .collectMode(randomFrom(SubAggCollectionMode.values())) + .subAggregation(sum("sortAgg").field(LONG_FIELD_NAME)) + ) + .get(); assertSearchResponse(testResponse); @@ -982,13 +1048,15 @@ public void testDoubleValueFieldSubAggDesc() throws Exception { */ public void testFixedDocs() throws Exception { SearchResponse response = client().prepareSearch("idx_fixed_docs_0", "idx_fixed_docs_1", "idx_fixed_docs_2") - .addAggregation(terms("terms") - .executionHint(randomExecutionHint()) - .field(STRING_FIELD_NAME) - .showTermDocCountError(true) - .size(5).shardSize(5) - .collectMode(randomFrom(SubAggCollectionMode.values()))) - .get(); + .addAggregation( + terms("terms").executionHint(randomExecutionHint()) + .field(STRING_FIELD_NAME) + .showTermDocCountError(true) + .size(5) + .shardSize(5) + .collectMode(randomFrom(SubAggCollectionMode.values())) + ) + .get(); assertSearchResponse(response); Terms terms = response.getAggregations().get("terms"); @@ -1016,7 +1084,6 @@ public void testFixedDocs() throws Exception { assertThat(bucket.getDocCount(), equalTo(50L)); assertThat(bucket.getDocCountError(), equalTo(15L)); - bucket = buckets.get(3); assertThat(bucket, notNullValue()); assertThat(bucket.getKey(), equalTo("G")); @@ -1036,12 +1103,14 @@ public void testFixedDocs() throws Exception { */ public void testIncrementalReduction() { SearchResponse response = client().prepareSearch("idx_fixed_docs_3", "idx_fixed_docs_4", "idx_fixed_docs_5") - .addAggregation(terms("terms") - .executionHint(randomExecutionHint()) - .field(STRING_FIELD_NAME) - .showTermDocCountError(true) - .size(5).shardSize(5) - .collectMode(randomFrom(SubAggCollectionMode.values()))) + .addAggregation( + terms("terms").executionHint(randomExecutionHint()) + .field(STRING_FIELD_NAME) + .showTermDocCountError(true) + .size(5) + .shardSize(5) + .collectMode(randomFrom(SubAggCollectionMode.values())) + ) .get(); assertSearchResponse(response); Terms terms = response.getAggregations().get("terms"); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/TermsShardMinDocCountIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/TermsShardMinDocCountIT.java index 167c452cbc0fd..4a431fbf48b40 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/TermsShardMinDocCountIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/TermsShardMinDocCountIT.java @@ -46,40 +46,50 @@ public void testShardMinDocCountSignificantTermsTest() throws Exception { } else { textMappings = "type=text,fielddata=true"; } - assertAcked(prepareCreate(index).setSettings(Settings.builder().put(SETTING_NUMBER_OF_SHARDS, 1).put(SETTING_NUMBER_OF_REPLICAS, 0)) - .setMapping("text", textMappings)); + assertAcked( + 
prepareCreate(index).setSettings(Settings.builder().put(SETTING_NUMBER_OF_SHARDS, 1).put(SETTING_NUMBER_OF_REPLICAS, 0)) + .setMapping("text", textMappings) + ); List indexBuilders = new ArrayList<>(); - addTermsDocs("1", 1, 0, indexBuilders);//high score but low doc freq + addTermsDocs("1", 1, 0, indexBuilders);// high score but low doc freq addTermsDocs("2", 1, 0, indexBuilders); addTermsDocs("3", 1, 0, indexBuilders); addTermsDocs("4", 1, 0, indexBuilders); - addTermsDocs("5", 3, 1, indexBuilders);//low score but high doc freq + addTermsDocs("5", 3, 1, indexBuilders);// low score but high doc freq addTermsDocs("6", 3, 1, indexBuilders); addTermsDocs("7", 0, 3, indexBuilders);// make sure the terms all get score > 0 except for this one indexRandom(true, false, indexBuilders); // first, check that indeed when not setting the shardMinDocCount parameter 0 terms are returned SearchResponse response = client().prepareSearch(index) - .addAggregation( - (filter("inclass", QueryBuilders.termQuery("class", true))) - .subAggregation(significantTerms("mySignificantTerms").field("text").minDocCount(2).size(2).shardSize(2) - .executionHint(randomExecutionHint())) + .addAggregation( + (filter("inclass", QueryBuilders.termQuery("class", true))).subAggregation( + significantTerms("mySignificantTerms").field("text") + .minDocCount(2) + .size(2) + .shardSize(2) + .executionHint(randomExecutionHint()) ) - .get(); + ) + .get(); assertSearchResponse(response); InternalFilter filteredBucket = response.getAggregations().get("inclass"); SignificantTerms sigterms = filteredBucket.getAggregations().get("mySignificantTerms"); assertThat(sigterms.getBuckets().size(), equalTo(0)); - response = client().prepareSearch(index) - .addAggregation( - (filter("inclass", QueryBuilders.termQuery("class", true))) - .subAggregation(significantTerms("mySignificantTerms").field("text").minDocCount(2).shardSize(2) - .shardMinDocCount(2).size(2).executionHint(randomExecutionHint())) + .addAggregation( + (filter("inclass", QueryBuilders.termQuery("class", true))).subAggregation( + significantTerms("mySignificantTerms").field("text") + .minDocCount(2) + .shardSize(2) + .shardMinDocCount(2) + .size(2) + .executionHint(randomExecutionHint()) ) - .get(); + ) + .get(); assertSearchResponse(response); filteredBucket = response.getAggregations().get("inclass"); sigterms = filteredBucket.getAggregations().get("mySignificantTerms"); @@ -99,41 +109,52 @@ private void addTermsDocs(String term, int numInClass, int numNotInClass, List indexBuilders = new ArrayList<>(); - addTermsDocs("1", 1, indexBuilders);//low doc freq but high score + addTermsDocs("1", 1, indexBuilders);// low doc freq but high score addTermsDocs("2", 1, indexBuilders); addTermsDocs("3", 1, indexBuilders); addTermsDocs("4", 1, indexBuilders); - addTermsDocs("5", 3, indexBuilders);//low score but high doc freq + addTermsDocs("5", 3, indexBuilders);// low score but high doc freq addTermsDocs("6", 3, indexBuilders); indexRandom(true, false, indexBuilders); // first, check that indeed when not setting the shardMinDocCount parameter 0 terms are returned SearchResponse response = client().prepareSearch(index) - .addAggregation( - terms("myTerms").field("text").minDocCount(2).size(2).shardSize(2).executionHint(randomExecutionHint()) - .order(BucketOrder.key(true)) - ) - .get(); + .addAggregation( + terms("myTerms").field("text") + .minDocCount(2) + .size(2) + .shardSize(2) + .executionHint(randomExecutionHint()) + .order(BucketOrder.key(true)) + ) + .get(); 
assertSearchResponse(response); Terms sigterms = response.getAggregations().get("myTerms"); assertThat(sigterms.getBuckets().size(), equalTo(0)); response = client().prepareSearch(index) - .addAggregation( - terms("myTerms").field("text").minDocCount(2).shardMinDocCount(2).size(2).shardSize(2) - .executionHint(randomExecutionHint()).order(BucketOrder.key(true)) - ) - .get(); + .addAggregation( + terms("myTerms").field("text") + .minDocCount(2) + .shardMinDocCount(2) + .size(2) + .shardSize(2) + .executionHint(randomExecutionHint()) + .order(BucketOrder.key(true)) + ) + .get(); assertSearchResponse(response); sigterms = response.getAggregations().get("myTerms"); assertThat(sigterms.getBuckets().size(), equalTo(2)); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/terms/RareTermsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/terms/RareTermsIT.java index d76aa092d2685..252a9e39f8cae 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/terms/RareTermsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/terms/RareTermsIT.java @@ -55,7 +55,7 @@ public void testSingleValuedString() { assertNumRareTerms(10, numDocs); } - private void assertNumRareTerms(int maxDocs, int rareTerms) { + private void assertNumRareTerms(int maxDocs, int rareTerms) { final SearchRequestBuilder requestBuilder = client().prepareSearch(index); requestBuilder.addAggregation(new RareTermsAggregationBuilder("rareTerms").field("str_value.keyword").maxDocCount(maxDocs)); final SearchResponse response = requestBuilder.get(); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/terms/StringTermsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/terms/StringTermsIT.java index def6db7c965f9..370f9301a9ca3 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/terms/StringTermsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/terms/StringTermsIT.java @@ -129,42 +129,63 @@ protected Map, Object>> nonDeterministicPlu @Override public void setupSuiteScopeCluster() throws Exception { - assertAcked(client().admin().indices().prepareCreate("idx") - .setMapping(SINGLE_VALUED_FIELD_NAME, "type=keyword", - MULTI_VALUED_FIELD_NAME, "type=keyword", - "tag", "type=keyword").get()); + assertAcked( + client().admin() + .indices() + .prepareCreate("idx") + .setMapping(SINGLE_VALUED_FIELD_NAME, "type=keyword", MULTI_VALUED_FIELD_NAME, "type=keyword", "tag", "type=keyword") + .get() + ); List builders = new ArrayList<>(); for (int i = 0; i < 5; i++) { - builders.add(client().prepareIndex("idx").setSource( - jsonBuilder().startObject() + builders.add( + client().prepareIndex("idx") + .setSource( + jsonBuilder().startObject() .field(SINGLE_VALUED_FIELD_NAME, "val" + i) .field("i", i) .field("constant", 1) .field("tag", i < 5 / 2 + 1 ? 
"more" : "less") .startArray(MULTI_VALUED_FIELD_NAME) - .value("val" + i) - .value("val" + (i + 1)) + .value("val" + i) + .value("val" + (i + 1)) .endArray() - .endObject())); + .endObject() + ) + ); } getMultiSortDocs(builders); - assertAcked(client().admin().indices().prepareCreate("high_card_idx") - .setMapping(SINGLE_VALUED_FIELD_NAME, "type=keyword", - MULTI_VALUED_FIELD_NAME, "type=keyword", - "tag", "type=keyword").get()); + assertAcked( + client().admin() + .indices() + .prepareCreate("high_card_idx") + .setMapping(SINGLE_VALUED_FIELD_NAME, "type=keyword", MULTI_VALUED_FIELD_NAME, "type=keyword", "tag", "type=keyword") + .get() + ); for (int i = 0; i < 100; i++) { - builders.add(client().prepareIndex("high_card_idx").setSource( - jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, "val" + Strings.padStart(i + "", 3, '0')) - .startArray(MULTI_VALUED_FIELD_NAME).value("val" + Strings.padStart(i + "", 3, '0')) - .value("val" + Strings.padStart((i + 1) + "", 3, '0')).endArray().endObject())); + builders.add( + client().prepareIndex("high_card_idx") + .setSource( + jsonBuilder().startObject() + .field(SINGLE_VALUED_FIELD_NAME, "val" + Strings.padStart(i + "", 3, '0')) + .startArray(MULTI_VALUED_FIELD_NAME) + .value("val" + Strings.padStart(i + "", 3, '0')) + .value("val" + Strings.padStart((i + 1) + "", 3, '0')) + .endArray() + .endObject() + ) + ); } prepareCreate("empty_bucket_idx").setMapping(SINGLE_VALUED_FIELD_NAME, "type=integer").get(); for (int i = 0; i < 2; i++) { - builders.add(client().prepareIndex("empty_bucket_idx").setId("" + i).setSource( - jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, i * 2).endObject())); + builders.add( + client().prepareIndex("empty_bucket_idx") + .setId("" + i) + .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, i * 2).endObject()) + ); } indexRandom(true, builders); createIndex("idx_unmapped"); @@ -216,32 +237,55 @@ private void getMultiSortDocs(List builders) throws IOExcep bucketProps.put("sum_d", 1d); expectedMultiSortBuckets.put((String) bucketProps.get("_term"), bucketProps); - assertAcked(client().admin().indices().prepareCreate("sort_idx") - .setMapping(SINGLE_VALUED_FIELD_NAME, "type=keyword", - MULTI_VALUED_FIELD_NAME, "type=keyword", - "tag", "type=keyword").get()); + assertAcked( + client().admin() + .indices() + .prepareCreate("sort_idx") + .setMapping(SINGLE_VALUED_FIELD_NAME, "type=keyword", MULTI_VALUED_FIELD_NAME, "type=keyword", "tag", "type=keyword") + .get() + ); for (int i = 1; i <= 3; i++) { - builders.add(client().prepareIndex("sort_idx").setSource( - jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, "val1").field("l", 1).field("d", i).endObject())); - builders.add(client().prepareIndex("sort_idx").setSource( - jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, "val2").field("l", 2).field("d", i).endObject())); + builders.add( + client().prepareIndex("sort_idx") + .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, "val1").field("l", 1).field("d", i).endObject()) + ); + builders.add( + client().prepareIndex("sort_idx") + .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, "val2").field("l", 2).field("d", i).endObject()) + ); } - builders.add(client().prepareIndex("sort_idx").setSource( - jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, "val3").field("l", 3).field("d", 1).endObject())); - builders.add(client().prepareIndex("sort_idx").setSource( - jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, 
"val3").field("l", 3).field("d", 2).endObject())); - builders.add(client().prepareIndex("sort_idx").setSource( - jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, "val4").field("l", 3).field("d", 1).endObject())); - builders.add(client().prepareIndex("sort_idx").setSource( - jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, "val4").field("l", 3).field("d", 3).endObject())); - builders.add(client().prepareIndex("sort_idx").setSource( - jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, "val5").field("l", 5).field("d", 1).endObject())); - builders.add(client().prepareIndex("sort_idx").setSource( - jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, "val5").field("l", 5).field("d", 2).endObject())); - builders.add(client().prepareIndex("sort_idx").setSource( - jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, "val6").field("l", 5).field("d", 1).endObject())); - builders.add(client().prepareIndex("sort_idx").setSource( - jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, "val7").field("l", 5).field("d", 1).endObject())); + builders.add( + client().prepareIndex("sort_idx") + .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, "val3").field("l", 3).field("d", 1).endObject()) + ); + builders.add( + client().prepareIndex("sort_idx") + .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, "val3").field("l", 3).field("d", 2).endObject()) + ); + builders.add( + client().prepareIndex("sort_idx") + .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, "val4").field("l", 3).field("d", 1).endObject()) + ); + builders.add( + client().prepareIndex("sort_idx") + .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, "val4").field("l", 3).field("d", 3).endObject()) + ); + builders.add( + client().prepareIndex("sort_idx") + .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, "val5").field("l", 5).field("d", 1).endObject()) + ); + builders.add( + client().prepareIndex("sort_idx") + .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, "val5").field("l", 5).field("d", 2).endObject()) + ); + builders.add( + client().prepareIndex("sort_idx") + .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, "val6").field("l", 5).field("d", 1).endObject()) + ); + builders.add( + client().prepareIndex("sort_idx") + .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, "val7").field("l", 5).field("d", 1).endObject()) + ); } private String key(Terms.Bucket bucket) { @@ -251,11 +295,18 @@ private String key(Terms.Bucket bucket) { // the main purpose of this test is to make sure we're not allocating 2GB of memory per shard public void testSizeIsZero() { final int minDocCount = randomInt(1); - IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, () -> client() - .prepareSearch("high_card_idx") + IllegalArgumentException exception = expectThrows( + IllegalArgumentException.class, + () -> client().prepareSearch("high_card_idx") .addAggregation( - terms("terms").executionHint(randomExecutionHint()).field(SINGLE_VALUED_FIELD_NAME) - .collectMode(randomFrom(SubAggCollectionMode.values())).minDocCount(minDocCount).size(0)).get()); + terms("terms").executionHint(randomExecutionHint()) + .field(SINGLE_VALUED_FIELD_NAME) + .collectMode(randomFrom(SubAggCollectionMode.values())) + .minDocCount(minDocCount) + .size(0) + ) + .get() + ); assertThat(exception.getMessage(), containsString("[size] must be greater than 0. 
Found [0] in [terms]")); } @@ -270,8 +321,8 @@ public void testMultiValueFieldWithPartitionedFiltering() throws Exception { private void runTestFieldWithPartitionedFiltering(String field) throws Exception { // Find total number of unique terms SearchResponse allResponse = client().prepareSearch("idx") - .addAggregation(terms("terms").field(field).size(10000).collectMode(randomFrom(SubAggCollectionMode.values()))) - .get(); + .addAggregation(terms("terms").field(field).size(10000).collectMode(randomFrom(SubAggCollectionMode.values()))) + .get(); assertSearchResponse(allResponse); Terms terms = allResponse.getAggregations().get("terms"); assertThat(terms, notNullValue()); @@ -282,9 +333,13 @@ private void runTestFieldWithPartitionedFiltering(String field) throws Exception final int numPartitions = randomIntBetween(2, 4); Set foundTerms = new HashSet<>(); for (int partition = 0; partition < numPartitions; partition++) { - SearchResponse response = client().prepareSearch("idx").addAggregation(terms("terms").field(field) - .includeExclude(new IncludeExclude(partition, numPartitions)).collectMode(randomFrom(SubAggCollectionMode.values()))) - .get(); + SearchResponse response = client().prepareSearch("idx") + .addAggregation( + terms("terms").field(field) + .includeExclude(new IncludeExclude(partition, numPartitions)) + .collectMode(randomFrom(SubAggCollectionMode.values())) + ) + .get(); assertSearchResponse(response); terms = response.getAggregations().get("terms"); assertThat(terms, notNullValue()); @@ -297,16 +352,15 @@ private void runTestFieldWithPartitionedFiltering(String field) throws Exception } public void testSingleValuedFieldWithValueScript() throws Exception { - SearchResponse response = client() - .prepareSearch("idx") + SearchResponse response = client().prepareSearch("idx") - .addAggregation( - terms("terms") - .executionHint(randomExecutionHint()) - .field(SINGLE_VALUED_FIELD_NAME) - .collectMode(randomFrom(SubAggCollectionMode.values())) - .script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "'foo_' + _value", Collections.emptyMap()))) - .get(); + .addAggregation( + terms("terms").executionHint(randomExecutionHint()) + .field(SINGLE_VALUED_FIELD_NAME) + .collectMode(randomFrom(SubAggCollectionMode.values())) + .script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "'foo_' + _value", Collections.emptyMap())) + ) + .get(); assertSearchResponse(response); @@ -324,17 +378,15 @@ public void testSingleValuedFieldWithValueScript() throws Exception { } public void testMultiValuedFieldWithValueScriptNotUnique() throws Exception { - SearchResponse response = client() - .prepareSearch("idx") + SearchResponse response = client().prepareSearch("idx") - .addAggregation( - terms("terms") - .executionHint(randomExecutionHint()) - .field(MULTI_VALUED_FIELD_NAME) - .collectMode(randomFrom(SubAggCollectionMode.values())) - .script(new Script( - ScriptType.INLINE, CustomScriptPlugin.NAME, "_value.substring(0,3)", Collections.emptyMap()))) - .get(); + .addAggregation( + terms("terms").executionHint(randomExecutionHint()) + .field(MULTI_VALUED_FIELD_NAME) + .collectMode(randomFrom(SubAggCollectionMode.values())) + .script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_value.substring(0,3)", Collections.emptyMap())) + ) + .get(); assertSearchResponse(response); @@ -350,16 +402,21 @@ public void testMultiValuedFieldWithValueScriptNotUnique() throws Exception { } public void testMultiValuedScript() throws Exception { - SearchResponse response = client() - 
.prepareSearch("idx") - - .addAggregation( - terms("terms") - .executionHint(randomExecutionHint()) - .script(new Script(ScriptType.INLINE, - CustomScriptPlugin.NAME, "doc['" + MULTI_VALUED_FIELD_NAME + "']", Collections.emptyMap())) - .collectMode(randomFrom(SubAggCollectionMode.values()))) - .get(); + SearchResponse response = client().prepareSearch("idx") + + .addAggregation( + terms("terms").executionHint(randomExecutionHint()) + .script( + new Script( + ScriptType.INLINE, + CustomScriptPlugin.NAME, + "doc['" + MULTI_VALUED_FIELD_NAME + "']", + Collections.emptyMap() + ) + ) + .collectMode(randomFrom(SubAggCollectionMode.values())) + ) + .get(); assertSearchResponse(response); @@ -381,16 +438,15 @@ public void testMultiValuedScript() throws Exception { } public void testMultiValuedFieldWithValueScript() throws Exception { - SearchResponse response = client() - .prepareSearch("idx") + SearchResponse response = client().prepareSearch("idx") - .addAggregation( - terms("terms") - .executionHint(randomExecutionHint()) - .field(MULTI_VALUED_FIELD_NAME) - .collectMode(randomFrom(SubAggCollectionMode.values())) - .script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "'foo_' + _value", Collections.emptyMap()))) - .get(); + .addAggregation( + terms("terms").executionHint(randomExecutionHint()) + .field(MULTI_VALUED_FIELD_NAME) + .collectMode(randomFrom(SubAggCollectionMode.values())) + .script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "'foo_' + _value", Collections.emptyMap())) + ) + .get(); assertSearchResponse(response); @@ -424,18 +480,19 @@ public void testMultiValuedFieldWithValueScript() throws Exception { */ public void testScriptSingleValue() throws Exception { - Script script = - new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['" + SINGLE_VALUED_FIELD_NAME + "'].value", Collections.emptyMap()); + Script script = new Script( + ScriptType.INLINE, + CustomScriptPlugin.NAME, + "doc['" + SINGLE_VALUED_FIELD_NAME + "'].value", + Collections.emptyMap() + ); - SearchResponse response = client() - .prepareSearch("idx") + SearchResponse response = client().prepareSearch("idx") - .addAggregation( - terms("terms") - .collectMode(randomFrom(SubAggCollectionMode.values())) - .executionHint(randomExecutionHint()) - .script(script)) - .get(); + .addAggregation( + terms("terms").collectMode(randomFrom(SubAggCollectionMode.values())).executionHint(randomExecutionHint()).script(script) + ) + .get(); assertSearchResponse(response); @@ -453,18 +510,19 @@ public void testScriptSingleValue() throws Exception { } public void testScriptSingleValueExplicitSingleValue() throws Exception { - Script script = - new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['" + SINGLE_VALUED_FIELD_NAME + "'].value", Collections.emptyMap()); + Script script = new Script( + ScriptType.INLINE, + CustomScriptPlugin.NAME, + "doc['" + SINGLE_VALUED_FIELD_NAME + "'].value", + Collections.emptyMap() + ); - SearchResponse response = client() - .prepareSearch("idx") + SearchResponse response = client().prepareSearch("idx") - .addAggregation( - terms("terms") - .collectMode(randomFrom(SubAggCollectionMode.values())) - .executionHint(randomExecutionHint()) - .script(script)) - .get(); + .addAggregation( + terms("terms").collectMode(randomFrom(SubAggCollectionMode.values())).executionHint(randomExecutionHint()).script(script) + ) + .get(); assertSearchResponse(response); @@ -482,16 +540,21 @@ public void testScriptSingleValueExplicitSingleValue() throws Exception { } public void 
testScriptMultiValued() throws Exception { - SearchResponse response = client() - .prepareSearch("idx") - - .addAggregation( - terms("terms") - .collectMode(randomFrom(SubAggCollectionMode.values())) - .executionHint(randomExecutionHint()) - .script(new Script(ScriptType.INLINE, - CustomScriptPlugin.NAME, "doc['" + MULTI_VALUED_FIELD_NAME + "']", Collections.emptyMap()))) - .get(); + SearchResponse response = client().prepareSearch("idx") + + .addAggregation( + terms("terms").collectMode(randomFrom(SubAggCollectionMode.values())) + .executionHint(randomExecutionHint()) + .script( + new Script( + ScriptType.INLINE, + CustomScriptPlugin.NAME, + "doc['" + MULTI_VALUED_FIELD_NAME + "']", + Collections.emptyMap() + ) + ) + ) + .get(); assertSearchResponse(response); @@ -513,12 +576,14 @@ public void testScriptMultiValued() throws Exception { } public void testPartiallyUnmapped() throws Exception { - SearchResponse response = client() - .prepareSearch("idx", "idx_unmapped") + SearchResponse response = client().prepareSearch("idx", "idx_unmapped") - .addAggregation( - terms("terms").executionHint(randomExecutionHint()).field(SINGLE_VALUED_FIELD_NAME) - .collectMode(randomFrom(SubAggCollectionMode.values()))).get(); + .addAggregation( + terms("terms").executionHint(randomExecutionHint()) + .field(SINGLE_VALUED_FIELD_NAME) + .collectMode(randomFrom(SubAggCollectionMode.values())) + ) + .get(); assertSearchResponse(response); @@ -537,13 +602,14 @@ public void testPartiallyUnmapped() throws Exception { public void testStringTermsNestedIntoPerBucketAggregator() throws Exception { // no execution hint so that the logic that decides whether or not to use ordinals is executed - SearchResponse response = client() - .prepareSearch("idx") + SearchResponse response = client().prepareSearch("idx") - .addAggregation( - filter("filter", termQuery(MULTI_VALUED_FIELD_NAME, "val3")).subAggregation( - terms("terms").field(MULTI_VALUED_FIELD_NAME).collectMode(randomFrom(SubAggCollectionMode.values())))) - .get(); + .addAggregation( + filter("filter", termQuery(MULTI_VALUED_FIELD_NAME, "val3")).subAggregation( + terms("terms").field(MULTI_VALUED_FIELD_NAME).collectMode(randomFrom(SubAggCollectionMode.values())) + ) + ) + .get(); assertThat(response.getFailedShards(), equalTo(0)); @@ -565,14 +631,15 @@ public void testStringTermsNestedIntoPerBucketAggregator() throws Exception { public void testSingleValuedFieldOrderedByIllegalAgg() throws Exception { boolean asc = true; try { - client() - .prepareSearch("idx") + client().prepareSearch("idx") .addAggregation( - terms("terms").executionHint(randomExecutionHint()).field(SINGLE_VALUED_FIELD_NAME) - .collectMode(randomFrom(SubAggCollectionMode.values())) - .order(BucketOrder.aggregation("inner_terms>avg", asc)) - .subAggregation(terms("inner_terms").field(MULTI_VALUED_FIELD_NAME).subAggregation(avg("avg").field("i")))) + terms("terms").executionHint(randomExecutionHint()) + .field(SINGLE_VALUED_FIELD_NAME) + .collectMode(randomFrom(SubAggCollectionMode.values())) + .order(BucketOrder.aggregation("inner_terms>avg", asc)) + .subAggregation(terms("inner_terms").field(MULTI_VALUED_FIELD_NAME).subAggregation(avg("avg").field("i"))) + ) .get(); fail("Expected an exception"); } catch (SearchPhaseExecutionException e) { @@ -599,13 +666,16 @@ public void testSingleValuedFieldOrderedByIllegalAgg() throws Exception { public void testSingleValuedFieldOrderedBySingleBucketSubAggregationAsc() throws Exception { boolean asc = randomBoolean(); - SearchResponse response = client() - 
.prepareSearch("idx") + SearchResponse response = client().prepareSearch("idx") - .addAggregation( - terms("tags").executionHint(randomExecutionHint()).field("tag") - .collectMode(randomFrom(SubAggCollectionMode.values())).order(BucketOrder.aggregation("filter", asc)) - .subAggregation(filter("filter", QueryBuilders.matchAllQuery()))).get(); + .addAggregation( + terms("tags").executionHint(randomExecutionHint()) + .field("tag") + .collectMode(randomFrom(SubAggCollectionMode.values())) + .order(BucketOrder.aggregation("filter", asc)) + .subAggregation(filter("filter", QueryBuilders.matchAllQuery())) + ) + .get(); assertSearchResponse(response); @@ -635,19 +705,20 @@ public void testSingleValuedFieldOrderedBySingleBucketSubAggregationAsc() throws public void testSingleValuedFieldOrderedBySubAggregationAscMultiHierarchyLevels() throws Exception { boolean asc = randomBoolean(); - SearchResponse response = client() - .prepareSearch("idx") - - .addAggregation( - terms("tags") - .executionHint(randomExecutionHint()) - .field("tag") - .collectMode(randomFrom(SubAggCollectionMode.values())) - .order(BucketOrder.aggregation("filter1>filter2>stats.max", asc)) - .subAggregation( - filter("filter1", QueryBuilders.matchAllQuery()).subAggregation( - filter("filter2", QueryBuilders.matchAllQuery()).subAggregation( - stats("stats").field("i"))))).get(); + SearchResponse response = client().prepareSearch("idx") + + .addAggregation( + terms("tags").executionHint(randomExecutionHint()) + .field("tag") + .collectMode(randomFrom(SubAggCollectionMode.values())) + .order(BucketOrder.aggregation("filter1>filter2>stats.max", asc)) + .subAggregation( + filter("filter1", QueryBuilders.matchAllQuery()).subAggregation( + filter("filter2", QueryBuilders.matchAllQuery()).subAggregation(stats("stats").field("i")) + ) + ) + ) + .get(); assertSearchResponse(response); @@ -698,19 +769,20 @@ public void testSingleValuedFieldOrderedBySubAggregationAscMultiHierarchyLevelsS statsNameBuilder.append(randomAlphaOfLengthBetween(3, 10).replace("[", "").replace("]", "").replace(">", "")); String statsName = statsNameBuilder.toString(); boolean asc = randomBoolean(); - SearchResponse response = client() - .prepareSearch("idx") - - .addAggregation( - terms("tags") - .executionHint(randomExecutionHint()) - .field("tag") - .collectMode(randomFrom(SubAggCollectionMode.values())) - .order(BucketOrder.aggregation("filter1>" + filter2Name + ">" + statsName + ".max", asc)) - .subAggregation( - filter("filter1", QueryBuilders.matchAllQuery()).subAggregation( - filter(filter2Name, QueryBuilders.matchAllQuery()).subAggregation( - stats(statsName).field("i"))))).get(); + SearchResponse response = client().prepareSearch("idx") + + .addAggregation( + terms("tags").executionHint(randomExecutionHint()) + .field("tag") + .collectMode(randomFrom(SubAggCollectionMode.values())) + .order(BucketOrder.aggregation("filter1>" + filter2Name + ">" + statsName + ".max", asc)) + .subAggregation( + filter("filter1", QueryBuilders.matchAllQuery()).subAggregation( + filter(filter2Name, QueryBuilders.matchAllQuery()).subAggregation(stats(statsName).field("i")) + ) + ) + ) + .get(); assertSearchResponse(response); @@ -761,19 +833,20 @@ public void testSingleValuedFieldOrderedBySubAggregationAscMultiHierarchyLevelsS statsNameBuilder.append(randomAlphaOfLengthBetween(3, 10).replace("[", "").replace("]", "").replace(">", "")); String statsName = statsNameBuilder.toString(); boolean asc = randomBoolean(); - SearchResponse response = client() - .prepareSearch("idx") - - 
.addAggregation( - terms("tags") - .executionHint(randomExecutionHint()) - .field("tag") - .collectMode(randomFrom(SubAggCollectionMode.values())) - .order(BucketOrder.aggregation("filter1>" + filter2Name + ">" + statsName + "[max]", asc)) - .subAggregation( - filter("filter1", QueryBuilders.matchAllQuery()).subAggregation( - filter(filter2Name, QueryBuilders.matchAllQuery()).subAggregation( - stats(statsName).field("i"))))).get(); + SearchResponse response = client().prepareSearch("idx") + + .addAggregation( + terms("tags").executionHint(randomExecutionHint()) + .field("tag") + .collectMode(randomFrom(SubAggCollectionMode.values())) + .order(BucketOrder.aggregation("filter1>" + filter2Name + ">" + statsName + "[max]", asc)) + .subAggregation( + filter("filter1", QueryBuilders.matchAllQuery()).subAggregation( + filter(filter2Name, QueryBuilders.matchAllQuery()).subAggregation(stats(statsName).field("i")) + ) + ) + ) + .get(); assertSearchResponse(response); @@ -821,10 +894,13 @@ public void testSingleValuedFieldOrderedByMissingSubAggregation() throws Excepti try { client().prepareSearch(index) - .addAggregation( - terms("terms").executionHint(randomExecutionHint()).field(SINGLE_VALUED_FIELD_NAME) - .collectMode(randomFrom(SubAggCollectionMode.values())) - .order(BucketOrder.aggregation("avg_i", true))).get(); + .addAggregation( + terms("terms").executionHint(randomExecutionHint()) + .field(SINGLE_VALUED_FIELD_NAME) + .collectMode(randomFrom(SubAggCollectionMode.values())) + .order(BucketOrder.aggregation("avg_i", true)) + ) + .get(); fail("Expected search to fail when trying to sort terms aggregation by sug-aggregation that doesn't exist"); @@ -839,15 +915,19 @@ public void testSingleValuedFieldOrderedByNonMetricsOrMultiBucketSubAggregation( try { client().prepareSearch(index) - .addAggregation( - terms("terms").executionHint(randomExecutionHint()).field(SINGLE_VALUED_FIELD_NAME) - .collectMode(randomFrom(SubAggCollectionMode.values())) - .order(BucketOrder.aggregation("values", true)) - .subAggregation(terms("values").field("i").collectMode(randomFrom(SubAggCollectionMode.values())))) - .get(); + .addAggregation( + terms("terms").executionHint(randomExecutionHint()) + .field(SINGLE_VALUED_FIELD_NAME) + .collectMode(randomFrom(SubAggCollectionMode.values())) + .order(BucketOrder.aggregation("values", true)) + .subAggregation(terms("values").field("i").collectMode(randomFrom(SubAggCollectionMode.values()))) + ) + .get(); - fail("Expected search to fail when trying to sort terms aggregation by sug-aggregation " + - "which is not of a metrics or single-bucket type"); + fail( + "Expected search to fail when trying to sort terms aggregation by sug-aggregation " + + "which is not of a metrics or single-bucket type" + ); } catch (ElasticsearchException e) { // expected @@ -858,16 +938,22 @@ public void testSingleValuedFieldOrderedByNonMetricsOrMultiBucketSubAggregation( public void testSingleValuedFieldOrderedByMultiValuedSubAggregationWithUnknownMetric() throws Exception { for (String index : Arrays.asList("idx", "idx_unmapped")) { try { - SearchResponse response = client() - .prepareSearch(index) + SearchResponse response = client().prepareSearch(index) - .addAggregation( - terms("terms").executionHint(randomExecutionHint()).field(SINGLE_VALUED_FIELD_NAME) - .collectMode(randomFrom(SubAggCollectionMode.values())) - .order(BucketOrder.aggregation("stats.foo", true)).subAggregation(stats("stats").field("i"))) - .get(); - fail("Expected search to fail when trying to sort terms aggregation by 
multi-valued sug-aggregation " - + "with an unknown specified metric to order by. response had " + response.getFailedShards() + " failed shards."); + .addAggregation( + terms("terms").executionHint(randomExecutionHint()) + .field(SINGLE_VALUED_FIELD_NAME) + .collectMode(randomFrom(SubAggCollectionMode.values())) + .order(BucketOrder.aggregation("stats.foo", true)) + .subAggregation(stats("stats").field("i")) + ) + .get(); + fail( + "Expected search to fail when trying to sort terms aggregation by multi-valued sug-aggregation " + + "with an unknown specified metric to order by. response had " + + response.getFailedShards() + + " failed shards." + ); } catch (ElasticsearchException e) { // expected @@ -880,14 +966,20 @@ public void testSingleValuedFieldOrderedByMultiValuedSubAggregationWithoutMetric try { client().prepareSearch(index) - .addAggregation( - terms("terms").executionHint(randomExecutionHint()).field(SINGLE_VALUED_FIELD_NAME) - .collectMode(randomFrom(SubAggCollectionMode.values())) - .order(BucketOrder.aggregation("stats", true)).subAggregation(stats("stats").field("i"))).execute() - .actionGet(); - - fail("Expected search to fail when trying to sort terms aggregation by multi-valued sug-aggregation " - + "where the metric name is not specified"); + .addAggregation( + terms("terms").executionHint(randomExecutionHint()) + .field(SINGLE_VALUED_FIELD_NAME) + .collectMode(randomFrom(SubAggCollectionMode.values())) + .order(BucketOrder.aggregation("stats", true)) + .subAggregation(stats("stats").field("i")) + ) + .execute() + .actionGet(); + + fail( + "Expected search to fail when trying to sort terms aggregation by multi-valued sug-aggregation " + + "where the metric name is not specified" + ); } catch (ElasticsearchException e) { // expected @@ -897,13 +989,16 @@ public void testSingleValuedFieldOrderedByMultiValuedSubAggregationWithoutMetric public void testSingleValuedFieldOrderedByMultiValueSubAggregationAsc() throws Exception { boolean asc = true; - SearchResponse response = client() - .prepareSearch("idx") + SearchResponse response = client().prepareSearch("idx") - .addAggregation( - terms("terms").executionHint(randomExecutionHint()).field(SINGLE_VALUED_FIELD_NAME) - .collectMode(randomFrom(SubAggCollectionMode.values())).order(BucketOrder.aggregation("stats.avg", asc)) - .subAggregation(stats("stats").field("i"))).get(); + .addAggregation( + terms("terms").executionHint(randomExecutionHint()) + .field(SINGLE_VALUED_FIELD_NAME) + .collectMode(randomFrom(SubAggCollectionMode.values())) + .order(BucketOrder.aggregation("stats.avg", asc)) + .subAggregation(stats("stats").field("i")) + ) + .get(); assertSearchResponse(response); @@ -927,13 +1022,16 @@ public void testSingleValuedFieldOrderedByMultiValueSubAggregationAsc() throws E public void testSingleValuedFieldOrderedByMultiValueSubAggregationDesc() throws Exception { boolean asc = false; - SearchResponse response = client() - .prepareSearch("idx") + SearchResponse response = client().prepareSearch("idx") - .addAggregation( - terms("terms").executionHint(randomExecutionHint()).field(SINGLE_VALUED_FIELD_NAME) - .collectMode(randomFrom(SubAggCollectionMode.values())).order(BucketOrder.aggregation("stats.avg", asc)) - .subAggregation(stats("stats").field("i"))).get(); + .addAggregation( + terms("terms").executionHint(randomExecutionHint()) + .field(SINGLE_VALUED_FIELD_NAME) + .collectMode(randomFrom(SubAggCollectionMode.values())) + .order(BucketOrder.aggregation("stats.avg", asc)) + .subAggregation(stats("stats").field("i")) + ) 
+ .get(); assertSearchResponse(response); @@ -958,14 +1056,16 @@ public void testSingleValuedFieldOrderedByMultiValueSubAggregationDesc() throws public void testSingleValuedFieldOrderedByMultiValueExtendedStatsAsc() throws Exception { boolean asc = true; - SearchResponse response = client() - .prepareSearch("idx") + SearchResponse response = client().prepareSearch("idx") - .addAggregation( - terms("terms").executionHint(randomExecutionHint()).field(SINGLE_VALUED_FIELD_NAME) - .collectMode(randomFrom(SubAggCollectionMode.values())) - .order(BucketOrder.aggregation("stats.sum_of_squares", asc)) - .subAggregation(extendedStats("stats").field("i"))).get(); + .addAggregation( + terms("terms").executionHint(randomExecutionHint()) + .field(SINGLE_VALUED_FIELD_NAME) + .collectMode(randomFrom(SubAggCollectionMode.values())) + .order(BucketOrder.aggregation("stats.sum_of_squares", asc)) + .subAggregation(extendedStats("stats").field("i")) + ) + .get(); assertSearchResponse(response); @@ -990,16 +1090,17 @@ public void testSingleValuedFieldOrderedByMultiValueExtendedStatsAsc() throws Ex public void testSingleValuedFieldOrderedByStatsAggAscWithTermsSubAgg() throws Exception { boolean asc = true; - SearchResponse response = client() - .prepareSearch("idx") - - .addAggregation( - terms("terms").executionHint(randomExecutionHint()).field(SINGLE_VALUED_FIELD_NAME) - .collectMode(randomFrom(SubAggCollectionMode.values())) - .order(BucketOrder.aggregation("stats.sum_of_squares", asc)) - .subAggregation(extendedStats("stats").field("i")) - .subAggregation(terms("subTerms").field("s_values").collectMode(randomFrom(SubAggCollectionMode.values())))) - .get(); + SearchResponse response = client().prepareSearch("idx") + + .addAggregation( + terms("terms").executionHint(randomExecutionHint()) + .field(SINGLE_VALUED_FIELD_NAME) + .collectMode(randomFrom(SubAggCollectionMode.values())) + .order(BucketOrder.aggregation("stats.sum_of_squares", asc)) + .subAggregation(extendedStats("stats").field("i")) + .subAggregation(terms("subTerms").field("s_values").collectMode(randomFrom(SubAggCollectionMode.values()))) + ) + .get(); assertSearchResponse(response); @@ -1060,8 +1161,12 @@ public void testSingleValuedFieldOrderedBySingleValueSubAggregationAscSingleValu public void testSingleValuedFieldOrderedByThreeCriteria() throws Exception { String[] expectedKeys = new String[] { "val2", "val1", "val4", "val5", "val3", "val6", "val7" }; - assertMultiSortResponse(expectedKeys, BucketOrder.count(false), BucketOrder.aggregation("sum_d", false), - BucketOrder.aggregation("avg_l", false)); + assertMultiSortResponse( + expectedKeys, + BucketOrder.count(false), + BucketOrder.aggregation("sum_d", false), + BucketOrder.aggregation("avg_l", false) + ); } public void testSingleValuedFieldOrderedBySingleValueSubAggregationAscAsCompound() throws Exception { @@ -1070,12 +1175,16 @@ public void testSingleValuedFieldOrderedBySingleValueSubAggregationAscAsCompound } private void assertMultiSortResponse(String[] expectedKeys, BucketOrder... 
order) { - SearchResponse response = client() - .prepareSearch("sort_idx") - .addAggregation( - terms("terms").executionHint(randomExecutionHint()).field(SINGLE_VALUED_FIELD_NAME) - .collectMode(randomFrom(SubAggCollectionMode.values())).order(BucketOrder.compound(order)) - .subAggregation(avg("avg_l").field("l")).subAggregation(sum("sum_d").field("d"))).get(); + SearchResponse response = client().prepareSearch("sort_idx") + .addAggregation( + terms("terms").executionHint(randomExecutionHint()) + .field(SINGLE_VALUED_FIELD_NAME) + .collectMode(randomFrom(SubAggCollectionMode.values())) + .order(BucketOrder.compound(order)) + .subAggregation(avg("avg_l").field("l")) + .subAggregation(sum("sum_d").field("d")) + ) + .get(); assertSearchResponse(response); @@ -1100,12 +1209,14 @@ private void assertMultiSortResponse(String[] expectedKeys, BucketOrder... order } public void testIndexMetaField() throws Exception { - SearchResponse response = client() - .prepareSearch("idx", "empty_bucket_idx") + SearchResponse response = client().prepareSearch("idx", "empty_bucket_idx") - .addAggregation( - terms("terms").collectMode(randomFrom(SubAggCollectionMode.values())).executionHint(randomExecutionHint()) - .field(IndexFieldMapper.NAME)).get(); + .addAggregation( + terms("terms").collectMode(randomFrom(SubAggCollectionMode.values())) + .executionHint(randomExecutionHint()) + .field(IndexFieldMapper.NAME) + ) + .get(); assertSearchResponse(response); Terms terms = response.getAggregations().get("terms"); @@ -1131,67 +1242,145 @@ public void testOtherDocCount() { * Ensure requests using nondeterministic scripts do not get cached. */ public void testScriptCaching() throws Exception { - assertAcked(prepareCreate("cache_test_idx").setMapping("d", "type=keyword") + assertAcked( + prepareCreate("cache_test_idx").setMapping("d", "type=keyword") .setSettings(Settings.builder().put("requests.cache.enable", true).put("number_of_shards", 1).put("number_of_replicas", 1)) - .get()); - indexRandom(true, client().prepareIndex("cache_test_idx").setId("1").setSource("s", "foo"), - client().prepareIndex("cache_test_idx").setId("2").setSource("s", "bar")); + .get() + ); + indexRandom( + true, + client().prepareIndex("cache_test_idx").setId("1").setSource("s", "foo"), + client().prepareIndex("cache_test_idx").setId("2").setSource("s", "bar") + ); // Make sure we are starting with a clear cache - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getHitCount(), equalTo(0L)); - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getMissCount(), equalTo(0L)); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getHitCount(), + equalTo(0L) + ); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getMissCount(), + equalTo(0L) + ); // Test that a request using a nondeterministic script does not get cached - SearchResponse r = client().prepareSearch("cache_test_idx").setSize(0) - .addAggregation( - terms("terms").field("d").script( - new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "Math.random()", Collections.emptyMap()))) - .get(); + SearchResponse r = client().prepareSearch("cache_test_idx") + .setSize(0) + .addAggregation( + terms("terms").field("d") + .script(new 
Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "Math.random()", Collections.emptyMap())) + ) + .get(); assertSearchResponse(r); - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getHitCount(), equalTo(0L)); - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getMissCount(), equalTo(0L)); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getHitCount(), + equalTo(0L) + ); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getMissCount(), + equalTo(0L) + ); // Test that a request using a deterministic script gets cached - r = client().prepareSearch("cache_test_idx").setSize(0) - .addAggregation( - terms("terms").field("d").script( - new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "'foo_' + _value", Collections.emptyMap()))) - .get(); + r = client().prepareSearch("cache_test_idx") + .setSize(0) + .addAggregation( + terms("terms").field("d") + .script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "'foo_' + _value", Collections.emptyMap())) + ) + .get(); assertSearchResponse(r); - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getHitCount(), equalTo(0L)); - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getMissCount(), equalTo(1L)); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getHitCount(), + equalTo(0L) + ); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getMissCount(), + equalTo(1L) + ); // Ensure that non-scripted requests are cached as normal r = client().prepareSearch("cache_test_idx").setSize(0).addAggregation(terms("terms").field("d")).get(); assertSearchResponse(r); - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getHitCount(), equalTo(0L)); - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getMissCount(), equalTo(2L)); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getHitCount(), + equalTo(0L) + ); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getMissCount(), + equalTo(2L) + ); } public void testScriptWithValueType() throws Exception { - SearchSourceBuilder builder = new SearchSourceBuilder() - .size(0) - .aggregation(terms("terms") - .script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "42", Collections.emptyMap())) - .userValueTypeHint(randomFrom(ValueType.NUMERIC, ValueType.NUMBER))); + SearchSourceBuilder builder = new SearchSourceBuilder().size(0) + .aggregation( + terms("terms").script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "42", Collections.emptyMap())) + .userValueTypeHint(randomFrom(ValueType.NUMERIC, ValueType.NUMBER)) + ); String source 
= builder.toString(); try (XContentParser parser = createParser(JsonXContent.jsonXContent, source)) { - SearchResponse response = client() - .prepareSearch("idx") - .setSource(SearchSourceBuilder.fromXContent(parser)) - .get(); + SearchResponse response = client().prepareSearch("idx").setSource(SearchSourceBuilder.fromXContent(parser)).get(); assertSearchResponse(response); Terms terms = response.getAggregations().get("terms"); @@ -1203,10 +1392,10 @@ public void testScriptWithValueType() throws Exception { String invalidValueType = source.replaceAll("\"value_type\":\"n.*\"", "\"value_type\":\"foobar\""); try (XContentParser parser = createParser(JsonXContent.jsonXContent, invalidValueType)) { - XContentParseException ex = expectThrows(XContentParseException.class, () -> client() - .prepareSearch("idx") - .setSource(SearchSourceBuilder.fromXContent(parser)) - .get()); + XContentParseException ex = expectThrows( + XContentParseException.class, + () -> client().prepareSearch("idx").setSource(SearchSourceBuilder.fromXContent(parser)).get() + ); assertThat(ex.getCause(), instanceOf(IllegalArgumentException.class)); assertThat(ex.getCause().getMessage(), containsString("Unknown value type [foobar]")); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/CardinalityIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/CardinalityIT.java index 2f452ca9c5c23..f700cf7456d1b 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/CardinalityIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/CardinalityIT.java @@ -68,12 +68,12 @@ protected Map, Object>> pluginScripts() { }); scripts.put("doc[' + singleNumericField() + '].value", vars -> { - Map doc =(Map) vars.get("doc"); + Map doc = (Map) vars.get("doc"); return doc.get(singleNumericField()); }); scripts.put("doc[' + multiNumericField(false) + ']", vars -> { - Map doc =(Map) vars.get("doc"); + Map doc = (Map) vars.get("doc"); return (ScriptDocValues) doc.get(multiNumericField(false)); }); @@ -93,9 +93,9 @@ protected Map, Object>> nonDeterministicPlu @Override public Settings indexSettings() { return Settings.builder() - .put("index.number_of_shards", numberOfShards()) - .put("index.number_of_replicas", numberOfReplicas()) - .build(); + .put("index.number_of_shards", numberOfShards()) + .put("index.number_of_replicas", numberOfReplicas()) + .build(); } static long numDocs; @@ -105,40 +105,47 @@ public Settings indexSettings() { public void setupSuiteScopeCluster() throws Exception { prepareCreate("idx").setMapping( - jsonBuilder().startObject().startObject("_doc").startObject("properties") - .startObject("str_value") - .field("type", "keyword") - .endObject() - .startObject("str_values") - .field("type", "keyword") - .endObject() - .startObject("l_value") - .field("type", "long") - .endObject() - .startObject("l_values") - .field("type", "long") - .endObject() - .startObject("d_value") - .field("type", "double") - .endObject() - .startObject("d_values") - .field("type", "double") - .endObject() - .endObject().endObject().endObject()).get(); + jsonBuilder().startObject() + .startObject("_doc") + .startObject("properties") + .startObject("str_value") + .field("type", "keyword") + .endObject() + .startObject("str_values") + .field("type", "keyword") + .endObject() + .startObject("l_value") + .field("type", "long") + .endObject() + .startObject("l_values") + .field("type", "long") + 
.endObject() + .startObject("d_value") + .field("type", "double") + .endObject() + .startObject("d_values") + .field("type", "double") + .endObject() + .endObject() + .endObject() + .endObject() + ).get(); numDocs = randomIntBetween(2, 100); precisionThreshold = randomIntBetween(0, 1 << randomInt(20)); IndexRequestBuilder[] builders = new IndexRequestBuilder[(int) numDocs]; for (int i = 0; i < numDocs; ++i) { - builders[i] = client().prepareIndex("idx").setSource(jsonBuilder() - .startObject() + builders[i] = client().prepareIndex("idx") + .setSource( + jsonBuilder().startObject() .field("str_value", "s" + i) - .array("str_values", new String[]{"s" + (i * 2), "s" + (i * 2 + 1)}) + .array("str_values", new String[] { "s" + (i * 2), "s" + (i * 2 + 1) }) .field("l_value", i) - .array("l_values", new int[] {i * 2, i * 2 + 1}) + .array("l_values", new int[] { i * 2, i * 2 + 1 }) .field("d_value", i) - .array("d_values", new double[]{i * 2, i * 2 + 1}) - .endObject()); + .array("d_values", new double[] { i * 2, i * 2 + 1 }) + .endObject() + ); } indexRandom(true, builders); createIndex("idx_unmapped"); @@ -161,7 +168,8 @@ private void assertCount(Cardinality count, long value) { assertThat(count.getValue(), greaterThan(0L)); } } - private static String singleNumericField() { + + private static String singleNumericField() { return randomBoolean() ? "l_value" : "d_value"; } @@ -171,8 +179,8 @@ private static String multiNumericField(boolean hash) { public void testUnmapped() throws Exception { SearchResponse response = client().prepareSearch("idx_unmapped") - .addAggregation(cardinality("cardinality").precisionThreshold(precisionThreshold).field("str_value")) - .get(); + .addAggregation(cardinality("cardinality").precisionThreshold(precisionThreshold).field("str_value")) + .get(); assertSearchResponse(response); @@ -184,8 +192,8 @@ public void testUnmapped() throws Exception { public void testPartiallyUnmapped() throws Exception { SearchResponse response = client().prepareSearch("idx", "idx_unmapped") - .addAggregation(cardinality("cardinality").precisionThreshold(precisionThreshold).field("str_value")) - .get(); + .addAggregation(cardinality("cardinality").precisionThreshold(precisionThreshold).field("str_value")) + .get(); assertSearchResponse(response); @@ -197,8 +205,8 @@ public void testPartiallyUnmapped() throws Exception { public void testSingleValuedString() throws Exception { SearchResponse response = client().prepareSearch("idx") - .addAggregation(cardinality("cardinality").precisionThreshold(precisionThreshold).field("str_value")) - .get(); + .addAggregation(cardinality("cardinality").precisionThreshold(precisionThreshold).field("str_value")) + .get(); assertSearchResponse(response); @@ -210,8 +218,8 @@ public void testSingleValuedString() throws Exception { public void testSingleValuedNumeric() throws Exception { SearchResponse response = client().prepareSearch("idx") - .addAggregation(cardinality("cardinality").precisionThreshold(precisionThreshold).field(singleNumericField())) - .get(); + .addAggregation(cardinality("cardinality").precisionThreshold(precisionThreshold).field(singleNumericField())) + .get(); assertSearchResponse(response); @@ -222,11 +230,14 @@ public void testSingleValuedNumeric() throws Exception { } public void testSingleValuedNumericGetProperty() throws Exception { - SearchResponse searchResponse = client().prepareSearch("idx").setQuery(matchAllQuery()) - .addAggregation( - global("global").subAggregation( - 
cardinality("cardinality").precisionThreshold(precisionThreshold).field(singleNumericField()))) - .get(); + SearchResponse searchResponse = client().prepareSearch("idx") + .setQuery(matchAllQuery()) + .addAggregation( + global("global").subAggregation( + cardinality("cardinality").precisionThreshold(precisionThreshold).field(singleNumericField()) + ) + ) + .get(); assertSearchResponse(searchResponse); @@ -242,15 +253,15 @@ public void testSingleValuedNumericGetProperty() throws Exception { assertThat(cardinality.getName(), equalTo("cardinality")); long expectedValue = numDocs; assertCount(cardinality, expectedValue); - assertThat(((InternalAggregation)global).getProperty("cardinality"), equalTo(cardinality)); - assertThat(((InternalAggregation)global).getProperty("cardinality.value"), equalTo((double) cardinality.getValue())); - assertThat((double) ((InternalAggregation)cardinality).getProperty("value"), equalTo((double) cardinality.getValue())); + assertThat(((InternalAggregation) global).getProperty("cardinality"), equalTo(cardinality)); + assertThat(((InternalAggregation) global).getProperty("cardinality.value"), equalTo((double) cardinality.getValue())); + assertThat((double) ((InternalAggregation) cardinality).getProperty("value"), equalTo((double) cardinality.getValue())); } public void testSingleValuedNumericHashed() throws Exception { SearchResponse response = client().prepareSearch("idx") - .addAggregation(cardinality("cardinality").precisionThreshold(precisionThreshold).field(singleNumericField())) - .get(); + .addAggregation(cardinality("cardinality").precisionThreshold(precisionThreshold).field(singleNumericField())) + .get(); assertSearchResponse(response); @@ -262,8 +273,8 @@ public void testSingleValuedNumericHashed() throws Exception { public void testMultiValuedString() throws Exception { SearchResponse response = client().prepareSearch("idx") - .addAggregation(cardinality("cardinality").precisionThreshold(precisionThreshold).field("str_values")) - .get(); + .addAggregation(cardinality("cardinality").precisionThreshold(precisionThreshold).field("str_values")) + .get(); assertSearchResponse(response); @@ -275,8 +286,8 @@ public void testMultiValuedString() throws Exception { public void testMultiValuedNumeric() throws Exception { SearchResponse response = client().prepareSearch("idx") - .addAggregation(cardinality("cardinality").precisionThreshold(precisionThreshold).field(multiNumericField(false))) - .get(); + .addAggregation(cardinality("cardinality").precisionThreshold(precisionThreshold).field(multiNumericField(false))) + .get(); assertSearchResponse(response); @@ -288,8 +299,8 @@ public void testMultiValuedNumeric() throws Exception { public void testMultiValuedNumericHashed() throws Exception { SearchResponse response = client().prepareSearch("idx") - .addAggregation(cardinality("cardinality").precisionThreshold(precisionThreshold).field(multiNumericField(true))) - .get(); + .addAggregation(cardinality("cardinality").precisionThreshold(precisionThreshold).field(multiNumericField(true))) + .get(); assertSearchResponse(response); @@ -301,11 +312,11 @@ public void testMultiValuedNumericHashed() throws Exception { public void testSingleValuedStringScript() throws Exception { SearchResponse response = client().prepareSearch("idx") - .addAggregation( - cardinality("cardinality") - .precisionThreshold(precisionThreshold) - .script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['str_value'].value", emptyMap()))) - .get(); + .addAggregation( + 
cardinality("cardinality").precisionThreshold(precisionThreshold) + .script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['str_value'].value", emptyMap())) + ) + .get(); assertSearchResponse(response); @@ -317,11 +328,11 @@ public void testSingleValuedStringScript() throws Exception { public void testMultiValuedStringScript() throws Exception { SearchResponse response = client().prepareSearch("idx") - .addAggregation( - cardinality("cardinality") - .precisionThreshold(precisionThreshold) - .script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['str_values']", emptyMap()))) - .get(); + .addAggregation( + cardinality("cardinality").precisionThreshold(precisionThreshold) + .script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['str_values']", emptyMap())) + ) + .get(); assertSearchResponse(response); @@ -334,8 +345,8 @@ public void testMultiValuedStringScript() throws Exception { public void testSingleValuedNumericScript() throws Exception { Script script = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc[' + singleNumericField() + '].value", emptyMap()); SearchResponse response = client().prepareSearch("idx") - .addAggregation(cardinality("cardinality").precisionThreshold(precisionThreshold).script(script)) - .get(); + .addAggregation(cardinality("cardinality").precisionThreshold(precisionThreshold).script(script)) + .get(); assertSearchResponse(response); @@ -346,11 +357,15 @@ public void testSingleValuedNumericScript() throws Exception { } public void testMultiValuedNumericScript() throws Exception { - Script script = - new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc[' + multiNumericField(false) + ']", Collections.emptyMap()); + Script script = new Script( + ScriptType.INLINE, + CustomScriptPlugin.NAME, + "doc[' + multiNumericField(false) + ']", + Collections.emptyMap() + ); SearchResponse response = client().prepareSearch("idx") - .addAggregation(cardinality("cardinality").precisionThreshold(precisionThreshold).script(script)) - .get(); + .addAggregation(cardinality("cardinality").precisionThreshold(precisionThreshold).script(script)) + .get(); assertSearchResponse(response); @@ -362,12 +377,12 @@ public void testMultiValuedNumericScript() throws Exception { public void testSingleValuedStringValueScript() throws Exception { SearchResponse response = client().prepareSearch("idx") - .addAggregation( - cardinality("cardinality") - .precisionThreshold(precisionThreshold) - .field("str_value") - .script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_value", emptyMap()))) - .get(); + .addAggregation( + cardinality("cardinality").precisionThreshold(precisionThreshold) + .field("str_value") + .script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_value", emptyMap())) + ) + .get(); assertSearchResponse(response); @@ -379,12 +394,12 @@ public void testSingleValuedStringValueScript() throws Exception { public void testMultiValuedStringValueScript() throws Exception { SearchResponse response = client().prepareSearch("idx") - .addAggregation( - cardinality("cardinality") - .precisionThreshold(precisionThreshold) - .field("str_values") - .script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_value", emptyMap()))) - .get(); + .addAggregation( + cardinality("cardinality").precisionThreshold(precisionThreshold) + .field("str_values") + .script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_value", emptyMap())) + ) + .get(); assertSearchResponse(response); @@ -396,12 +411,12 @@ public void 
testMultiValuedStringValueScript() throws Exception { public void testSingleValuedNumericValueScript() throws Exception { SearchResponse response = client().prepareSearch("idx") - .addAggregation( - cardinality("cardinality") - .precisionThreshold(precisionThreshold) - .field(singleNumericField()) - .script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_value", emptyMap()))) - .get(); + .addAggregation( + cardinality("cardinality").precisionThreshold(precisionThreshold) + .field(singleNumericField()) + .script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_value", emptyMap())) + ) + .get(); assertSearchResponse(response); @@ -413,12 +428,12 @@ public void testSingleValuedNumericValueScript() throws Exception { public void testMultiValuedNumericValueScript() throws Exception { SearchResponse response = client().prepareSearch("idx") - .addAggregation( - cardinality("cardinality") - .precisionThreshold(precisionThreshold) - .field(multiNumericField(false)) - .script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_value", emptyMap()))) - .get(); + .addAggregation( + cardinality("cardinality").precisionThreshold(precisionThreshold) + .field(multiNumericField(false)) + .script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_value", emptyMap())) + ) + .get(); assertSearchResponse(response); @@ -430,10 +445,12 @@ public void testMultiValuedNumericValueScript() throws Exception { public void testAsSubAgg() throws Exception { SearchResponse response = client().prepareSearch("idx") - .addAggregation(terms("terms").field("str_value") - .collectMode(randomFrom(SubAggCollectionMode.values())) - .subAggregation(cardinality("cardinality").precisionThreshold(precisionThreshold).field("str_values"))) - .get(); + .addAggregation( + terms("terms").field("str_value") + .collectMode(randomFrom(SubAggCollectionMode.values())) + .subAggregation(cardinality("cardinality").precisionThreshold(precisionThreshold).field("str_values")) + ) + .get(); assertSearchResponse(response); @@ -451,50 +468,130 @@ public void testAsSubAgg() throws Exception { * Ensure requests using nondeterministic scripts do not get cached. 
*/ public void testScriptCaching() throws Exception { - assertAcked(prepareCreate("cache_test_idx").setMapping("d", "type=long") + assertAcked( + prepareCreate("cache_test_idx").setMapping("d", "type=long") .setSettings(Settings.builder().put("requests.cache.enable", true).put("number_of_shards", 1).put("number_of_replicas", 1)) - .get()); - indexRandom(true, client().prepareIndex("cache_test_idx").setId("1").setSource("s", 1), - client().prepareIndex("cache_test_idx").setId("2").setSource("s", 2)); + .get() + ); + indexRandom( + true, + client().prepareIndex("cache_test_idx").setId("1").setSource("s", 1), + client().prepareIndex("cache_test_idx").setId("2").setSource("s", 2) + ); // Make sure we are starting with a clear cache - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getHitCount(), equalTo(0L)); - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getMissCount(), equalTo(0L)); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getHitCount(), + equalTo(0L) + ); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getMissCount(), + equalTo(0L) + ); // Test that a request using a nondeterministic script does not get cached - SearchResponse r = client().prepareSearch("cache_test_idx").setSize(0) - .addAggregation( - cardinality("foo").field("d").script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "Math.random()", - emptyMap()))) - .get(); + SearchResponse r = client().prepareSearch("cache_test_idx") + .setSize(0) + .addAggregation( + cardinality("foo").field("d").script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "Math.random()", emptyMap())) + ) + .get(); assertSearchResponse(r); - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getHitCount(), equalTo(0L)); - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getMissCount(), equalTo(0L)); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getHitCount(), + equalTo(0L) + ); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getMissCount(), + equalTo(0L) + ); // Test that a request using a deterministic script gets cached - r = client().prepareSearch("cache_test_idx").setSize(0) - .addAggregation( - cardinality("foo").field("d").script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_value", emptyMap()))) - .get(); + r = client().prepareSearch("cache_test_idx") + .setSize(0) + .addAggregation( + cardinality("foo").field("d").script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_value", emptyMap())) + ) + .get(); assertSearchResponse(r); - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getHitCount(), equalTo(0L)); - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getMissCount(), equalTo(1L)); + assertThat( + client().admin() + 
.indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getHitCount(), + equalTo(0L) + ); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getMissCount(), + equalTo(1L) + ); // Ensure that non-scripted requests are cached as normal r = client().prepareSearch("cache_test_idx").setSize(0).addAggregation(cardinality("foo").field("d")).get(); assertSearchResponse(r); - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getHitCount(), equalTo(0L)); - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getMissCount(), equalTo(2L)); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getHitCount(), + equalTo(0L) + ); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getMissCount(), + equalTo(2L) + ); } } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/CardinalityWithRequestBreakerIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/CardinalityWithRequestBreakerIT.java index bdaa8c6dcd6a8..463c728c3e7a0 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/CardinalityWithRequestBreakerIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/CardinalityWithRequestBreakerIT.java @@ -33,23 +33,33 @@ public void testRequestBreaker() throws Exception { final String requestBreaker = randomIntBetween(1, 10000) + "kb"; logger.info("--> Using request breaker setting: {}", requestBreaker); - indexRandom(true, IntStream.range(0, randomIntBetween(10, 1000)) - .mapToObj(i -> - client().prepareIndex("test").setId("id_" + i) - .setSource(Map.of("field0", randomAlphaOfLength(5), "field1", randomAlphaOfLength(5))) - ).toArray(IndexRequestBuilder[]::new)); + indexRandom( + true, + IntStream.range(0, randomIntBetween(10, 1000)) + .mapToObj( + i -> client().prepareIndex("test") + .setId("id_" + i) + .setSource(Map.of("field0", randomAlphaOfLength(5), "field1", randomAlphaOfLength(5))) + ) + .toArray(IndexRequestBuilder[]::new) + ); - client().admin().cluster().prepareUpdateSettings() - .setTransientSettings(Settings.builder().put(HierarchyCircuitBreakerService.REQUEST_CIRCUIT_BREAKER_LIMIT_SETTING.getKey(), - requestBreaker)) + client().admin() + .cluster() + .prepareUpdateSettings() + .setTransientSettings( + Settings.builder().put(HierarchyCircuitBreakerService.REQUEST_CIRCUIT_BREAKER_LIMIT_SETTING.getKey(), requestBreaker) + ) .get(); try { client().prepareSearch("test") - .addAggregation(terms("terms").field("field0.keyword") - .collectMode(randomFrom(Aggregator.SubAggCollectionMode.values())) - .order(BucketOrder.aggregation("cardinality", randomBoolean())) - .subAggregation(cardinality("cardinality").precisionThreshold(randomLongBetween(1, 40000)).field("field1.keyword"))) + .addAggregation( + terms("terms").field("field0.keyword") + .collectMode(randomFrom(Aggregator.SubAggCollectionMode.values())) + .order(BucketOrder.aggregation("cardinality", randomBoolean())) + 
.subAggregation(cardinality("cardinality").precisionThreshold(randomLongBetween(1, 40000)).field("field1.keyword")) + ) .get(); } catch (ElasticsearchException e) { if (ExceptionsHelper.unwrap(e, CircuitBreakingException.class) == null) { @@ -57,7 +67,9 @@ public void testRequestBreaker() throws Exception { } } - client().admin().cluster().prepareUpdateSettings() + client().admin() + .cluster() + .prepareUpdateSettings() .setTransientSettings(Settings.builder().putNull(HierarchyCircuitBreakerService.REQUEST_CIRCUIT_BREAKER_LIMIT_SETTING.getKey())) .get(); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsIT.java index 3b40cedff572a..c4c7cc5eb5268 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsIT.java @@ -13,6 +13,7 @@ import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptType; import org.elasticsearch.search.aggregations.AggregationTestScriptsPlugin; +import org.elasticsearch.search.aggregations.BucketOrder; import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.bucket.filter.Filter; import org.elasticsearch.search.aggregations.bucket.global.Global; @@ -20,7 +21,6 @@ import org.elasticsearch.search.aggregations.bucket.missing.Missing; import org.elasticsearch.search.aggregations.bucket.terms.Terms; import org.elasticsearch.search.aggregations.metrics.ExtendedStats.Bounds; -import org.elasticsearch.search.aggregations.BucketOrder; import java.util.Collection; import java.util.Collections; @@ -74,8 +74,8 @@ private static double variancePopulation(int... vals) { sum += val; sumOfSqrs += val * val; } - double variance = (sumOfSqrs - ((sum * sum) / vals.length)) / vals.length; - return variance < 0 ? 0 : variance; + double variance = (sumOfSqrs - ((sum * sum) / vals.length)) / vals.length; + return variance < 0 ? 0 : variance; } private static double varianceSampling(int... vals) { @@ -85,17 +85,18 @@ private static double varianceSampling(int... vals) { sum += val; sumOfSqrs += val * val; } - double variance = (sumOfSqrs - ((sum * sum) / vals.length)) / (vals.length - 1); - return variance < 0 ? 0 : variance; + double variance = (sumOfSqrs - ((sum * sum) / vals.length)) / (vals.length - 1); + return variance < 0 ? 
0 : variance; } @Override public void testEmptyAggregation() throws Exception { SearchResponse searchResponse = client().prepareSearch("empty_bucket_idx") - .setQuery(matchAllQuery()) - .addAggregation( - histogram("histo").field("value").interval(1L).minDocCount(0).subAggregation(extendedStats("stats").field("value"))) - .get(); + .setQuery(matchAllQuery()) + .addAggregation( + histogram("histo").field("value").interval(1L).minDocCount(0).subAggregation(extendedStats("stats").field("value")) + ) + .get(); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(2L)); Histogram histo = searchResponse.getAggregations().get("histo"); @@ -120,14 +121,15 @@ public void testEmptyAggregation() throws Exception { assertThat(Double.isNaN(stats.getStdDeviationBound(Bounds.UPPER_POPULATION)), is(true)); assertThat(Double.isNaN(stats.getStdDeviationBound(Bounds.LOWER_POPULATION)), is(true)); assertThat(Double.isNaN(stats.getStdDeviationBound(Bounds.UPPER_SAMPLING)), is(true)); - assertThat(Double.isNaN(stats.getStdDeviationBound(Bounds.LOWER_SAMPLING)), is(true));} + assertThat(Double.isNaN(stats.getStdDeviationBound(Bounds.LOWER_SAMPLING)), is(true)); + } @Override public void testUnmapped() throws Exception { SearchResponse searchResponse = client().prepareSearch("idx_unmapped") - .setQuery(matchAllQuery()) - .addAggregation(extendedStats("stats").field("value")) - .get(); + .setQuery(matchAllQuery()) + .addAggregation(extendedStats("stats").field("value")) + .get(); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(0L)); @@ -157,11 +159,15 @@ public void testUnmapped() throws Exception { public void testPartiallyUnmapped() { double sigma = randomDouble() * 5; ExtendedStats s1 = client().prepareSearch("idx") - .addAggregation(extendedStats("stats").field("value").sigma(sigma)).get() - .getAggregations().get("stats"); + .addAggregation(extendedStats("stats").field("value").sigma(sigma)) + .get() + .getAggregations() + .get("stats"); ExtendedStats s2 = client().prepareSearch("idx", "idx_unmapped") - .addAggregation(extendedStats("stats").field("value").sigma(sigma)).get() - .getAggregations().get("stats"); + .addAggregation(extendedStats("stats").field("value").sigma(sigma)) + .get() + .getAggregations() + .get("stats"); assertEquals(s1.getAvg(), s2.getAvg(), 1e-10); assertEquals(s1.getCount(), s2.getCount()); assertEquals(s1.getMin(), s2.getMin(), 0d); @@ -175,27 +181,28 @@ public void testPartiallyUnmapped() { assertEquals(s1.getStdDeviationBound(Bounds.LOWER_POPULATION), s2.getStdDeviationBound(Bounds.LOWER_POPULATION), 1e-10); assertEquals(s1.getStdDeviationBound(Bounds.UPPER_POPULATION), s2.getStdDeviationBound(Bounds.UPPER_POPULATION), 1e-10); assertEquals(s1.getStdDeviationBound(Bounds.LOWER_SAMPLING), s2.getStdDeviationBound(Bounds.LOWER_SAMPLING), 1e-10); - assertEquals(s1.getStdDeviationBound(Bounds.UPPER_SAMPLING), s2.getStdDeviationBound(Bounds.UPPER_SAMPLING), 1e-10);} + assertEquals(s1.getStdDeviationBound(Bounds.UPPER_SAMPLING), s2.getStdDeviationBound(Bounds.UPPER_SAMPLING), 1e-10); + } @Override public void testSingleValuedField() throws Exception { double sigma = randomDouble() * randomIntBetween(1, 10); SearchResponse searchResponse = client().prepareSearch("idx") - .setQuery(matchAllQuery()) - .addAggregation(extendedStats("stats").field("value").sigma(sigma)) - .get(); + .setQuery(matchAllQuery()) + .addAggregation(extendedStats("stats").field("value").sigma(sigma)) + .get(); assertHitCount(searchResponse, 10); ExtendedStats stats = 
searchResponse.getAggregations().get("stats"); assertThat(stats, notNullValue()); assertThat(stats.getName(), equalTo("stats")); - assertThat(stats.getAvg(), equalTo((double) (1+2+3+4+5+6+7+8+9+10) / 10)); + assertThat(stats.getAvg(), equalTo((double) (1 + 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10) / 10)); assertThat(stats.getMin(), equalTo(1.0)); assertThat(stats.getMax(), equalTo(10.0)); - assertThat(stats.getSum(), equalTo((double) 1+2+3+4+5+6+7+8+9+10)); + assertThat(stats.getSum(), equalTo((double) 1 + 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10)); assertThat(stats.getCount(), equalTo(10L)); - assertThat(stats.getSumOfSquares(), equalTo((double) 1+4+9+16+25+36+49+64+81+100)); + assertThat(stats.getSumOfSquares(), equalTo((double) 1 + 4 + 9 + 16 + 25 + 36 + 49 + 64 + 81 + 100)); assertThat(stats.getVariance(), equalTo(variance(1, 2, 3, 4, 5, 6, 7, 8, 9, 10))); assertThat(stats.getVariancePopulation(), equalTo(variancePopulation(1, 2, 3, 4, 5, 6, 7, 8, 9, 10))); assertThat(stats.getVarianceSampling(), equalTo(varianceSampling(1, 2, 3, 4, 5, 6, 7, 8, 9, 10))); @@ -209,21 +216,21 @@ public void testSingleValuedFieldDefaultSigma() throws Exception { // Same as previous test, but uses a default value for sigma SearchResponse searchResponse = client().prepareSearch("idx") - .setQuery(matchAllQuery()) - .addAggregation(extendedStats("stats").field("value")) - .get(); + .setQuery(matchAllQuery()) + .addAggregation(extendedStats("stats").field("value")) + .get(); assertHitCount(searchResponse, 10); ExtendedStats stats = searchResponse.getAggregations().get("stats"); assertThat(stats, notNullValue()); assertThat(stats.getName(), equalTo("stats")); - assertThat(stats.getAvg(), equalTo((double) (1+2+3+4+5+6+7+8+9+10) / 10)); + assertThat(stats.getAvg(), equalTo((double) (1 + 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10) / 10)); assertThat(stats.getMin(), equalTo(1.0)); assertThat(stats.getMax(), equalTo(10.0)); - assertThat(stats.getSum(), equalTo((double) 1+2+3+4+5+6+7+8+9+10)); + assertThat(stats.getSum(), equalTo((double) 1 + 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10)); assertThat(stats.getCount(), equalTo(10L)); - assertThat(stats.getSumOfSquares(), equalTo((double) 1+4+9+16+25+36+49+64+81+100)); + assertThat(stats.getSumOfSquares(), equalTo((double) 1 + 4 + 9 + 16 + 25 + 36 + 49 + 64 + 81 + 100)); assertThat(stats.getVariance(), equalTo(variance(1, 2, 3, 4, 5, 6, 7, 8, 9, 10))); assertThat(stats.getVariancePopulation(), equalTo(variancePopulation(1, 2, 3, 4, 5, 6, 7, 8, 9, 10))); assertThat(stats.getVarianceSampling(), equalTo(varianceSampling(1, 2, 3, 4, 5, 6, 7, 8, 9, 10))); @@ -235,8 +242,10 @@ public void testSingleValuedFieldDefaultSigma() throws Exception { public void testSingleValuedField_WithFormatter() throws Exception { double sigma = randomDouble() * randomIntBetween(1, 10); - SearchResponse searchResponse = client().prepareSearch("idx").setQuery(matchAllQuery()) - .addAggregation(extendedStats("stats").format("0000.0").field("value").sigma(sigma)).get(); + SearchResponse searchResponse = client().prepareSearch("idx") + .setQuery(matchAllQuery()) + .addAggregation(extendedStats("stats").format("0000.0").field("value").sigma(sigma)) + .get(); assertHitCount(searchResponse, 10); @@ -271,8 +280,10 @@ public void testSingleValuedField_WithFormatter() throws Exception { @Override public void testSingleValuedFieldGetProperty() throws Exception { - SearchResponse searchResponse = client().prepareSearch("idx").setQuery(matchAllQuery()) - 
.addAggregation(global("global").subAggregation(extendedStats("stats").field("value"))).get(); + SearchResponse searchResponse = client().prepareSearch("idx") + .setQuery(matchAllQuery()) + .addAggregation(global("global").subAggregation(extendedStats("stats").field("value"))) + .get(); assertHitCount(searchResponse, 10); @@ -286,69 +297,75 @@ public void testSingleValuedFieldGetProperty() throws Exception { ExtendedStats stats = global.getAggregations().get("stats"); assertThat(stats, notNullValue()); assertThat(stats.getName(), equalTo("stats")); - ExtendedStats statsFromProperty = (ExtendedStats) ((InternalAggregation)global).getProperty("stats"); + ExtendedStats statsFromProperty = (ExtendedStats) ((InternalAggregation) global).getProperty("stats"); assertThat(statsFromProperty, notNullValue()); assertThat(statsFromProperty, sameInstance(stats)); double expectedAvgValue = (double) (1 + 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10) / 10; assertThat(stats.getAvg(), equalTo(expectedAvgValue)); - assertThat((double) ((InternalAggregation)global).getProperty("stats.avg"), equalTo(expectedAvgValue)); + assertThat((double) ((InternalAggregation) global).getProperty("stats.avg"), equalTo(expectedAvgValue)); double expectedMinValue = 1.0; assertThat(stats.getMin(), equalTo(expectedMinValue)); - assertThat((double) ((InternalAggregation)global).getProperty("stats.min"), equalTo(expectedMinValue)); + assertThat((double) ((InternalAggregation) global).getProperty("stats.min"), equalTo(expectedMinValue)); double expectedMaxValue = 10.0; assertThat(stats.getMax(), equalTo(expectedMaxValue)); - assertThat((double) ((InternalAggregation)global).getProperty("stats.max"), equalTo(expectedMaxValue)); + assertThat((double) ((InternalAggregation) global).getProperty("stats.max"), equalTo(expectedMaxValue)); double expectedSumValue = 1 + 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10; assertThat(stats.getSum(), equalTo(expectedSumValue)); - assertThat((double) ((InternalAggregation)global).getProperty("stats.sum"), equalTo(expectedSumValue)); + assertThat((double) ((InternalAggregation) global).getProperty("stats.sum"), equalTo(expectedSumValue)); long expectedCountValue = 10; assertThat(stats.getCount(), equalTo(expectedCountValue)); - assertThat((double) ((InternalAggregation)global).getProperty("stats.count"), equalTo((double) expectedCountValue)); + assertThat((double) ((InternalAggregation) global).getProperty("stats.count"), equalTo((double) expectedCountValue)); double expectedSumOfSquaresValue = (double) 1 + 4 + 9 + 16 + 25 + 36 + 49 + 64 + 81 + 100; assertThat(stats.getSumOfSquares(), equalTo(expectedSumOfSquaresValue)); - assertThat((double) ((InternalAggregation)global).getProperty("stats.sum_of_squares"), equalTo(expectedSumOfSquaresValue)); + assertThat((double) ((InternalAggregation) global).getProperty("stats.sum_of_squares"), equalTo(expectedSumOfSquaresValue)); double expectedVarianceValue = variance(1, 2, 3, 4, 5, 6, 7, 8, 9, 10); assertThat(stats.getVariance(), equalTo(expectedVarianceValue)); - assertThat((double) ((InternalAggregation)global).getProperty("stats.variance"), equalTo(expectedVarianceValue)); + assertThat((double) ((InternalAggregation) global).getProperty("stats.variance"), equalTo(expectedVarianceValue)); double expectedVariancePopulationValue = variancePopulation(1, 2, 3, 4, 5, 6, 7, 8, 9, 10); assertThat(stats.getVariancePopulation(), equalTo(expectedVariancePopulationValue)); - assertThat((double) ((InternalAggregation)global).getProperty("stats.variance_population"), - 
equalTo(expectedVariancePopulationValue)); + assertThat( + (double) ((InternalAggregation) global).getProperty("stats.variance_population"), + equalTo(expectedVariancePopulationValue) + ); double expectedVarianceSamplingValue = varianceSampling(1, 2, 3, 4, 5, 6, 7, 8, 9, 10); assertThat(stats.getVarianceSampling(), equalTo(expectedVarianceSamplingValue)); - assertThat((double) ((InternalAggregation)global).getProperty("stats.variance_sampling"), equalTo(expectedVarianceSamplingValue)); + assertThat((double) ((InternalAggregation) global).getProperty("stats.variance_sampling"), equalTo(expectedVarianceSamplingValue)); double expectedStdDevValue = stdDev(1, 2, 3, 4, 5, 6, 7, 8, 9, 10); assertThat(stats.getStdDeviation(), equalTo(expectedStdDevValue)); - assertThat((double) ((InternalAggregation)global).getProperty("stats.std_deviation"), equalTo(expectedStdDevValue)); + assertThat((double) ((InternalAggregation) global).getProperty("stats.std_deviation"), equalTo(expectedStdDevValue)); double expectedStdDevPopulationValue = stdDevPopulation(1, 2, 3, 4, 5, 6, 7, 8, 9, 10); assertThat(stats.getStdDeviationPopulation(), equalTo(expectedStdDevValue)); - assertThat((double) ((InternalAggregation)global).getProperty("stats.std_deviation_population"), - equalTo(expectedStdDevPopulationValue)); + assertThat( + (double) ((InternalAggregation) global).getProperty("stats.std_deviation_population"), + equalTo(expectedStdDevPopulationValue) + ); double expectedStdDevSamplingValue = stdDevSampling(1, 2, 3, 4, 5, 6, 7, 8, 9, 10); assertThat(stats.getStdDeviationSampling(), equalTo(expectedStdDevSamplingValue)); - assertThat((double) ((InternalAggregation)global).getProperty("stats.std_deviation_sampling"), - equalTo(expectedStdDevSamplingValue)); + assertThat( + (double) ((InternalAggregation) global).getProperty("stats.std_deviation_sampling"), + equalTo(expectedStdDevSamplingValue) + ); } @Override public void testSingleValuedFieldPartiallyUnmapped() throws Exception { double sigma = randomDouble() * randomIntBetween(1, 10); SearchResponse searchResponse = client().prepareSearch("idx", "idx_unmapped") - .setQuery(matchAllQuery()) - .addAggregation(extendedStats("stats").field("value").sigma(sigma)) - .get(); + .setQuery(matchAllQuery()) + .addAggregation(extendedStats("stats").field("value").sigma(sigma)) + .get(); assertHitCount(searchResponse, 10); ExtendedStats stats = searchResponse.getAggregations().get("stats"); assertThat(stats, notNullValue()); assertThat(stats.getName(), equalTo("stats")); - assertThat(stats.getAvg(), equalTo((double) (1+2+3+4+5+6+7+8+9+10) / 10)); + assertThat(stats.getAvg(), equalTo((double) (1 + 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10) / 10)); assertThat(stats.getMin(), equalTo(1.0)); assertThat(stats.getMax(), equalTo(10.0)); - assertThat(stats.getSum(), equalTo((double) 1+2+3+4+5+6+7+8+9+10)); + assertThat(stats.getSum(), equalTo((double) 1 + 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10)); assertThat(stats.getCount(), equalTo(10L)); - assertThat(stats.getSumOfSquares(), equalTo((double) 1+4+9+16+25+36+49+64+81+100)); + assertThat(stats.getSumOfSquares(), equalTo((double) 1 + 4 + 9 + 16 + 25 + 36 + 49 + 64 + 81 + 100)); assertThat(stats.getVariance(), equalTo(variance(1, 2, 3, 4, 5, 6, 7, 8, 9, 10))); assertThat(stats.getVariancePopulation(), equalTo(variancePopulation(1, 2, 3, 4, 5, 6, 7, 8, 9, 10))); assertThat(stats.getVarianceSampling(), equalTo(varianceSampling(1, 2, 3, 4, 5, 6, 7, 8, 9, 10))); @@ -362,26 +379,25 @@ public void testSingleValuedFieldPartiallyUnmapped() throws Exception 
{ public void testSingleValuedFieldWithValueScript() throws Exception { double sigma = randomDouble() * randomIntBetween(1, 10); SearchResponse searchResponse = client().prepareSearch("idx") - .setQuery(matchAllQuery()) - .addAggregation( - extendedStats("stats") - .field("value") - .script(new Script(ScriptType.INLINE, - AggregationTestScriptsPlugin.NAME, "_value + 1", Collections.emptyMap())) - .sigma(sigma)) - .get(); + .setQuery(matchAllQuery()) + .addAggregation( + extendedStats("stats").field("value") + .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value + 1", Collections.emptyMap())) + .sigma(sigma) + ) + .get(); assertHitCount(searchResponse, 10); ExtendedStats stats = searchResponse.getAggregations().get("stats"); assertThat(stats, notNullValue()); assertThat(stats.getName(), equalTo("stats")); - assertThat(stats.getAvg(), equalTo((double) (2+3+4+5+6+7+8+9+10+11) / 10)); + assertThat(stats.getAvg(), equalTo((double) (2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10 + 11) / 10)); assertThat(stats.getMin(), equalTo(2.0)); assertThat(stats.getMax(), equalTo(11.0)); - assertThat(stats.getSum(), equalTo((double) 2+3+4+5+6+7+8+9+10+11)); + assertThat(stats.getSum(), equalTo((double) 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10 + 11)); assertThat(stats.getCount(), equalTo(10L)); - assertThat(stats.getSumOfSquares(), equalTo((double) 4+9+16+25+36+49+64+81+100+121)); + assertThat(stats.getSumOfSquares(), equalTo((double) 4 + 9 + 16 + 25 + 36 + 49 + 64 + 81 + 100 + 121)); assertThat(stats.getVariance(), equalTo(variance(2, 3, 4, 5, 6, 7, 8, 9, 10, 11))); assertThat(stats.getVariancePopulation(), equalTo(variancePopulation(2, 3, 4, 5, 6, 7, 8, 9, 10, 11))); assertThat(stats.getVarianceSampling(), equalTo(varianceSampling(2, 3, 4, 5, 6, 7, 8, 9, 10, 11))); @@ -397,25 +413,25 @@ public void testSingleValuedFieldWithValueScriptWithParams() throws Exception { params.put("inc", 1); double sigma = randomDouble() * randomIntBetween(1, 10); SearchResponse searchResponse = client().prepareSearch("idx") - .setQuery(matchAllQuery()) - .addAggregation( - extendedStats("stats") - .field("value") - .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value + inc", params)) - .sigma(sigma)) - .get(); + .setQuery(matchAllQuery()) + .addAggregation( + extendedStats("stats").field("value") + .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value + inc", params)) + .sigma(sigma) + ) + .get(); assertHitCount(searchResponse, 10); ExtendedStats stats = searchResponse.getAggregations().get("stats"); assertThat(stats, notNullValue()); assertThat(stats.getName(), equalTo("stats")); - assertThat(stats.getAvg(), equalTo((double) (2+3+4+5+6+7+8+9+10+11) / 10)); + assertThat(stats.getAvg(), equalTo((double) (2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10 + 11) / 10)); assertThat(stats.getMin(), equalTo(2.0)); assertThat(stats.getMax(), equalTo(11.0)); - assertThat(stats.getSum(), equalTo((double) 2+3+4+5+6+7+8+9+10+11)); + assertThat(stats.getSum(), equalTo((double) 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10 + 11)); assertThat(stats.getCount(), equalTo(10L)); - assertThat(stats.getSumOfSquares(), equalTo((double) 4+9+16+25+36+49+64+81+100+121)); + assertThat(stats.getSumOfSquares(), equalTo((double) 4 + 9 + 16 + 25 + 36 + 49 + 64 + 81 + 100 + 121)); assertThat(stats.getVariance(), equalTo(variance(2, 3, 4, 5, 6, 7, 8, 9, 10, 11))); assertThat(stats.getVariancePopulation(), equalTo(variancePopulation(2, 3, 4, 5, 6, 7, 8, 9, 10, 11))); assertThat(stats.getVarianceSampling(), 
equalTo(varianceSampling(2, 3, 4, 5, 6, 7, 8, 9, 10, 11))); @@ -429,31 +445,42 @@ public void testSingleValuedFieldWithValueScriptWithParams() throws Exception { public void testMultiValuedField() throws Exception { double sigma = randomDouble() * randomIntBetween(1, 10); SearchResponse searchResponse = client().prepareSearch("idx") - .setQuery(matchAllQuery()) - .addAggregation(extendedStats("stats").field("values").sigma(sigma)) - .get(); + .setQuery(matchAllQuery()) + .addAggregation(extendedStats("stats").field("values").sigma(sigma)) + .get(); assertHitCount(searchResponse, 10); ExtendedStats stats = searchResponse.getAggregations().get("stats"); assertThat(stats, notNullValue()); assertThat(stats.getName(), equalTo("stats")); - assertThat(stats.getAvg(), equalTo((double) (2+3+4+5+6+7+8+9+10+11+3+4+5+6+7+8+9+10+11+12) / 20)); + assertThat( + stats.getAvg(), + equalTo((double) (2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10 + 11 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10 + 11 + 12) / 20) + ); assertThat(stats.getMin(), equalTo(2.0)); assertThat(stats.getMax(), equalTo(12.0)); - assertThat(stats.getSum(), equalTo((double) 2+3+4+5+6+7+8+9+10+11+3+4+5+6+7+8+9+10+11+12)); + assertThat(stats.getSum(), equalTo((double) 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10 + 11 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10 + 11 + 12)); assertThat(stats.getCount(), equalTo(20L)); - assertThat(stats.getSumOfSquares(), equalTo((double) 4+9+16+25+36+49+64+81+100+121+9+16+25+36+49+64+81+100+121+144)); + assertThat( + stats.getSumOfSquares(), + equalTo((double) 4 + 9 + 16 + 25 + 36 + 49 + 64 + 81 + 100 + 121 + 9 + 16 + 25 + 36 + 49 + 64 + 81 + 100 + 121 + 144) + ); assertThat(stats.getVariance(), equalTo(variance(2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12))); - assertThat(stats.getVariancePopulation(), - equalTo(variancePopulation(2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12))); - assertThat(stats.getVarianceSampling(), - equalTo(varianceSampling(2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12))); + assertThat( + stats.getVariancePopulation(), + equalTo(variancePopulation(2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12)) + ); + assertThat(stats.getVarianceSampling(), equalTo(varianceSampling(2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12))); assertThat(stats.getStdDeviation(), equalTo(stdDev(2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12))); - assertThat(stats.getStdDeviationPopulation(), - equalTo(stdDevPopulation(2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12))); - assertThat(stats.getStdDeviationSampling(), - equalTo(stdDevSampling(2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12))); + assertThat( + stats.getStdDeviationPopulation(), + equalTo(stdDevPopulation(2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12)) + ); + assertThat( + stats.getStdDeviationSampling(), + equalTo(stdDevSampling(2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12)) + ); checkUpperLowerBounds(stats, sigma); } @@ -461,34 +488,43 @@ public void testMultiValuedField() throws Exception { public void testMultiValuedFieldWithValueScript() throws Exception { double sigma = randomDouble() * randomIntBetween(1, 10); SearchResponse searchResponse = client().prepareSearch("idx") - .setQuery(matchAllQuery()) - .addAggregation( - extendedStats("stats") - .field("values") - .script(new Script(ScriptType.INLINE, - AggregationTestScriptsPlugin.NAME, "_value - 1", Collections.emptyMap())) - .sigma(sigma)) - .get(); + 
.setQuery(matchAllQuery()) + .addAggregation( + extendedStats("stats").field("values") + .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value - 1", Collections.emptyMap())) + .sigma(sigma) + ) + .get(); assertHitCount(searchResponse, 10); ExtendedStats stats = searchResponse.getAggregations().get("stats"); assertThat(stats, notNullValue()); assertThat(stats.getName(), equalTo("stats")); - assertThat(stats.getAvg(), equalTo((double) (1+2+3+4+5+6+7+8+9+10+2+3+4+5+6+7+8+9+10+11) / 20)); + assertThat( + stats.getAvg(), + equalTo((double) (1 + 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10 + 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10 + 11) / 20) + ); assertThat(stats.getMin(), equalTo(1.0)); assertThat(stats.getMax(), equalTo(11.0)); - assertThat(stats.getSum(), equalTo((double) 1+2+3+4+5+6+7+8+9+10+2+3+4+5+6+7+8+9+10+11)); + assertThat(stats.getSum(), equalTo((double) 1 + 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10 + 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10 + 11)); assertThat(stats.getCount(), equalTo(20L)); - assertThat(stats.getSumOfSquares(), equalTo((double) 1+4+9+16+25+36+49+64+81+100+4+9+16+25+36+49+64+81+100+121)); - assertThat(stats.getVariance(), equalTo(variance(1, 2, 3, 4, 5, 6, 7, 8 ,9, 10, 2, 3, 4, 5, 6, 7, 8 ,9, 10, 11))); - assertThat(stats.getVariancePopulation(), - equalTo(variancePopulation(1, 2, 3, 4, 5, 6, 7, 8 ,9, 10, 2, 3, 4, 5, 6, 7, 8 ,9, 10, 11))); - assertThat(stats.getVarianceSampling(), equalTo(varianceSampling(1, 2, 3, 4, 5, 6, 7, 8 ,9, 10, 2, 3, 4, 5, 6, 7, 8 ,9, 10, 11))); - assertThat(stats.getStdDeviation(), equalTo(stdDev(1, 2, 3, 4, 5, 6, 7, 8 ,9, 10, 2, 3, 4, 5, 6, 7, 8 ,9, 10, 11))); - assertThat(stats.getStdDeviationPopulation(), - equalTo(stdDevPopulation(1, 2, 3, 4, 5, 6, 7, 8 ,9, 10, 2, 3, 4, 5, 6, 7, 8 ,9, 10, 11))); - assertThat(stats.getStdDeviationSampling(), equalTo(stdDevSampling(1, 2, 3, 4, 5, 6, 7, 8 ,9, 10, 2, 3, 4, 5, 6, 7, 8 ,9, 10, 11))); + assertThat( + stats.getSumOfSquares(), + equalTo((double) 1 + 4 + 9 + 16 + 25 + 36 + 49 + 64 + 81 + 100 + 4 + 9 + 16 + 25 + 36 + 49 + 64 + 81 + 100 + 121) + ); + assertThat(stats.getVariance(), equalTo(variance(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11))); + assertThat( + stats.getVariancePopulation(), + equalTo(variancePopulation(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11)) + ); + assertThat(stats.getVarianceSampling(), equalTo(varianceSampling(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11))); + assertThat(stats.getStdDeviation(), equalTo(stdDev(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11))); + assertThat( + stats.getStdDeviationPopulation(), + equalTo(stdDevPopulation(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11)) + ); + assertThat(stats.getStdDeviationSampling(), equalTo(stdDevSampling(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11))); checkUpperLowerBounds(stats, sigma); } @@ -499,33 +535,43 @@ public void testMultiValuedFieldWithValueScriptWithParams() throws Exception { params.put("dec", 1); double sigma = randomDouble() * randomIntBetween(1, 10); SearchResponse searchResponse = client().prepareSearch("idx") - .setQuery(matchAllQuery()) - .addAggregation( - extendedStats("stats") - .field("values") - .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value - dec", params)) - .sigma(sigma)) - .get(); + .setQuery(matchAllQuery()) + .addAggregation( + extendedStats("stats").field("values") + .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value - dec", 
params)) + .sigma(sigma) + ) + .get(); assertHitCount(searchResponse, 10); ExtendedStats stats = searchResponse.getAggregations().get("stats"); assertThat(stats, notNullValue()); assertThat(stats.getName(), equalTo("stats")); - assertThat(stats.getAvg(), equalTo((double) (1+2+3+4+5+6+7+8+9+10+2+3+4+5+6+7+8+9+10+11) / 20)); + assertThat( + stats.getAvg(), + equalTo((double) (1 + 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10 + 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10 + 11) / 20) + ); assertThat(stats.getMin(), equalTo(1.0)); assertThat(stats.getMax(), equalTo(11.0)); - assertThat(stats.getSum(), equalTo((double) 1+2+3+4+5+6+7+8+9+10+2+3+4+5+6+7+8+9+10+11)); + assertThat(stats.getSum(), equalTo((double) 1 + 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10 + 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10 + 11)); assertThat(stats.getCount(), equalTo(20L)); - assertThat(stats.getSumOfSquares(), equalTo((double) 1+4+9+16+25+36+49+64+81+100+4+9+16+25+36+49+64+81+100+121)); - assertThat(stats.getVariance(), equalTo(variance(1, 2, 3, 4, 5, 6, 7, 8 ,9, 10, 2, 3, 4, 5, 6, 7, 8 ,9, 10, 11))); - assertThat(stats.getVariancePopulation(), - equalTo(variancePopulation(1, 2, 3, 4, 5, 6, 7, 8 ,9, 10, 2, 3, 4, 5, 6, 7, 8 ,9, 10, 11))); - assertThat(stats.getVarianceSampling(), equalTo(varianceSampling(1, 2, 3, 4, 5, 6, 7, 8 ,9, 10, 2, 3, 4, 5, 6, 7, 8 ,9, 10, 11))); - assertThat(stats.getStdDeviation(), equalTo(stdDev(1, 2, 3, 4, 5, 6, 7, 8 ,9, 10, 2, 3, 4, 5, 6, 7, 8 ,9, 10, 11))); - assertThat(stats.getStdDeviationPopulation(), - equalTo(stdDevPopulation(1, 2, 3, 4, 5, 6, 7, 8 ,9, 10, 2, 3, 4, 5, 6, 7, 8 ,9, 10, 11))); - assertThat(stats.getStdDeviationSampling(), equalTo(stdDevSampling(1, 2, 3, 4, 5, 6, 7, 8 ,9, 10, 2, 3, 4, 5, 6, 7, 8 ,9, 10, 11))); + assertThat( + stats.getSumOfSquares(), + equalTo((double) 1 + 4 + 9 + 16 + 25 + 36 + 49 + 64 + 81 + 100 + 4 + 9 + 16 + 25 + 36 + 49 + 64 + 81 + 100 + 121) + ); + assertThat(stats.getVariance(), equalTo(variance(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11))); + assertThat( + stats.getVariancePopulation(), + equalTo(variancePopulation(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11)) + ); + assertThat(stats.getVarianceSampling(), equalTo(varianceSampling(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11))); + assertThat(stats.getStdDeviation(), equalTo(stdDev(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11))); + assertThat( + stats.getStdDeviationPopulation(), + equalTo(stdDevPopulation(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11)) + ); + assertThat(stats.getStdDeviationSampling(), equalTo(stdDevSampling(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11))); checkUpperLowerBounds(stats, sigma); } @@ -533,31 +579,31 @@ public void testMultiValuedFieldWithValueScriptWithParams() throws Exception { public void testScriptSingleValued() throws Exception { double sigma = randomDouble() * randomIntBetween(1, 10); SearchResponse searchResponse = client().prepareSearch("idx") - .setQuery(matchAllQuery()) - .addAggregation( - extendedStats("stats") - .script(new Script(ScriptType.INLINE, - AggregationTestScriptsPlugin.NAME, "doc['value'].value", Collections.emptyMap())) - .sigma(sigma)) - .get(); + .setQuery(matchAllQuery()) + .addAggregation( + extendedStats("stats").script( + new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "doc['value'].value", Collections.emptyMap()) + ).sigma(sigma) + ) + .get(); assertHitCount(searchResponse, 10); ExtendedStats stats = searchResponse.getAggregations().get("stats"); 
assertThat(stats, notNullValue()); assertThat(stats.getName(), equalTo("stats")); - assertThat(stats.getAvg(), equalTo((double) (1+2+3+4+5+6+7+8+9+10) / 10)); + assertThat(stats.getAvg(), equalTo((double) (1 + 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10) / 10)); assertThat(stats.getMin(), equalTo(1.0)); assertThat(stats.getMax(), equalTo(10.0)); - assertThat(stats.getSum(), equalTo((double) 1+2+3+4+5+6+7+8+9+10)); + assertThat(stats.getSum(), equalTo((double) 1 + 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10)); assertThat(stats.getCount(), equalTo(10L)); - assertThat(stats.getSumOfSquares(), equalTo((double) 1+4+9+16+25+36+49+64+81+100)); - assertThat(stats.getVariance(), equalTo(variance(1, 2, 3, 4, 5, 6, 7, 8 ,9, 10))); - assertThat(stats.getVariancePopulation(), equalTo(variancePopulation(1, 2, 3, 4, 5, 6, 7, 8 ,9, 10))); - assertThat(stats.getVarianceSampling(), equalTo(varianceSampling(1, 2, 3, 4, 5, 6, 7, 8 ,9, 10))); - assertThat(stats.getStdDeviation(), equalTo(stdDev(1, 2, 3, 4, 5, 6, 7, 8 ,9, 10))); - assertThat(stats.getStdDeviationPopulation(), equalTo(stdDevPopulation(1, 2, 3, 4, 5, 6, 7, 8 ,9, 10))); - assertThat(stats.getStdDeviationSampling(), equalTo(stdDevSampling(1, 2, 3, 4, 5, 6, 7, 8 ,9, 10))); + assertThat(stats.getSumOfSquares(), equalTo((double) 1 + 4 + 9 + 16 + 25 + 36 + 49 + 64 + 81 + 100)); + assertThat(stats.getVariance(), equalTo(variance(1, 2, 3, 4, 5, 6, 7, 8, 9, 10))); + assertThat(stats.getVariancePopulation(), equalTo(variancePopulation(1, 2, 3, 4, 5, 6, 7, 8, 9, 10))); + assertThat(stats.getVarianceSampling(), equalTo(varianceSampling(1, 2, 3, 4, 5, 6, 7, 8, 9, 10))); + assertThat(stats.getStdDeviation(), equalTo(stdDev(1, 2, 3, 4, 5, 6, 7, 8, 9, 10))); + assertThat(stats.getStdDeviationPopulation(), equalTo(stdDevPopulation(1, 2, 3, 4, 5, 6, 7, 8, 9, 10))); + assertThat(stats.getStdDeviationSampling(), equalTo(stdDevSampling(1, 2, 3, 4, 5, 6, 7, 8, 9, 10))); checkUpperLowerBounds(stats, sigma); } @@ -570,30 +616,27 @@ public void testScriptSingleValuedWithParams() throws Exception { double sigma = randomDouble() * randomIntBetween(1, 10); SearchResponse searchResponse = client().prepareSearch("idx") - .setQuery(matchAllQuery()) - .addAggregation( - extendedStats("stats") - .script(script) - .sigma(sigma)) - .get(); + .setQuery(matchAllQuery()) + .addAggregation(extendedStats("stats").script(script).sigma(sigma)) + .get(); assertHitCount(searchResponse, 10); ExtendedStats stats = searchResponse.getAggregations().get("stats"); assertThat(stats, notNullValue()); assertThat(stats.getName(), equalTo("stats")); - assertThat(stats.getAvg(), equalTo((double) (2+3+4+5+6+7+8+9+10+11) / 10)); + assertThat(stats.getAvg(), equalTo((double) (2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10 + 11) / 10)); assertThat(stats.getMin(), equalTo(2.0)); assertThat(stats.getMax(), equalTo(11.0)); - assertThat(stats.getSum(), equalTo((double) 2+3+4+5+6+7+8+9+10+11)); + assertThat(stats.getSum(), equalTo((double) 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10 + 11)); assertThat(stats.getCount(), equalTo(10L)); - assertThat(stats.getSumOfSquares(), equalTo((double) 4+9+16+25+36+49+64+81+100+121)); - assertThat(stats.getVariance(), equalTo(variance(2, 3, 4, 5, 6, 7, 8 ,9, 10, 11))); - assertThat(stats.getVariancePopulation(), equalTo(variancePopulation(2, 3, 4, 5, 6, 7, 8 ,9, 10, 11))); - assertThat(stats.getVarianceSampling(), equalTo(varianceSampling(2, 3, 4, 5, 6, 7, 8 ,9, 10, 11))); - assertThat(stats.getStdDeviation(), equalTo(stdDev(2, 3, 4, 5, 6, 7, 8 ,9, 10, 11))); - assertThat(stats.getStdDeviationPopulation(), 
equalTo(stdDevPopulation(2, 3, 4, 5, 6, 7, 8 ,9, 10, 11))); - assertThat(stats.getStdDeviationSampling(), equalTo(stdDevSampling(2, 3, 4, 5, 6, 7, 8 ,9, 10, 11))); + assertThat(stats.getSumOfSquares(), equalTo((double) 4 + 9 + 16 + 25 + 36 + 49 + 64 + 81 + 100 + 121)); + assertThat(stats.getVariance(), equalTo(variance(2, 3, 4, 5, 6, 7, 8, 9, 10, 11))); + assertThat(stats.getVariancePopulation(), equalTo(variancePopulation(2, 3, 4, 5, 6, 7, 8, 9, 10, 11))); + assertThat(stats.getVarianceSampling(), equalTo(varianceSampling(2, 3, 4, 5, 6, 7, 8, 9, 10, 11))); + assertThat(stats.getStdDeviation(), equalTo(stdDev(2, 3, 4, 5, 6, 7, 8, 9, 10, 11))); + assertThat(stats.getStdDeviationPopulation(), equalTo(stdDevPopulation(2, 3, 4, 5, 6, 7, 8, 9, 10, 11))); + assertThat(stats.getStdDeviationSampling(), equalTo(stdDevSampling(2, 3, 4, 5, 6, 7, 8, 9, 10, 11))); checkUpperLowerBounds(stats, sigma); } @@ -601,35 +644,46 @@ public void testScriptSingleValuedWithParams() throws Exception { public void testScriptMultiValued() throws Exception { double sigma = randomDouble() * randomIntBetween(1, 10); SearchResponse searchResponse = client().prepareSearch("idx") - .setQuery(matchAllQuery()) - .addAggregation( - extendedStats("stats") - .script(new Script(ScriptType.INLINE, - AggregationTestScriptsPlugin.NAME, "doc['values']", Collections.emptyMap())) - .sigma(sigma)) - .get(); + .setQuery(matchAllQuery()) + .addAggregation( + extendedStats("stats").script( + new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "doc['values']", Collections.emptyMap()) + ).sigma(sigma) + ) + .get(); assertHitCount(searchResponse, 10); ExtendedStats stats = searchResponse.getAggregations().get("stats"); assertThat(stats, notNullValue()); assertThat(stats.getName(), equalTo("stats")); - assertThat(stats.getAvg(), equalTo((double) (2+3+4+5+6+7+8+9+10+11+3+4+5+6+7+8+9+10+11+12) / 20)); + assertThat( + stats.getAvg(), + equalTo((double) (2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10 + 11 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10 + 11 + 12) / 20) + ); assertThat(stats.getMin(), equalTo(2.0)); assertThat(stats.getMax(), equalTo(12.0)); - assertThat(stats.getSum(), equalTo((double) 2+3+4+5+6+7+8+9+10+11+3+4+5+6+7+8+9+10+11+12)); + assertThat(stats.getSum(), equalTo((double) 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10 + 11 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10 + 11 + 12)); assertThat(stats.getCount(), equalTo(20L)); - assertThat(stats.getSumOfSquares(), equalTo((double) 4+9+16+25+36+49+64+81+100+121+9+16+25+36+49+64+81+100+121+144)); - assertThat(stats.getVariance(), equalTo(variance(2, 3, 4, 5, 6, 7, 8 ,9, 10, 11, 3, 4, 5, 6, 7, 8 ,9, 10, 11, 12))); - assertThat(stats.getVariancePopulation(), - equalTo(variancePopulation(2, 3, 4, 5, 6, 7, 8 ,9, 10, 11, 3, 4, 5, 6, 7, 8 ,9, 10, 11, 12))); - assertThat(stats.getVarianceSampling(), - equalTo(varianceSampling(2, 3, 4, 5, 6, 7, 8 ,9, 10, 11, 3, 4, 5, 6, 7, 8 ,9, 10, 11, 12))); - assertThat(stats.getStdDeviation(), equalTo(stdDev(2, 3, 4, 5, 6, 7, 8 ,9, 10, 11, 3, 4, 5, 6, 7, 8 ,9, 10, 11, 12))); - assertThat(stats.getStdDeviationPopulation(), - equalTo(stdDevPopulation(2, 3, 4, 5, 6, 7, 8 ,9, 10, 11, 3, 4, 5, 6, 7, 8 ,9, 10, 11, 12))); - assertThat(stats.getStdDeviationSampling(), - equalTo(stdDevSampling(2, 3, 4, 5, 6, 7, 8 ,9, 10, 11, 3, 4, 5, 6, 7, 8 ,9, 10, 11, 12))); + assertThat( + stats.getSumOfSquares(), + equalTo((double) 4 + 9 + 16 + 25 + 36 + 49 + 64 + 81 + 100 + 121 + 9 + 16 + 25 + 36 + 49 + 64 + 81 + 100 + 121 + 144) + ); + assertThat(stats.getVariance(), equalTo(variance(2, 3, 4, 5, 6, 7, 8, 9, 
10, 11, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12))); + assertThat( + stats.getVariancePopulation(), + equalTo(variancePopulation(2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12)) + ); + assertThat(stats.getVarianceSampling(), equalTo(varianceSampling(2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12))); + assertThat(stats.getStdDeviation(), equalTo(stdDev(2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12))); + assertThat( + stats.getStdDeviationPopulation(), + equalTo(stdDevPopulation(2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12)) + ); + assertThat( + stats.getStdDeviationSampling(), + equalTo(stdDevSampling(2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12)) + ); checkUpperLowerBounds(stats, sigma); } @@ -638,46 +692,52 @@ public void testScriptMultiValuedWithParams() throws Exception { Map params = new HashMap<>(); params.put("dec", 1); - Script script = new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "[ doc['value'].value, doc['value'].value - dec ]", - params); + Script script = new Script( + ScriptType.INLINE, + AggregationTestScriptsPlugin.NAME, + "[ doc['value'].value, doc['value'].value - dec ]", + params + ); double sigma = randomDouble() * randomIntBetween(1, 10); SearchResponse searchResponse = client().prepareSearch("idx") - .setQuery(matchAllQuery()) - .addAggregation( - extendedStats("stats") - .script(script) - .sigma(sigma)) - .get(); + .setQuery(matchAllQuery()) + .addAggregation(extendedStats("stats").script(script).sigma(sigma)) + .get(); assertHitCount(searchResponse, 10); ExtendedStats stats = searchResponse.getAggregations().get("stats"); assertThat(stats, notNullValue()); assertThat(stats.getName(), equalTo("stats")); - assertThat(stats.getAvg(), equalTo((double) (1+2+3+4+5+6+7+8+9+10+0+1+2+3+4+5+6+7+8+9) / 20)); + assertThat(stats.getAvg(), equalTo((double) (1 + 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10 + 0 + 1 + 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9) / 20)); assertThat(stats.getMin(), equalTo(0.0)); assertThat(stats.getMax(), equalTo(10.0)); - assertThat(stats.getSum(), equalTo((double) 1+2+3+4+5+6+7+8+9+10+0+1+2+3+4+5+6+7+8+9)); + assertThat(stats.getSum(), equalTo((double) 1 + 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10 + 0 + 1 + 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9)); assertThat(stats.getCount(), equalTo(20L)); - assertThat(stats.getSumOfSquares(), equalTo((double) 1+4+9+16+25+36+49+64+81+100+0+1+4+9+16+25+36+49+64+81)); - assertThat(stats.getVariance(), equalTo(variance(1, 2, 3, 4, 5, 6, 7, 8 ,9, 10, 0, 1, 2, 3, 4, 5, 6, 7, 8 ,9))); - assertThat(stats.getVariancePopulation(), - equalTo(variancePopulation(1, 2, 3, 4, 5, 6, 7, 8 ,9, 10, 0, 1, 2, 3, 4, 5, 6, 7, 8 ,9))); - assertThat(stats.getVarianceSampling(), equalTo(varianceSampling(1, 2, 3, 4, 5, 6, 7, 8 ,9, 10, 0, 1, 2, 3, 4, 5, 6, 7, 8 ,9))); - assertThat(stats.getStdDeviation(), equalTo(stdDev(1, 2, 3, 4, 5, 6, 7, 8 ,9, 10, 0, 1, 2, 3, 4, 5, 6, 7, 8 ,9))); - assertThat(stats.getStdDeviationPopulation(), - equalTo(stdDevPopulation(1, 2, 3, 4, 5, 6, 7, 8 ,9, 10, 0, 1, 2, 3, 4, 5, 6, 7, 8 ,9))); - assertThat(stats.getStdDeviationSampling(), equalTo(stdDevSampling(1, 2, 3, 4, 5, 6, 7, 8 ,9, 10, 0, 1, 2, 3, 4, 5, 6, 7, 8 ,9))); + assertThat( + stats.getSumOfSquares(), + equalTo((double) 1 + 4 + 9 + 16 + 25 + 36 + 49 + 64 + 81 + 100 + 0 + 1 + 4 + 9 + 16 + 25 + 36 + 49 + 64 + 81) + ); + assertThat(stats.getVariance(), equalTo(variance(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9))); + assertThat(stats.getVariancePopulation(), 
equalTo(variancePopulation(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9))); + assertThat(stats.getVarianceSampling(), equalTo(varianceSampling(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9))); + assertThat(stats.getStdDeviation(), equalTo(stdDev(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9))); + assertThat( + stats.getStdDeviationPopulation(), + equalTo(stdDevPopulation(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9)) + ); + assertThat(stats.getStdDeviationSampling(), equalTo(stdDevSampling(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9))); checkUpperLowerBounds(stats, sigma); } public void testEmptySubAggregation() { SearchResponse searchResponse = client().prepareSearch("idx") .setQuery(matchAllQuery()) - .addAggregation(terms("value").field("value") - .subAggregation(missing("values").field("values") - .subAggregation(extendedStats("stats").field("value")))) + .addAggregation( + terms("value").field("value") + .subAggregation(missing("values").field("values").subAggregation(extendedStats("stats").field("value"))) + ) .get(); assertHitCount(searchResponse, 10); @@ -716,12 +776,14 @@ public void testEmptySubAggregation() { @Override public void testOrderByEmptyAggregation() throws Exception { - SearchResponse searchResponse = client().prepareSearch("idx").setQuery(matchAllQuery()) - .addAggregation(terms("terms").field("value") + SearchResponse searchResponse = client().prepareSearch("idx") + .setQuery(matchAllQuery()) + .addAggregation( + terms("terms").field("value") .order(BucketOrder.compound(BucketOrder.aggregation("filter>extendedStats.avg", true))) - .subAggregation( - filter("filter", termQuery("value", 100)).subAggregation(extendedStats("extendedStats").field("value")))) - .get(); + .subAggregation(filter("filter", termQuery("value", 100)).subAggregation(extendedStats("extendedStats").field("value"))) + ) + .get(); assertHitCount(searchResponse, 10); @@ -765,10 +827,14 @@ public void testOrderByEmptyAggregation() throws Exception { private void checkUpperLowerBounds(ExtendedStats stats, double sigma) { assertThat(stats.getStdDeviationBound(ExtendedStats.Bounds.UPPER), equalTo(stats.getAvg() + (stats.getStdDeviation() * sigma))); assertThat(stats.getStdDeviationBound(ExtendedStats.Bounds.LOWER), equalTo(stats.getAvg() - (stats.getStdDeviation() * sigma))); - assertThat(stats.getStdDeviationBound(Bounds.UPPER_POPULATION), equalTo(stats.getAvg() + - (stats.getStdDeviationPopulation() * sigma))); - assertThat(stats.getStdDeviationBound(Bounds.LOWER_POPULATION), equalTo(stats.getAvg() - - (stats.getStdDeviationPopulation() * sigma))); + assertThat( + stats.getStdDeviationBound(Bounds.UPPER_POPULATION), + equalTo(stats.getAvg() + (stats.getStdDeviationPopulation() * sigma)) + ); + assertThat( + stats.getStdDeviationBound(Bounds.LOWER_POPULATION), + equalTo(stats.getAvg() - (stats.getStdDeviationPopulation() * sigma)) + ); assertThat(stats.getStdDeviationBound(Bounds.UPPER_SAMPLING), equalTo(stats.getAvg() + (stats.getStdDeviationSampling() * sigma))); assertThat(stats.getStdDeviationBound(Bounds.LOWER_SAMPLING), equalTo(stats.getAvg() - (stats.getStdDeviationSampling() * sigma))); } @@ -778,50 +844,133 @@ private void checkUpperLowerBounds(ExtendedStats stats, double sigma) { * Ensure requests using nondeterministic scripts do not get cached. 
*/ public void testScriptCaching() throws Exception { - assertAcked(prepareCreate("cache_test_idx").setMapping("d", "type=long") + assertAcked( + prepareCreate("cache_test_idx").setMapping("d", "type=long") .setSettings(Settings.builder().put("requests.cache.enable", true).put("number_of_shards", 1).put("number_of_replicas", 1)) - .get()); - indexRandom(true, client().prepareIndex("cache_test_idx").setId("1").setSource("s", 1), - client().prepareIndex("cache_test_idx").setId("2").setSource("s", 2)); + .get() + ); + indexRandom( + true, + client().prepareIndex("cache_test_idx").setId("1").setSource("s", 1), + client().prepareIndex("cache_test_idx").setId("2").setSource("s", 2) + ); // Make sure we are starting with a clear cache - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getHitCount(), equalTo(0L)); - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getMissCount(), equalTo(0L)); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getHitCount(), + equalTo(0L) + ); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getMissCount(), + equalTo(0L) + ); // Test that a request using a nondeterministic script does not get cached - SearchResponse r = client().prepareSearch("cache_test_idx").setSize(0) - .addAggregation(extendedStats("foo").field("d") - .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "Math.random()", Collections.emptyMap()))) - .get(); + SearchResponse r = client().prepareSearch("cache_test_idx") + .setSize(0) + .addAggregation( + extendedStats("foo").field("d") + .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "Math.random()", Collections.emptyMap())) + ) + .get(); assertSearchResponse(r); - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getHitCount(), equalTo(0L)); - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getMissCount(), equalTo(0L)); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getHitCount(), + equalTo(0L) + ); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getMissCount(), + equalTo(0L) + ); // Test that a request using a deterministic script gets cached - r = client().prepareSearch("cache_test_idx").setSize(0) - .addAggregation(extendedStats("foo").field("d") - .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value + 1", Collections.emptyMap()))) - .get(); + r = client().prepareSearch("cache_test_idx") + .setSize(0) + .addAggregation( + extendedStats("foo").field("d") + .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value + 1", Collections.emptyMap())) + ) + .get(); assertSearchResponse(r); - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getHitCount(), equalTo(0L)); - 
assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getMissCount(), equalTo(1L)); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getHitCount(), + equalTo(0L) + ); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getMissCount(), + equalTo(1L) + ); // Ensure that non-scripted requests are cached as normal r = client().prepareSearch("cache_test_idx").setSize(0).addAggregation(extendedStats("foo").field("d")).get(); assertSearchResponse(r); - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getHitCount(), equalTo(0L)); - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getMissCount(), equalTo(2L)); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getHitCount(), + equalTo(0L) + ); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getMissCount(), + equalTo(2L) + ); } } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/GeoBoundsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/GeoBoundsIT.java index b63cc43d60090..af498a17fe5b2 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/GeoBoundsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/GeoBoundsIT.java @@ -38,9 +38,8 @@ public class GeoBoundsIT extends AbstractGeoTestCase { public void testSingleValuedField() throws Exception { SearchResponse response = client().prepareSearch(IDX_NAME) - .addAggregation(geoBounds(aggName).field(SINGLE_VALUED_FIELD_NAME) - .wrapLongitude(false)) - .get(); + .addAggregation(geoBounds(aggName).field(SINGLE_VALUED_FIELD_NAME).wrapLongitude(false)) + .get(); assertSearchResponse(response); @@ -56,12 +55,10 @@ public void testSingleValuedField() throws Exception { } public void testSingleValuedField_getProperty() throws Exception { - SearchResponse searchResponse = client() - .prepareSearch(IDX_NAME) - .setQuery(matchAllQuery()) - .addAggregation( - global("global").subAggregation(geoBounds(aggName).field(SINGLE_VALUED_FIELD_NAME).wrapLongitude(false))) - .get(); + SearchResponse searchResponse = client().prepareSearch(IDX_NAME) + .setQuery(matchAllQuery()) + .addAggregation(global("global").subAggregation(geoBounds(aggName).field(SINGLE_VALUED_FIELD_NAME).wrapLongitude(false))) + .get(); assertSearchResponse(searchResponse); @@ -75,30 +72,32 @@ public void testSingleValuedField_getProperty() throws Exception { GeoBounds geobounds = global.getAggregations().get(aggName); assertThat(geobounds, notNullValue()); assertThat(geobounds.getName(), equalTo(aggName)); - assertThat((GeoBounds) ((InternalAggregation)global).getProperty(aggName), sameInstance(geobounds)); + assertThat((GeoBounds) ((InternalAggregation) global).getProperty(aggName), sameInstance(geobounds)); GeoPoint topLeft = geobounds.topLeft(); GeoPoint bottomRight = geobounds.bottomRight(); assertThat(topLeft.lat(), closeTo(singleTopLeft.lat(), 
GEOHASH_TOLERANCE)); assertThat(topLeft.lon(), closeTo(singleTopLeft.lon(), GEOHASH_TOLERANCE)); assertThat(bottomRight.lat(), closeTo(singleBottomRight.lat(), GEOHASH_TOLERANCE)); assertThat(bottomRight.lon(), closeTo(singleBottomRight.lon(), GEOHASH_TOLERANCE)); - assertThat((double) ((InternalAggregation)global).getProperty(aggName + ".top"), closeTo(singleTopLeft.lat(), GEOHASH_TOLERANCE)); - assertThat((double) ((InternalAggregation)global).getProperty(aggName + ".left"), closeTo(singleTopLeft.lon(), GEOHASH_TOLERANCE)); - assertThat((double) ((InternalAggregation)global).getProperty(aggName + ".bottom"), - closeTo(singleBottomRight.lat(), GEOHASH_TOLERANCE)); - assertThat((double) ((InternalAggregation)global).getProperty(aggName + ".right"), - closeTo(singleBottomRight.lon(), GEOHASH_TOLERANCE)); + assertThat((double) ((InternalAggregation) global).getProperty(aggName + ".top"), closeTo(singleTopLeft.lat(), GEOHASH_TOLERANCE)); + assertThat((double) ((InternalAggregation) global).getProperty(aggName + ".left"), closeTo(singleTopLeft.lon(), GEOHASH_TOLERANCE)); + assertThat( + (double) ((InternalAggregation) global).getProperty(aggName + ".bottom"), + closeTo(singleBottomRight.lat(), GEOHASH_TOLERANCE) + ); + assertThat( + (double) ((InternalAggregation) global).getProperty(aggName + ".right"), + closeTo(singleBottomRight.lon(), GEOHASH_TOLERANCE) + ); } public void testMultiValuedField() throws Exception { SearchResponse response = client().prepareSearch(IDX_NAME) - .addAggregation(geoBounds(aggName).field(MULTI_VALUED_FIELD_NAME) - .wrapLongitude(false)) - .get(); + .addAggregation(geoBounds(aggName).field(MULTI_VALUED_FIELD_NAME).wrapLongitude(false)) + .get(); assertSearchResponse(response); - GeoBounds geoBounds = response.getAggregations().get(aggName); assertThat(geoBounds, notNullValue()); assertThat(geoBounds.getName(), equalTo(aggName)); @@ -112,9 +111,8 @@ public void testMultiValuedField() throws Exception { public void testUnmapped() throws Exception { SearchResponse response = client().prepareSearch(UNMAPPED_IDX_NAME) - .addAggregation(geoBounds(aggName).field(SINGLE_VALUED_FIELD_NAME) - .wrapLongitude(false)) - .get(); + .addAggregation(geoBounds(aggName).field(SINGLE_VALUED_FIELD_NAME).wrapLongitude(false)) + .get(); assertSearchResponse(response); @@ -129,9 +127,8 @@ public void testUnmapped() throws Exception { public void testPartiallyUnmapped() throws Exception { SearchResponse response = client().prepareSearch(IDX_NAME, UNMAPPED_IDX_NAME) - .addAggregation(geoBounds(aggName).field(SINGLE_VALUED_FIELD_NAME) - .wrapLongitude(false)) - .get(); + .addAggregation(geoBounds(aggName).field(SINGLE_VALUED_FIELD_NAME).wrapLongitude(false)) + .get(); assertSearchResponse(response); @@ -148,10 +145,9 @@ public void testPartiallyUnmapped() throws Exception { public void testEmptyAggregation() throws Exception { SearchResponse searchResponse = client().prepareSearch(EMPTY_IDX_NAME) - .setQuery(matchAllQuery()) - .addAggregation(geoBounds(aggName).field(SINGLE_VALUED_FIELD_NAME) - .wrapLongitude(false)) - .get(); + .setQuery(matchAllQuery()) + .addAggregation(geoBounds(aggName).field(SINGLE_VALUED_FIELD_NAME).wrapLongitude(false)) + .get(); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(0L)); GeoBounds geoBounds = searchResponse.getAggregations().get(aggName); @@ -165,9 +161,8 @@ public void testEmptyAggregation() throws Exception { public void testSingleValuedFieldNearDateLine() throws Exception { SearchResponse response = 
client().prepareSearch(DATELINE_IDX_NAME) - .addAggregation(geoBounds(aggName).field(SINGLE_VALUED_FIELD_NAME) - .wrapLongitude(false)) - .get(); + .addAggregation(geoBounds(aggName).field(SINGLE_VALUED_FIELD_NAME).wrapLongitude(false)) + .get(); assertSearchResponse(response); @@ -190,8 +185,8 @@ public void testSingleValuedFieldNearDateLineWrapLongitude() throws Exception { GeoPoint geoValuesTopLeft = new GeoPoint(38, 170); GeoPoint geoValuesBottomRight = new GeoPoint(-24, -175); SearchResponse response = client().prepareSearch(DATELINE_IDX_NAME) - .addAggregation(geoBounds(aggName).field(SINGLE_VALUED_FIELD_NAME).wrapLongitude(true)) - .get(); + .addAggregation(geoBounds(aggName).field(SINGLE_VALUED_FIELD_NAME).wrapLongitude(true)) + .get(); assertSearchResponse(response); @@ -211,9 +206,11 @@ public void testSingleValuedFieldNearDateLineWrapLongitude() throws Exception { */ public void testSingleValuedFieldAsSubAggToHighCardTermsAgg() { SearchResponse response = client().prepareSearch(HIGH_CARD_IDX_NAME) - .addAggregation(terms("terms").field(NUMBER_FIELD_NAME).subAggregation(geoBounds(aggName).field(SINGLE_VALUED_FIELD_NAME) - .wrapLongitude(false))) - .get(); + .addAggregation( + terms("terms").field(NUMBER_FIELD_NAME) + .subAggregation(geoBounds(aggName).field(SINGLE_VALUED_FIELD_NAME).wrapLongitude(false)) + ) + .get(); assertSearchResponse(response); @@ -238,7 +235,8 @@ public void testSingleValuedFieldAsSubAggToHighCardTermsAgg() { public void testSingleValuedFieldWithZeroLon() throws Exception { SearchResponse response = client().prepareSearch(IDX_ZERO_NAME) - .addAggregation(geoBounds(aggName).field(SINGLE_VALUED_FIELD_NAME).wrapLongitude(false)).get(); + .addAggregation(geoBounds(aggName).field(SINGLE_VALUED_FIELD_NAME).wrapLongitude(false)) + .get(); assertSearchResponse(response); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/GeoCentroidIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/GeoCentroidIT.java index ce222a9000651..f377a5be1c845 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/GeoCentroidIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/GeoCentroidIT.java @@ -36,9 +36,9 @@ public class GeoCentroidIT extends AbstractGeoTestCase { public void testEmptyAggregation() throws Exception { SearchResponse response = client().prepareSearch(EMPTY_IDX_NAME) - .setQuery(matchAllQuery()) - .addAggregation(geoCentroid(aggName).field(SINGLE_VALUED_FIELD_NAME)) - .get(); + .setQuery(matchAllQuery()) + .addAggregation(geoCentroid(aggName).field(SINGLE_VALUED_FIELD_NAME)) + .get(); assertSearchResponse(response); GeoCentroid geoCentroid = response.getAggregations().get(aggName); @@ -52,8 +52,8 @@ public void testEmptyAggregation() throws Exception { public void testUnmapped() throws Exception { SearchResponse response = client().prepareSearch(UNMAPPED_IDX_NAME) - .addAggregation(geoCentroid(aggName).field(SINGLE_VALUED_FIELD_NAME)) - .get(); + .addAggregation(geoCentroid(aggName).field(SINGLE_VALUED_FIELD_NAME)) + .get(); assertSearchResponse(response); GeoCentroid geoCentroid = response.getAggregations().get(aggName); @@ -66,8 +66,8 @@ public void testUnmapped() throws Exception { public void testPartiallyUnmapped() throws Exception { SearchResponse response = client().prepareSearch(IDX_NAME, UNMAPPED_IDX_NAME) - .addAggregation(geoCentroid(aggName).field(SINGLE_VALUED_FIELD_NAME)) - .get(); + 
.addAggregation(geoCentroid(aggName).field(SINGLE_VALUED_FIELD_NAME)) + .get(); assertSearchResponse(response); GeoCentroid geoCentroid = response.getAggregations().get(aggName); @@ -81,9 +81,9 @@ public void testPartiallyUnmapped() throws Exception { public void testSingleValuedField() throws Exception { SearchResponse response = client().prepareSearch(IDX_NAME) - .setQuery(matchAllQuery()) - .addAggregation(geoCentroid(aggName).field(SINGLE_VALUED_FIELD_NAME)) - .get(); + .setQuery(matchAllQuery()) + .addAggregation(geoCentroid(aggName).field(SINGLE_VALUED_FIELD_NAME)) + .get(); assertSearchResponse(response); GeoCentroid geoCentroid = response.getAggregations().get(aggName); @@ -97,9 +97,9 @@ public void testSingleValuedField() throws Exception { public void testSingleValueFieldGetProperty() throws Exception { SearchResponse response = client().prepareSearch(IDX_NAME) - .setQuery(matchAllQuery()) - .addAggregation(global("global").subAggregation(geoCentroid(aggName).field(SINGLE_VALUED_FIELD_NAME))) - .get(); + .setQuery(matchAllQuery()) + .addAggregation(global("global").subAggregation(geoCentroid(aggName).field(SINGLE_VALUED_FIELD_NAME))) + .get(); assertSearchResponse(response); Global global = response.getAggregations().get("global"); @@ -112,24 +112,28 @@ public void testSingleValueFieldGetProperty() throws Exception { GeoCentroid geoCentroid = global.getAggregations().get(aggName); assertThat(geoCentroid, notNullValue()); assertThat(geoCentroid.getName(), equalTo(aggName)); - assertThat((GeoCentroid) ((InternalAggregation)global).getProperty(aggName), sameInstance(geoCentroid)); + assertThat((GeoCentroid) ((InternalAggregation) global).getProperty(aggName), sameInstance(geoCentroid)); GeoPoint centroid = geoCentroid.centroid(); assertThat(centroid.lat(), closeTo(singleCentroid.lat(), GEOHASH_TOLERANCE)); assertThat(centroid.lon(), closeTo(singleCentroid.lon(), GEOHASH_TOLERANCE)); - assertThat(((GeoPoint) ((InternalAggregation)global).getProperty(aggName + ".value")).lat(), - closeTo(singleCentroid.lat(), GEOHASH_TOLERANCE)); - assertThat(((GeoPoint) ((InternalAggregation)global).getProperty(aggName + ".value")).lon(), - closeTo(singleCentroid.lon(), GEOHASH_TOLERANCE)); - assertThat((double) ((InternalAggregation)global).getProperty(aggName + ".lat"), closeTo(singleCentroid.lat(), GEOHASH_TOLERANCE)); - assertThat((double) ((InternalAggregation)global).getProperty(aggName + ".lon"), closeTo(singleCentroid.lon(), GEOHASH_TOLERANCE)); + assertThat( + ((GeoPoint) ((InternalAggregation) global).getProperty(aggName + ".value")).lat(), + closeTo(singleCentroid.lat(), GEOHASH_TOLERANCE) + ); + assertThat( + ((GeoPoint) ((InternalAggregation) global).getProperty(aggName + ".value")).lon(), + closeTo(singleCentroid.lon(), GEOHASH_TOLERANCE) + ); + assertThat((double) ((InternalAggregation) global).getProperty(aggName + ".lat"), closeTo(singleCentroid.lat(), GEOHASH_TOLERANCE)); + assertThat((double) ((InternalAggregation) global).getProperty(aggName + ".lon"), closeTo(singleCentroid.lon(), GEOHASH_TOLERANCE)); assertEquals(numDocs, (long) ((InternalAggregation) global).getProperty(aggName + ".count")); } public void testMultiValuedField() throws Exception { SearchResponse searchResponse = client().prepareSearch(IDX_NAME) - .setQuery(matchAllQuery()) - .addAggregation(geoCentroid(aggName).field(MULTI_VALUED_FIELD_NAME)) - .get(); + .setQuery(matchAllQuery()) + .addAggregation(geoCentroid(aggName).field(MULTI_VALUED_FIELD_NAME)) + .get(); assertSearchResponse(searchResponse); GeoCentroid 
geoCentroid = searchResponse.getAggregations().get(aggName); @@ -143,9 +147,10 @@ public void testMultiValuedField() throws Exception { public void testSingleValueFieldAsSubAggToGeohashGrid() throws Exception { SearchResponse response = client().prepareSearch(HIGH_CARD_IDX_NAME) - .addAggregation(geohashGrid("geoGrid").field(SINGLE_VALUED_FIELD_NAME) - .subAggregation(geoCentroid(aggName).field(SINGLE_VALUED_FIELD_NAME))) - .get(); + .addAggregation( + geohashGrid("geoGrid").field(SINGLE_VALUED_FIELD_NAME).subAggregation(geoCentroid(aggName).field(SINGLE_VALUED_FIELD_NAME)) + ) + .get(); assertSearchResponse(response); GeoGrid grid = response.getAggregations().get("geoGrid"); @@ -156,10 +161,16 @@ public void testSingleValueFieldAsSubAggToGeohashGrid() throws Exception { String geohash = cell.getKeyAsString(); GeoPoint expectedCentroid = expectedCentroidsForGeoHash.get(geohash); GeoCentroid centroidAgg = cell.getAggregations().get(aggName); - assertThat("Geohash " + geohash + " has wrong centroid latitude ", expectedCentroid.lat(), - closeTo(centroidAgg.centroid().lat(), GEOHASH_TOLERANCE)); - assertThat("Geohash " + geohash + " has wrong centroid longitude", expectedCentroid.lon(), - closeTo(centroidAgg.centroid().lon(), GEOHASH_TOLERANCE)); + assertThat( + "Geohash " + geohash + " has wrong centroid latitude ", + expectedCentroid.lat(), + closeTo(centroidAgg.centroid().lat(), GEOHASH_TOLERANCE) + ); + assertThat( + "Geohash " + geohash + " has wrong centroid longitude", + expectedCentroid.lon(), + closeTo(centroidAgg.centroid().lon(), GEOHASH_TOLERANCE) + ); } } } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/HDRPercentileRanksIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/HDRPercentileRanksIT.java index 2809361425fb4..46b6421086703 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/HDRPercentileRanksIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/HDRPercentileRanksIT.java @@ -14,12 +14,12 @@ import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptType; import org.elasticsearch.search.aggregations.AggregationTestScriptsPlugin; +import org.elasticsearch.search.aggregations.BucketOrder; import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.bucket.filter.Filter; import org.elasticsearch.search.aggregations.bucket.global.Global; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; import org.elasticsearch.search.aggregations.bucket.terms.Terms; -import org.elasticsearch.search.aggregations.BucketOrder; import java.util.Arrays; import java.util.Collection; @@ -58,15 +58,15 @@ private static double[] randomPercents(long minValue, long maxValue) { final double[] percents = new double[length]; for (int i = 0; i < percents.length; ++i) { switch (randomInt(20)) { - case 0: - percents[i] = minValue; - break; - case 1: - percents[i] = maxValue; - break; - default: - percents[i] = (randomDouble() * (maxValue - minValue)) + minValue; - break; + case 0: + percents[i] = minValue; + break; + case 1: + percents[i] = maxValue; + break; + default: + percents[i] = (randomDouble() * (maxValue - minValue)) + minValue; + break; } } Arrays.sort(percents); @@ -105,19 +105,19 @@ private void assertConsistent(double[] pcts, PercentileRanks values, long minVal @Override public void testEmptyAggregation() throws Exception { int sigDigits 
= randomSignificantDigits(); - SearchResponse searchResponse = client() - .prepareSearch("empty_bucket_idx") - .setQuery(matchAllQuery()) - .addAggregation( - histogram("histo") - .field("value") - .interval(1L) - .minDocCount(0) - .subAggregation( - percentileRanks("percentile_ranks", new double[]{10, 15}) - .field("value").method(PercentilesMethod.HDR) - .numberOfSignificantValueDigits(sigDigits))) - .get(); + SearchResponse searchResponse = client().prepareSearch("empty_bucket_idx") + .setQuery(matchAllQuery()) + .addAggregation( + histogram("histo").field("value") + .interval(1L) + .minDocCount(0) + .subAggregation( + percentileRanks("percentile_ranks", new double[] { 10, 15 }).field("value") + .method(PercentilesMethod.HDR) + .numberOfSignificantValueDigits(sigDigits) + ) + ) + .get(); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(2L)); Histogram histo = searchResponse.getAggregations().get("histo"); @@ -135,15 +135,14 @@ public void testEmptyAggregation() throws Exception { @Override public void testUnmapped() throws Exception { int sigDigits = randomSignificantDigits(); - SearchResponse searchResponse = client() - .prepareSearch("idx_unmapped") - .setQuery(matchAllQuery()) - .addAggregation( - percentileRanks("percentile_ranks", new double[]{0, 10, 15, 100}) - .method(PercentilesMethod.HDR) - .numberOfSignificantValueDigits(sigDigits) - .field("value")) - .get(); + SearchResponse searchResponse = client().prepareSearch("idx_unmapped") + .setQuery(matchAllQuery()) + .addAggregation( + percentileRanks("percentile_ranks", new double[] { 0, 10, 15, 100 }).method(PercentilesMethod.HDR) + .numberOfSignificantValueDigits(sigDigits) + .field("value") + ) + .get(); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(0L)); @@ -160,13 +159,14 @@ public void testUnmapped() throws Exception { public void testSingleValuedField() throws Exception { int sigDigits = randomSignificantDigits(); final double[] pcts = randomPercents(minValue, maxValue); - SearchResponse searchResponse = client() - .prepareSearch("idx") - .setQuery(matchAllQuery()) - .addAggregation( - percentileRanks("percentile_ranks", pcts).method(PercentilesMethod.HDR).numberOfSignificantValueDigits(sigDigits) - .field("value")) - .get(); + SearchResponse searchResponse = client().prepareSearch("idx") + .setQuery(matchAllQuery()) + .addAggregation( + percentileRanks("percentile_ranks", pcts).method(PercentilesMethod.HDR) + .numberOfSignificantValueDigits(sigDigits) + .field("value") + ) + .get(); assertHitCount(searchResponse, 10); @@ -177,30 +177,34 @@ public void testSingleValuedField() throws Exception { public void testNullValuesField() throws Exception { int sigDigits = randomSignificantDigits(); final double[] pcts = null; - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> client() - .prepareSearch("idx") - .setQuery(matchAllQuery()) - .addAggregation( - percentileRanks("percentile_ranks", pcts) - .method(PercentilesMethod.HDR) - .numberOfSignificantValueDigits(sigDigits) - .field("value")) - .get()); + IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, + () -> client().prepareSearch("idx") + .setQuery(matchAllQuery()) + .addAggregation( + percentileRanks("percentile_ranks", pcts).method(PercentilesMethod.HDR) + .numberOfSignificantValueDigits(sigDigits) + .field("value") + ) + .get() + ); assertThat(e.getMessage(), equalTo("[values] must not be null: [percentile_ranks]")); } public void testEmptyValuesField() throws Exception { int 
sigDigits = randomSignificantDigits(); final double[] pcts = new double[0]; - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> client() - .prepareSearch("idx") - .setQuery(matchAllQuery()) - .addAggregation( - percentileRanks("percentile_ranks", pcts) - .method(PercentilesMethod.HDR) - .numberOfSignificantValueDigits(sigDigits) - .field("value")) - .get()); + IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, + () -> client().prepareSearch("idx") + .setQuery(matchAllQuery()) + .addAggregation( + percentileRanks("percentile_ranks", pcts).method(PercentilesMethod.HDR) + .numberOfSignificantValueDigits(sigDigits) + .field("value") + ) + .get() + ); assertThat(e.getMessage(), equalTo("[values] must not be an empty array: [percentile_ranks]")); } @@ -208,16 +212,16 @@ public void testEmptyValuesField() throws Exception { public void testSingleValuedFieldGetProperty() throws Exception { int sigDigits = randomSignificantDigits(); final double[] pcts = randomPercents(minValue, maxValue); - SearchResponse searchResponse = client() - .prepareSearch("idx") - .setQuery(matchAllQuery()) - .addAggregation( - global("global").subAggregation( - percentileRanks("percentile_ranks", pcts) - .method(PercentilesMethod.HDR) - .numberOfSignificantValueDigits(sigDigits) - .field("value"))) - .get(); + SearchResponse searchResponse = client().prepareSearch("idx") + .setQuery(matchAllQuery()) + .addAggregation( + global("global").subAggregation( + percentileRanks("percentile_ranks", pcts).method(PercentilesMethod.HDR) + .numberOfSignificantValueDigits(sigDigits) + .field("value") + ) + ) + .get(); assertHitCount(searchResponse, 10); @@ -231,20 +235,21 @@ public void testSingleValuedFieldGetProperty() throws Exception { PercentileRanks values = global.getAggregations().get("percentile_ranks"); assertThat(values, notNullValue()); assertThat(values.getName(), equalTo("percentile_ranks")); - assertThat(((InternalAggregation)global).getProperty("percentile_ranks"), sameInstance(values)); + assertThat(((InternalAggregation) global).getProperty("percentile_ranks"), sameInstance(values)); } public void testSingleValuedFieldOutsideRange() throws Exception { int sigDigits = randomSignificantDigits(); final double[] pcts = new double[] { minValue - 1, maxValue + 1 }; - SearchResponse searchResponse = client() - .prepareSearch("idx") - .setQuery(matchAllQuery()) - .addAggregation( - percentileRanks("percentile_ranks", pcts).method(PercentilesMethod.HDR).numberOfSignificantValueDigits(sigDigits) - .field("value")) - .get(); + SearchResponse searchResponse = client().prepareSearch("idx") + .setQuery(matchAllQuery()) + .addAggregation( + percentileRanks("percentile_ranks", pcts).method(PercentilesMethod.HDR) + .numberOfSignificantValueDigits(sigDigits) + .field("value") + ) + .get(); assertHitCount(searchResponse, 10); @@ -256,13 +261,14 @@ public void testSingleValuedFieldOutsideRange() throws Exception { public void testSingleValuedFieldPartiallyUnmapped() throws Exception { int sigDigits = randomSignificantDigits(); final double[] pcts = randomPercents(minValue, maxValue); - SearchResponse searchResponse = client() - .prepareSearch("idx", "idx_unmapped") - .setQuery(matchAllQuery()) - .addAggregation( - percentileRanks("percentile_ranks", pcts).method(PercentilesMethod.HDR).numberOfSignificantValueDigits(sigDigits) - .field("value")) - .get(); + SearchResponse searchResponse = client().prepareSearch("idx", "idx_unmapped") + .setQuery(matchAllQuery()) + .addAggregation( + 
percentileRanks("percentile_ranks", pcts).method(PercentilesMethod.HDR) + .numberOfSignificantValueDigits(sigDigits) + .field("value") + ) + .get(); assertHitCount(searchResponse, 10); @@ -274,16 +280,15 @@ public void testSingleValuedFieldPartiallyUnmapped() throws Exception { public void testSingleValuedFieldWithValueScript() throws Exception { int sigDigits = randomSignificantDigits(); final double[] pcts = randomPercents(minValue - 1, maxValue - 1); - SearchResponse searchResponse = client() - .prepareSearch("idx") - .setQuery(matchAllQuery()) - .addAggregation( - percentileRanks("percentile_ranks", pcts) - .method(PercentilesMethod.HDR) - .numberOfSignificantValueDigits(sigDigits) - .field("value") - .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value - 1", emptyMap()))) - .get(); + SearchResponse searchResponse = client().prepareSearch("idx") + .setQuery(matchAllQuery()) + .addAggregation( + percentileRanks("percentile_ranks", pcts).method(PercentilesMethod.HDR) + .numberOfSignificantValueDigits(sigDigits) + .field("value") + .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value - 1", emptyMap())) + ) + .get(); assertHitCount(searchResponse, 10); @@ -297,16 +302,15 @@ public void testSingleValuedFieldWithValueScriptWithParams() throws Exception { Map params = new HashMap<>(); params.put("dec", 1); final double[] pcts = randomPercents(minValue - 1, maxValue - 1); - SearchResponse searchResponse = client() - .prepareSearch("idx") - .setQuery(matchAllQuery()) - .addAggregation( - percentileRanks("percentile_ranks", pcts) - .method(PercentilesMethod.HDR) - .numberOfSignificantValueDigits(sigDigits) - .field("value") - .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value - dec", params))) - .get(); + SearchResponse searchResponse = client().prepareSearch("idx") + .setQuery(matchAllQuery()) + .addAggregation( + percentileRanks("percentile_ranks", pcts).method(PercentilesMethod.HDR) + .numberOfSignificantValueDigits(sigDigits) + .field("value") + .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value - dec", params)) + ) + .get(); assertHitCount(searchResponse, 10); @@ -318,13 +322,14 @@ public void testSingleValuedFieldWithValueScriptWithParams() throws Exception { public void testMultiValuedField() throws Exception { int sigDigits = randomSignificantDigits(); final double[] pcts = randomPercents(minValues, maxValues); - SearchResponse searchResponse = client() - .prepareSearch("idx") - .setQuery(matchAllQuery()) - .addAggregation( - percentileRanks("percentile_ranks", pcts).method(PercentilesMethod.HDR).numberOfSignificantValueDigits(sigDigits) - .field("values")) - .get(); + SearchResponse searchResponse = client().prepareSearch("idx") + .setQuery(matchAllQuery()) + .addAggregation( + percentileRanks("percentile_ranks", pcts).method(PercentilesMethod.HDR) + .numberOfSignificantValueDigits(sigDigits) + .field("values") + ) + .get(); assertHitCount(searchResponse, 10); @@ -336,16 +341,15 @@ public void testMultiValuedField() throws Exception { public void testMultiValuedFieldWithValueScript() throws Exception { int sigDigits = randomSignificantDigits(); final double[] pcts = randomPercents(minValues - 1, maxValues - 1); - SearchResponse searchResponse = client() - .prepareSearch("idx") - .setQuery(matchAllQuery()) - .addAggregation( - percentileRanks("percentile_ranks", pcts) - .method(PercentilesMethod.HDR) - .numberOfSignificantValueDigits(sigDigits) - .field("values") - 
.script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value - 1", emptyMap()))) - .get(); + SearchResponse searchResponse = client().prepareSearch("idx") + .setQuery(matchAllQuery()) + .addAggregation( + percentileRanks("percentile_ranks", pcts).method(PercentilesMethod.HDR) + .numberOfSignificantValueDigits(sigDigits) + .field("values") + .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value - 1", emptyMap())) + ) + .get(); assertHitCount(searchResponse, 10); @@ -356,16 +360,15 @@ public void testMultiValuedFieldWithValueScript() throws Exception { public void testMultiValuedFieldWithValueScriptReverse() throws Exception { int sigDigits = randomSignificantDigits(); final double[] pcts = randomPercents(20 - maxValues, 20 - minValues); - SearchResponse searchResponse = client() - .prepareSearch("idx") - .setQuery(matchAllQuery()) - .addAggregation( - percentileRanks("percentile_ranks", pcts) - .method(PercentilesMethod.HDR) - .numberOfSignificantValueDigits(sigDigits) - .field("values") - .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "20 - _value", emptyMap()))) - .get(); + SearchResponse searchResponse = client().prepareSearch("idx") + .setQuery(matchAllQuery()) + .addAggregation( + percentileRanks("percentile_ranks", pcts).method(PercentilesMethod.HDR) + .numberOfSignificantValueDigits(sigDigits) + .field("values") + .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "20 - _value", emptyMap())) + ) + .get(); assertHitCount(searchResponse, 10); @@ -379,16 +382,15 @@ public void testMultiValuedFieldWithValueScriptWithParams() throws Exception { Map params = new HashMap<>(); params.put("dec", 1); final double[] pcts = randomPercents(minValues - 1, maxValues - 1); - SearchResponse searchResponse = client() - .prepareSearch("idx") - .setQuery(matchAllQuery()) - .addAggregation( - percentileRanks("percentile_ranks", pcts) - .method(PercentilesMethod.HDR) - .numberOfSignificantValueDigits(sigDigits) - .field("values") - .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value - dec", params))) - .get(); + SearchResponse searchResponse = client().prepareSearch("idx") + .setQuery(matchAllQuery()) + .addAggregation( + percentileRanks("percentile_ranks", pcts).method(PercentilesMethod.HDR) + .numberOfSignificantValueDigits(sigDigits) + .field("values") + .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value - dec", params)) + ) + .get(); assertHitCount(searchResponse, 10); @@ -400,15 +402,14 @@ public void testMultiValuedFieldWithValueScriptWithParams() throws Exception { public void testScriptSingleValued() throws Exception { int sigDigits = randomSignificantDigits(); final double[] pcts = randomPercents(minValue, maxValue); - SearchResponse searchResponse = client() - .prepareSearch("idx") - .setQuery(matchAllQuery()) - .addAggregation( - percentileRanks("percentile_ranks", pcts) - .method(PercentilesMethod.HDR) - .numberOfSignificantValueDigits(sigDigits) - .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "doc['value'].value", emptyMap()))) - .get(); + SearchResponse searchResponse = client().prepareSearch("idx") + .setQuery(matchAllQuery()) + .addAggregation( + percentileRanks("percentile_ranks", pcts).method(PercentilesMethod.HDR) + .numberOfSignificantValueDigits(sigDigits) + .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "doc['value'].value", emptyMap())) + ) + .get(); 
assertHitCount(searchResponse, 10); @@ -425,15 +426,14 @@ public void testScriptSingleValuedWithParams() throws Exception { Script script = new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "doc['value'].value - dec", params); final double[] pcts = randomPercents(minValue - 1, maxValue - 1); - SearchResponse searchResponse = client() - .prepareSearch("idx") - .setQuery(matchAllQuery()) - .addAggregation( - percentileRanks("percentile_ranks", pcts) - .method(PercentilesMethod.HDR) - .numberOfSignificantValueDigits(sigDigits) - .script(script)) - .get(); + SearchResponse searchResponse = client().prepareSearch("idx") + .setQuery(matchAllQuery()) + .addAggregation( + percentileRanks("percentile_ranks", pcts).method(PercentilesMethod.HDR) + .numberOfSignificantValueDigits(sigDigits) + .script(script) + ) + .get(); assertHitCount(searchResponse, 10); @@ -448,15 +448,14 @@ public void testScriptMultiValued() throws Exception { Script script = new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "doc['values']", emptyMap()); - SearchResponse searchResponse = client() - .prepareSearch("idx") - .setQuery(matchAllQuery()) - .addAggregation( - percentileRanks("percentile_ranks", pcts) - .method(PercentilesMethod.HDR) - .numberOfSignificantValueDigits(sigDigits) - .script(script)) - .get(); + SearchResponse searchResponse = client().prepareSearch("idx") + .setQuery(matchAllQuery()) + .addAggregation( + percentileRanks("percentile_ranks", pcts).method(PercentilesMethod.HDR) + .numberOfSignificantValueDigits(sigDigits) + .script(script) + ) + .get(); assertHitCount(searchResponse, 10); @@ -470,15 +469,14 @@ public void testScriptMultiValuedWithParams() throws Exception { Script script = AggregationTestScriptsPlugin.DECREMENT_ALL_VALUES; final double[] pcts = randomPercents(minValues - 1, maxValues - 1); - SearchResponse searchResponse = client() - .prepareSearch("idx") - .setQuery(matchAllQuery()) - .addAggregation( - percentileRanks("percentile_ranks", pcts) - .method(PercentilesMethod.HDR) - .numberOfSignificantValueDigits(sigDigits) - .script(script)) - .get(); + SearchResponse searchResponse = client().prepareSearch("idx") + .setQuery(matchAllQuery()) + .addAggregation( + percentileRanks("percentile_ranks", pcts).method(PercentilesMethod.HDR) + .numberOfSignificantValueDigits(sigDigits) + .script(script) + ) + .get(); assertHitCount(searchResponse, 10); @@ -489,15 +487,19 @@ public void testScriptMultiValuedWithParams() throws Exception { public void testOrderBySubAggregation() { int sigDigits = randomSignificantDigits(); boolean asc = randomBoolean(); - SearchResponse searchResponse = client() - .prepareSearch("idx") - .setQuery(matchAllQuery()) - .addAggregation( - histogram("histo").field("value").interval(2L) - .subAggregation( - percentileRanks("percentile_ranks", new double[]{99}).field("value").method(PercentilesMethod.HDR) - .numberOfSignificantValueDigits(sigDigits)) - .order(BucketOrder.aggregation("percentile_ranks", "99", asc))).get(); + SearchResponse searchResponse = client().prepareSearch("idx") + .setQuery(matchAllQuery()) + .addAggregation( + histogram("histo").field("value") + .interval(2L) + .subAggregation( + percentileRanks("percentile_ranks", new double[] { 99 }).field("value") + .method(PercentilesMethod.HDR) + .numberOfSignificantValueDigits(sigDigits) + ) + .order(BucketOrder.aggregation("percentile_ranks", "99", asc)) + ) + .get(); assertHitCount(searchResponse, 10); @@ -517,11 +519,18 @@ public void testOrderBySubAggregation() { @Override public 
void testOrderByEmptyAggregation() throws Exception { - SearchResponse searchResponse = client().prepareSearch("idx").setQuery(matchAllQuery()) - .addAggregation(terms("terms").field("value").order(BucketOrder.compound(BucketOrder.aggregation("filter>ranks.99", true))) - .subAggregation(filter("filter", termQuery("value", 100)) - .subAggregation(percentileRanks("ranks", new double[]{99}).method(PercentilesMethod.HDR).field("value")))) - .get(); + SearchResponse searchResponse = client().prepareSearch("idx") + .setQuery(matchAllQuery()) + .addAggregation( + terms("terms").field("value") + .order(BucketOrder.compound(BucketOrder.aggregation("filter>ranks.99", true))) + .subAggregation( + filter("filter", termQuery("value", 100)).subAggregation( + percentileRanks("ranks", new double[] { 99 }).method(PercentilesMethod.HDR).field("value") + ) + ) + ) + .get(); assertHitCount(searchResponse, 10); @@ -551,55 +560,138 @@ public void testOrderByEmptyAggregation() throws Exception { * Ensure requests using nondeterministic scripts do not get cached. */ public void testScriptCaching() throws Exception { - assertAcked(prepareCreate("cache_test_idx").setMapping("d", "type=long") + assertAcked( + prepareCreate("cache_test_idx").setMapping("d", "type=long") .setSettings(Settings.builder().put("requests.cache.enable", true).put("number_of_shards", 1).put("number_of_replicas", 1)) - .get()); - indexRandom(true, client().prepareIndex("cache_test_idx").setId("1").setSource("s", 1), - client().prepareIndex("cache_test_idx").setId("2").setSource("s", 2)); + .get() + ); + indexRandom( + true, + client().prepareIndex("cache_test_idx").setId("1").setSource("s", 1), + client().prepareIndex("cache_test_idx").setId("2").setSource("s", 2) + ); // Make sure we are starting with a clear cache - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getHitCount(), equalTo(0L)); - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getMissCount(), equalTo(0L)); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getHitCount(), + equalTo(0L) + ); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getMissCount(), + equalTo(0L) + ); // Test that a request using a nondeterministic script does not get cached - SearchResponse r = client() - .prepareSearch("cache_test_idx").setSize(0) - .addAggregation(percentileRanks("foo", new double[]{50.0}) - .method(PercentilesMethod.HDR).field("d") - .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "Math.random()", emptyMap()))) - .get(); + SearchResponse r = client().prepareSearch("cache_test_idx") + .setSize(0) + .addAggregation( + percentileRanks("foo", new double[] { 50.0 }).method(PercentilesMethod.HDR) + .field("d") + .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "Math.random()", emptyMap())) + ) + .get(); assertSearchResponse(r); - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getHitCount(), equalTo(0L)); - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getMissCount(), equalTo(0L)); + assertThat( + client().admin() + 
.indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getHitCount(), + equalTo(0L) + ); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getMissCount(), + equalTo(0L) + ); // Test that a request using a deterministic script gets cached - r = client() - .prepareSearch("cache_test_idx").setSize(0) - .addAggregation(percentileRanks("foo", new double[]{50.0}) - .method(PercentilesMethod.HDR).field("d") - .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value - 1", emptyMap()))) - .get(); + r = client().prepareSearch("cache_test_idx") + .setSize(0) + .addAggregation( + percentileRanks("foo", new double[] { 50.0 }).method(PercentilesMethod.HDR) + .field("d") + .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value - 1", emptyMap())) + ) + .get(); assertSearchResponse(r); - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getHitCount(), equalTo(0L)); - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getMissCount(), equalTo(1L)); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getHitCount(), + equalTo(0L) + ); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getMissCount(), + equalTo(1L) + ); // Ensure that non-scripted requests are cached as normal - r = client().prepareSearch("cache_test_idx").setSize(0) - .addAggregation(percentileRanks("foo", new double[]{50.0}).method(PercentilesMethod.HDR).field("d")).get(); + r = client().prepareSearch("cache_test_idx") + .setSize(0) + .addAggregation(percentileRanks("foo", new double[] { 50.0 }).method(PercentilesMethod.HDR).field("d")) + .get(); assertSearchResponse(r); - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getHitCount(), equalTo(0L)); - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getMissCount(), equalTo(2L)); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getHitCount(), + equalTo(0L) + ); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getMissCount(), + equalTo(2L) + ); } } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/HDRPercentilesIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/HDRPercentilesIT.java index 9b46cee19d929..0a5d19b7af884 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/HDRPercentilesIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/HDRPercentilesIT.java @@ -61,18 +61,18 @@ private static double[] randomPercentiles() { final Set uniquedPercentiles = new HashSet<>(); while (uniquedPercentiles.size() < length) { switch (randomInt(20)) { - case 0: - uniquedPercentiles.add(0.0); - break; - case 1: - 
uniquedPercentiles.add(100.0); - break; - default: - uniquedPercentiles.add(randomDouble() * 100); - break; + case 0: + uniquedPercentiles.add(0.0); + break; + case 1: + uniquedPercentiles.add(100.0); + break; + default: + uniquedPercentiles.add(randomDouble() * 100); + break; } } - double[] percentiles= uniquedPercentiles.stream().mapToDouble(Double::doubleValue).sorted().toArray(); + double[] percentiles = uniquedPercentiles.stream().mapToDouble(Double::doubleValue).sorted().toArray(); LogManager.getLogger(HDRPercentilesIT.class).info("Using percentiles={}", Arrays.toString(percentiles)); return percentiles; } @@ -108,20 +108,20 @@ private void assertConsistent(double[] pcts, Percentiles percentiles, long minVa @Override public void testEmptyAggregation() throws Exception { int sigDigits = randomSignificantDigits(); - SearchResponse searchResponse = client() - .prepareSearch("empty_bucket_idx") - .setQuery(matchAllQuery()) - .addAggregation( - histogram("histo") - .field("value") - .interval(1L) - .minDocCount(0) - .subAggregation( - percentiles("percentiles").field("value") - .numberOfSignificantValueDigits(sigDigits) - .method(PercentilesMethod.HDR) - .percentiles(10, 15))) - .get(); + SearchResponse searchResponse = client().prepareSearch("empty_bucket_idx") + .setQuery(matchAllQuery()) + .addAggregation( + histogram("histo").field("value") + .interval(1L) + .minDocCount(0) + .subAggregation( + percentiles("percentiles").field("value") + .numberOfSignificantValueDigits(sigDigits) + .method(PercentilesMethod.HDR) + .percentiles(10, 15) + ) + ) + .get(); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(2L)); Histogram histo = searchResponse.getAggregations().get("histo"); @@ -139,12 +139,15 @@ public void testEmptyAggregation() throws Exception { @Override public void testUnmapped() throws Exception { int sigDigits = randomSignificantDigits(); - SearchResponse searchResponse = client() - .prepareSearch("idx_unmapped") - .setQuery(matchAllQuery()) - .addAggregation( - percentiles("percentiles").numberOfSignificantValueDigits(sigDigits).method(PercentilesMethod.HDR).field("value") - .percentiles(0, 10, 15, 100)).get(); + SearchResponse searchResponse = client().prepareSearch("idx_unmapped") + .setQuery(matchAllQuery()) + .addAggregation( + percentiles("percentiles").numberOfSignificantValueDigits(sigDigits) + .method(PercentilesMethod.HDR) + .field("value") + .percentiles(0, 10, 15, 100) + ) + .get(); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(0L)); @@ -161,13 +164,15 @@ public void testUnmapped() throws Exception { public void testSingleValuedField() throws Exception { final double[] pcts = randomPercentiles(); int sigDigits = randomIntBetween(1, 5); - SearchResponse searchResponse = client() - .prepareSearch("idx") - .setQuery(matchAllQuery()) - .addAggregation( - percentiles("percentiles").numberOfSignificantValueDigits(sigDigits).method(PercentilesMethod.HDR).field("value") - .percentiles(pcts)) - .get(); + SearchResponse searchResponse = client().prepareSearch("idx") + .setQuery(matchAllQuery()) + .addAggregation( + percentiles("percentiles").numberOfSignificantValueDigits(sigDigits) + .method(PercentilesMethod.HDR) + .field("value") + .percentiles(pcts) + ) + .get(); assertHitCount(searchResponse, 10); @@ -179,14 +184,17 @@ public void testSingleValuedField() throws Exception { public void testSingleValuedFieldGetProperty() throws Exception { final double[] pcts = randomPercentiles(); int sigDigits = randomSignificantDigits(); - SearchResponse 
searchResponse = client() - .prepareSearch("idx") - .setQuery(matchAllQuery()) - .addAggregation( - global("global").subAggregation( - percentiles("percentiles").numberOfSignificantValueDigits(sigDigits).method(PercentilesMethod.HDR) - .field("value") - .percentiles(pcts))).get(); + SearchResponse searchResponse = client().prepareSearch("idx") + .setQuery(matchAllQuery()) + .addAggregation( + global("global").subAggregation( + percentiles("percentiles").numberOfSignificantValueDigits(sigDigits) + .method(PercentilesMethod.HDR) + .field("value") + .percentiles(pcts) + ) + ) + .get(); assertHitCount(searchResponse, 10); @@ -200,7 +208,7 @@ public void testSingleValuedFieldGetProperty() throws Exception { Percentiles percentiles = global.getAggregations().get("percentiles"); assertThat(percentiles, notNullValue()); assertThat(percentiles.getName(), equalTo("percentiles")); - assertThat(((InternalAggregation)global).getProperty("percentiles"), sameInstance(percentiles)); + assertThat(((InternalAggregation) global).getProperty("percentiles"), sameInstance(percentiles)); } @@ -208,13 +216,15 @@ public void testSingleValuedFieldGetProperty() throws Exception { public void testSingleValuedFieldPartiallyUnmapped() throws Exception { final double[] pcts = randomPercentiles(); int sigDigits = randomSignificantDigits(); - SearchResponse searchResponse = client() - .prepareSearch("idx", "idx_unmapped") - .setQuery(matchAllQuery()) - .addAggregation( - percentiles("percentiles").numberOfSignificantValueDigits(sigDigits).method(PercentilesMethod.HDR).field("value") - .percentiles(pcts)) - .get(); + SearchResponse searchResponse = client().prepareSearch("idx", "idx_unmapped") + .setQuery(matchAllQuery()) + .addAggregation( + percentiles("percentiles").numberOfSignificantValueDigits(sigDigits) + .method(PercentilesMethod.HDR) + .field("value") + .percentiles(pcts) + ) + .get(); assertHitCount(searchResponse, 10); @@ -226,17 +236,16 @@ public void testSingleValuedFieldPartiallyUnmapped() throws Exception { public void testSingleValuedFieldWithValueScript() throws Exception { final double[] pcts = randomPercentiles(); int sigDigits = randomSignificantDigits(); - SearchResponse searchResponse = client() - .prepareSearch("idx") - .setQuery(matchAllQuery()) - .addAggregation( - percentiles("percentiles") - .numberOfSignificantValueDigits(sigDigits) - .method(PercentilesMethod.HDR) - .field("value") - .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value - 1", emptyMap())) - .percentiles(pcts)) - .get(); + SearchResponse searchResponse = client().prepareSearch("idx") + .setQuery(matchAllQuery()) + .addAggregation( + percentiles("percentiles").numberOfSignificantValueDigits(sigDigits) + .method(PercentilesMethod.HDR) + .field("value") + .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value - 1", emptyMap())) + .percentiles(pcts) + ) + .get(); assertHitCount(searchResponse, 10); @@ -251,17 +260,16 @@ public void testSingleValuedFieldWithValueScriptWithParams() throws Exception { final double[] pcts = randomPercentiles(); int sigDigits = randomSignificantDigits(); - SearchResponse searchResponse = client() - .prepareSearch("idx") - .setQuery(matchAllQuery()) - .addAggregation( - percentiles("percentiles") - .numberOfSignificantValueDigits(sigDigits) - .method(PercentilesMethod.HDR) - .field("value") - .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value - dec", params)) - .percentiles(pcts)) - .get(); + SearchResponse searchResponse = 
client().prepareSearch("idx") + .setQuery(matchAllQuery()) + .addAggregation( + percentiles("percentiles").numberOfSignificantValueDigits(sigDigits) + .method(PercentilesMethod.HDR) + .field("value") + .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value - dec", params)) + .percentiles(pcts) + ) + .get(); assertHitCount(searchResponse, 10); @@ -273,13 +281,15 @@ public void testSingleValuedFieldWithValueScriptWithParams() throws Exception { public void testMultiValuedField() throws Exception { final double[] pcts = randomPercentiles(); int sigDigits = randomSignificantDigits(); - SearchResponse searchResponse = client() - .prepareSearch("idx") - .setQuery(matchAllQuery()) - .addAggregation( - percentiles("percentiles").numberOfSignificantValueDigits(sigDigits).method(PercentilesMethod.HDR).field("values") - .percentiles(pcts)) - .get(); + SearchResponse searchResponse = client().prepareSearch("idx") + .setQuery(matchAllQuery()) + .addAggregation( + percentiles("percentiles").numberOfSignificantValueDigits(sigDigits) + .method(PercentilesMethod.HDR) + .field("values") + .percentiles(pcts) + ) + .get(); assertHitCount(searchResponse, 10); @@ -291,17 +301,16 @@ public void testMultiValuedField() throws Exception { public void testMultiValuedFieldWithValueScript() throws Exception { final double[] pcts = randomPercentiles(); int sigDigits = randomSignificantDigits(); - SearchResponse searchResponse = client() - .prepareSearch("idx") - .setQuery(matchAllQuery()) - .addAggregation( - percentiles("percentiles") - .numberOfSignificantValueDigits(sigDigits) - .method(PercentilesMethod.HDR) - .field("values") - .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value - 1", emptyMap())) - .percentiles(pcts)) - .get(); + SearchResponse searchResponse = client().prepareSearch("idx") + .setQuery(matchAllQuery()) + .addAggregation( + percentiles("percentiles").numberOfSignificantValueDigits(sigDigits) + .method(PercentilesMethod.HDR) + .field("values") + .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value - 1", emptyMap())) + .percentiles(pcts) + ) + .get(); assertHitCount(searchResponse, 10); @@ -312,17 +321,16 @@ public void testMultiValuedFieldWithValueScript() throws Exception { public void testMultiValuedFieldWithValueScriptReverse() throws Exception { final double[] pcts = randomPercentiles(); int sigDigits = randomSignificantDigits(); - SearchResponse searchResponse = client() - .prepareSearch("idx") - .setQuery(matchAllQuery()) - .addAggregation( - percentiles("percentiles") - .numberOfSignificantValueDigits(sigDigits) - .method(PercentilesMethod.HDR) - .field("values") - .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "20 - _value", emptyMap())) - .percentiles(pcts)) - .get(); + SearchResponse searchResponse = client().prepareSearch("idx") + .setQuery(matchAllQuery()) + .addAggregation( + percentiles("percentiles").numberOfSignificantValueDigits(sigDigits) + .method(PercentilesMethod.HDR) + .field("values") + .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "20 - _value", emptyMap())) + .percentiles(pcts) + ) + .get(); assertHitCount(searchResponse, 10); @@ -337,17 +345,16 @@ public void testMultiValuedFieldWithValueScriptWithParams() throws Exception { final double[] pcts = randomPercentiles(); int sigDigits = randomSignificantDigits(); - SearchResponse searchResponse = client() - .prepareSearch("idx") - .setQuery(matchAllQuery()) - .addAggregation( - 
percentiles("percentiles") - .numberOfSignificantValueDigits(sigDigits) - .method(PercentilesMethod.HDR) - .field("values") - .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value - dec", params)) - .percentiles(pcts)) - .get(); + SearchResponse searchResponse = client().prepareSearch("idx") + .setQuery(matchAllQuery()) + .addAggregation( + percentiles("percentiles").numberOfSignificantValueDigits(sigDigits) + .method(PercentilesMethod.HDR) + .field("values") + .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value - dec", params)) + .percentiles(pcts) + ) + .get(); assertHitCount(searchResponse, 10); @@ -359,16 +366,15 @@ public void testMultiValuedFieldWithValueScriptWithParams() throws Exception { public void testScriptSingleValued() throws Exception { final double[] pcts = randomPercentiles(); int sigDigits = randomSignificantDigits(); - SearchResponse searchResponse = client() - .prepareSearch("idx") - .setQuery(matchAllQuery()) - .addAggregation( - percentiles("percentiles") - .numberOfSignificantValueDigits(sigDigits) - .method(PercentilesMethod.HDR) - .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "doc['value'].value", emptyMap())) - .percentiles(pcts)) - .get(); + SearchResponse searchResponse = client().prepareSearch("idx") + .setQuery(matchAllQuery()) + .addAggregation( + percentiles("percentiles").numberOfSignificantValueDigits(sigDigits) + .method(PercentilesMethod.HDR) + .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "doc['value'].value", emptyMap())) + .percentiles(pcts) + ) + .get(); assertHitCount(searchResponse, 10); @@ -385,16 +391,15 @@ public void testScriptSingleValuedWithParams() throws Exception { final double[] pcts = randomPercentiles(); int sigDigits = randomSignificantDigits(); - SearchResponse searchResponse = client() - .prepareSearch("idx") - .setQuery(matchAllQuery()) - .addAggregation( - percentiles("percentiles") - .numberOfSignificantValueDigits(sigDigits) - .method(PercentilesMethod.HDR) - .script(script) - .percentiles(pcts)) - .get(); + SearchResponse searchResponse = client().prepareSearch("idx") + .setQuery(matchAllQuery()) + .addAggregation( + percentiles("percentiles").numberOfSignificantValueDigits(sigDigits) + .method(PercentilesMethod.HDR) + .script(script) + .percentiles(pcts) + ) + .get(); assertHitCount(searchResponse, 10); @@ -409,16 +414,15 @@ public void testScriptMultiValued() throws Exception { Script script = new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "doc['values']", emptyMap()); - SearchResponse searchResponse = client() - .prepareSearch("idx") - .setQuery(matchAllQuery()) - .addAggregation( - percentiles("percentiles") - .numberOfSignificantValueDigits(sigDigits) - .method(PercentilesMethod.HDR) - .script(script) - .percentiles(pcts)) - .get(); + SearchResponse searchResponse = client().prepareSearch("idx") + .setQuery(matchAllQuery()) + .addAggregation( + percentiles("percentiles").numberOfSignificantValueDigits(sigDigits) + .method(PercentilesMethod.HDR) + .script(script) + .percentiles(pcts) + ) + .get(); assertHitCount(searchResponse, 10); @@ -432,16 +436,15 @@ public void testScriptMultiValuedWithParams() throws Exception { final double[] pcts = randomPercentiles(); int sigDigits = randomSignificantDigits(); - SearchResponse searchResponse = client() - .prepareSearch("idx") - .setQuery(matchAllQuery()) - .addAggregation( - percentiles("percentiles") - .numberOfSignificantValueDigits(sigDigits) - 
.method(PercentilesMethod.HDR) - .script(script) - .percentiles(pcts)) - .get(); + SearchResponse searchResponse = client().prepareSearch("idx") + .setQuery(matchAllQuery()) + .addAggregation( + percentiles("percentiles").numberOfSignificantValueDigits(sigDigits) + .method(PercentilesMethod.HDR) + .script(script) + .percentiles(pcts) + ) + .get(); assertHitCount(searchResponse, 10); @@ -452,17 +455,20 @@ public void testScriptMultiValuedWithParams() throws Exception { public void testOrderBySubAggregation() { int sigDigits = randomSignificantDigits(); boolean asc = randomBoolean(); - SearchResponse searchResponse = client() - .prepareSearch("idx") - .setQuery(matchAllQuery()) - .addAggregation( - histogram("histo").field("value").interval(2L) - .subAggregation( - percentiles("percentiles").field("value") - .method(PercentilesMethod.HDR) - .numberOfSignificantValueDigits(sigDigits) - .percentiles(99)) - .order(BucketOrder.aggregation("percentiles", "99", asc))).get(); + SearchResponse searchResponse = client().prepareSearch("idx") + .setQuery(matchAllQuery()) + .addAggregation( + histogram("histo").field("value") + .interval(2L) + .subAggregation( + percentiles("percentiles").field("value") + .method(PercentilesMethod.HDR) + .numberOfSignificantValueDigits(sigDigits) + .percentiles(99) + ) + .order(BucketOrder.aggregation("percentiles", "99", asc)) + ) + .get(); assertHitCount(searchResponse, 10); @@ -483,12 +489,17 @@ public void testOrderBySubAggregation() { @Override public void testOrderByEmptyAggregation() throws Exception { SearchResponse searchResponse = client().prepareSearch("idx") - .setQuery(matchAllQuery()) - .addAggregation( - terms("terms").field("value").order(BucketOrder.compound(BucketOrder.aggregation("filter>percentiles.99", true))) - .subAggregation(filter("filter", termQuery("value", 100)) - .subAggregation(percentiles("percentiles").method(PercentilesMethod.HDR).field("value")))) - .get(); + .setQuery(matchAllQuery()) + .addAggregation( + terms("terms").field("value") + .order(BucketOrder.compound(BucketOrder.aggregation("filter>percentiles.99", true))) + .subAggregation( + filter("filter", termQuery("value", 100)).subAggregation( + percentiles("percentiles").method(PercentilesMethod.HDR).field("value") + ) + ) + ) + .get(); assertHitCount(searchResponse, 10); @@ -518,51 +529,140 @@ public void testOrderByEmptyAggregation() throws Exception { * Ensure requests using nondeterministic scripts do not get cached. 
*/ public void testScriptCaching() throws Exception { - assertAcked(prepareCreate("cache_test_idx").setMapping("d", "type=long") + assertAcked( + prepareCreate("cache_test_idx").setMapping("d", "type=long") .setSettings(Settings.builder().put("requests.cache.enable", true).put("number_of_shards", 1).put("number_of_replicas", 1)) - .get()); - indexRandom(true, client().prepareIndex("cache_test_idx").setId("1").setSource("s", 1), - client().prepareIndex("cache_test_idx").setId("2").setSource("s", 2)); + .get() + ); + indexRandom( + true, + client().prepareIndex("cache_test_idx").setId("1").setSource("s", 1), + client().prepareIndex("cache_test_idx").setId("2").setSource("s", 2) + ); // Make sure we are starting with a clear cache - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getHitCount(), equalTo(0L)); - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getMissCount(), equalTo(0L)); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getHitCount(), + equalTo(0L) + ); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getMissCount(), + equalTo(0L) + ); // Test that a request using a nondeterministic script does not get cached - SearchResponse r = client().prepareSearch("cache_test_idx").setSize(0) - .addAggregation(percentiles("foo").method(PercentilesMethod.HDR).field("d").percentiles(50.0) - .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "Math.random()", emptyMap()))) - .get(); + SearchResponse r = client().prepareSearch("cache_test_idx") + .setSize(0) + .addAggregation( + percentiles("foo").method(PercentilesMethod.HDR) + .field("d") + .percentiles(50.0) + .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "Math.random()", emptyMap())) + ) + .get(); assertSearchResponse(r); - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getHitCount(), equalTo(0L)); - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getMissCount(), equalTo(0L)); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getHitCount(), + equalTo(0L) + ); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getMissCount(), + equalTo(0L) + ); // Test that a request using a deterministic script gets cached - r = client().prepareSearch("cache_test_idx").setSize(0) - .addAggregation(percentiles("foo").method(PercentilesMethod.HDR).field("d").percentiles(50.0) - .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value - 1", emptyMap()))) - .get(); + r = client().prepareSearch("cache_test_idx") + .setSize(0) + .addAggregation( + percentiles("foo").method(PercentilesMethod.HDR) + .field("d") + .percentiles(50.0) + .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value - 1", emptyMap())) + ) + .get(); assertSearchResponse(r); - 
assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getHitCount(), equalTo(0L)); - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getMissCount(), equalTo(1L)); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getHitCount(), + equalTo(0L) + ); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getMissCount(), + equalTo(1L) + ); // Ensure that non-scripted requests are cached as normal - r = client().prepareSearch("cache_test_idx").setSize(0) - .addAggregation(percentiles("foo").method(PercentilesMethod.HDR).field("d").percentiles(50.0)).get(); + r = client().prepareSearch("cache_test_idx") + .setSize(0) + .addAggregation(percentiles("foo").method(PercentilesMethod.HDR).field("d").percentiles(50.0)) + .get(); assertSearchResponse(r); - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getHitCount(), equalTo(0L)); - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getMissCount(), equalTo(2L)); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getHitCount(), + equalTo(0L) + ); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getMissCount(), + equalTo(2L) + ); } } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/MedianAbsoluteDeviationIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/MedianAbsoluteDeviationIT.java index f85b5fd9c4de0..ba66cd18d53ad 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/MedianAbsoluteDeviationIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/MedianAbsoluteDeviationIT.java @@ -44,8 +44,8 @@ import static org.elasticsearch.search.aggregations.AggregationBuilders.histogram; import static org.elasticsearch.search.aggregations.AggregationBuilders.range; import static org.elasticsearch.search.aggregations.AggregationBuilders.terms; -import static org.elasticsearch.search.aggregations.metrics.MedianAbsoluteDeviationAggregatorTests.IsCloseToRelative.closeToRelative; import static org.elasticsearch.search.aggregations.metrics.MedianAbsoluteDeviationAggregatorTests.ExactMedianAbsoluteDeviation.calculateMAD; +import static org.elasticsearch.search.aggregations.metrics.MedianAbsoluteDeviationAggregatorTests.IsCloseToRelative.closeToRelative; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; @@ -69,10 +69,7 @@ public class MedianAbsoluteDeviationIT extends AbstractNumericTestCase { @Override public void setupSuiteScopeCluster() throws Exception { - final Settings settings = Settings.builder() - .put(SETTING_NUMBER_OF_SHARDS, 1) - .put(SETTING_NUMBER_OF_REPLICAS, 0) - .build(); + final 
Settings settings = Settings.builder().put(SETTING_NUMBER_OF_SHARDS, 1).put(SETTING_NUMBER_OF_REPLICAS, 0).build(); createIndex("idx", settings); createIndex("idx_unmapped", settings); @@ -96,15 +93,17 @@ public void setupSuiteScopeCluster() throws Exception { multiValueSample[i * 2] = firstMultiValueDatapoint; multiValueSample[(i * 2) + 1] = secondMultiValueDatapoint; - IndexRequestBuilder builder = client().prepareIndex("idx").setId(String.valueOf(i)) - .setSource(jsonBuilder() - .startObject() + IndexRequestBuilder builder = client().prepareIndex("idx") + .setId(String.valueOf(i)) + .setSource( + jsonBuilder().startObject() .field("value", singleValueDatapoint) .startArray("values") - .value(firstMultiValueDatapoint) - .value(secondMultiValueDatapoint) + .value(firstMultiValueDatapoint) + .value(secondMultiValueDatapoint) .endArray() - .endObject()); + .endObject() + ); builders.add(builder); } @@ -114,16 +113,15 @@ public void setupSuiteScopeCluster() throws Exception { indexRandom(true, builders); - prepareCreate("empty_bucket_idx") - .setMapping("value", "type=integer") - .get(); + prepareCreate("empty_bucket_idx").setMapping("value", "type=integer").get(); builders = new ArrayList<>(); for (int i = 0; i < 2; i++) { - builders.add(client().prepareIndex("empty_bucket_idx").setId(String.valueOf(i)).setSource(jsonBuilder() - .startObject() - .field("value", i*2) - .endObject())); + builders.add( + client().prepareIndex("empty_bucket_idx") + .setId(String.valueOf(i)) + .setSource(jsonBuilder().startObject().field("value", i * 2).endObject()) + ); } indexRandom(true, builders); ensureSearchable(); @@ -144,16 +142,8 @@ private static MedianAbsoluteDeviationAggregationBuilder randomBuilder() { @Override public void testEmptyAggregation() throws Exception { - final SearchResponse response = client() - .prepareSearch("empty_bucket_idx") - .addAggregation( - histogram("histogram") - .field("value") - .interval(1) - .minDocCount(0) - .subAggregation( - randomBuilder() - .field("value"))) + final SearchResponse response = client().prepareSearch("empty_bucket_idx") + .addAggregation(histogram("histogram").field("value").interval(1).minDocCount(0).subAggregation(randomBuilder().field("value"))) .get(); assertHitCount(response, 2); @@ -176,12 +166,9 @@ public void testUnmapped() throws Exception { @Override public void testSingleValuedField() throws Exception { - final SearchResponse response = client() - .prepareSearch("idx") + final SearchResponse response = client().prepareSearch("idx") .setQuery(matchAllQuery()) - .addAggregation( - randomBuilder() - .field("value")) + .addAggregation(randomBuilder().field("value")) .get(); assertHitCount(response, NUMBER_OF_DOCS); @@ -194,14 +181,9 @@ public void testSingleValuedField() throws Exception { @Override public void testSingleValuedFieldGetProperty() throws Exception { - final SearchResponse response = client() - .prepareSearch("idx") + final SearchResponse response = client().prepareSearch("idx") .setQuery(matchAllQuery()) - .addAggregation( - global("global") - .subAggregation( - randomBuilder() - .field("value"))) + .addAggregation(global("global").subAggregation(randomBuilder().field("value"))) .get(); assertHitCount(response, NUMBER_OF_DOCS); @@ -221,12 +203,9 @@ public void testSingleValuedFieldGetProperty() throws Exception { @Override public void testSingleValuedFieldPartiallyUnmapped() throws Exception { - final SearchResponse response = client() - .prepareSearch("idx", "idx_unmapped") + final SearchResponse response = 
client().prepareSearch("idx", "idx_unmapped") .setQuery(matchAllQuery()) - .addAggregation( - randomBuilder() - .field("value")) + .addAggregation(randomBuilder().field("value")) .get(); assertHitCount(response, NUMBER_OF_DOCS); @@ -239,13 +218,12 @@ public void testSingleValuedFieldPartiallyUnmapped() throws Exception { @Override public void testSingleValuedFieldWithValueScript() throws Exception { - final SearchResponse response = client() - .prepareSearch("idx") + final SearchResponse response = client().prepareSearch("idx") .setQuery(matchAllQuery()) .addAggregation( - randomBuilder() - .field("value") - .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value + 1", Collections.emptyMap()))) + randomBuilder().field("value") + .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value + 1", Collections.emptyMap())) + ) .get(); assertHitCount(response, NUMBER_OF_DOCS); @@ -254,9 +232,7 @@ public void testSingleValuedFieldWithValueScript() throws Exception { assertThat(mad, notNullValue()); assertThat(mad.getName(), is("mad")); - final double fromIncrementedSampleMAD = calculateMAD(Arrays.stream(singleValueSample) - .map(point -> point + 1) - .toArray()); + final double fromIncrementedSampleMAD = calculateMAD(Arrays.stream(singleValueSample).map(point -> point + 1).toArray()); assertThat(mad.getMedianAbsoluteDeviation(), closeToRelative(fromIncrementedSampleMAD)); } @@ -265,13 +241,12 @@ public void testSingleValuedFieldWithValueScriptWithParams() throws Exception { final Map params = new HashMap<>(); params.put("inc", 1); - final SearchResponse response = client() - .prepareSearch("idx") + final SearchResponse response = client().prepareSearch("idx") .setQuery(matchAllQuery()) .addAggregation( - randomBuilder() - .field("value") - .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value + inc", params))) + randomBuilder().field("value") + .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value + inc", params)) + ) .get(); assertHitCount(response, NUMBER_OF_DOCS); @@ -280,20 +255,15 @@ public void testSingleValuedFieldWithValueScriptWithParams() throws Exception { assertThat(mad, notNullValue()); assertThat(mad.getName(), is("mad")); - final double fromIncrementedSampleMAD = calculateMAD(Arrays.stream(singleValueSample) - .map(point -> point + 1) - .toArray()); + final double fromIncrementedSampleMAD = calculateMAD(Arrays.stream(singleValueSample).map(point -> point + 1).toArray()); assertThat(mad.getMedianAbsoluteDeviation(), closeToRelative(fromIncrementedSampleMAD)); } @Override public void testMultiValuedField() throws Exception { - final SearchResponse response = client() - .prepareSearch("idx") + final SearchResponse response = client().prepareSearch("idx") .setQuery(matchAllQuery()) - .addAggregation( - randomBuilder() - .field("values")) + .addAggregation(randomBuilder().field("values")) .get(); assertHitCount(response, NUMBER_OF_DOCS); @@ -306,13 +276,12 @@ public void testMultiValuedField() throws Exception { @Override public void testMultiValuedFieldWithValueScript() throws Exception { - final SearchResponse response = client() - .prepareSearch("idx") + final SearchResponse response = client().prepareSearch("idx") .setQuery(matchAllQuery()) .addAggregation( - randomBuilder() - .field("values") - .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value + 1", Collections.emptyMap()))) + randomBuilder().field("values") + .script(new Script(ScriptType.INLINE, 
AggregationTestScriptsPlugin.NAME, "_value + 1", Collections.emptyMap())) + ) .get(); assertHitCount(response, NUMBER_OF_DOCS); @@ -320,9 +289,7 @@ public void testMultiValuedFieldWithValueScript() throws Exception { final MedianAbsoluteDeviation mad = response.getAggregations().get("mad"); assertThat(mad, notNullValue()); - final double fromIncrementedSampleMAD = calculateMAD(Arrays.stream(multiValueSample) - .map(point -> point + 1) - .toArray()); + final double fromIncrementedSampleMAD = calculateMAD(Arrays.stream(multiValueSample).map(point -> point + 1).toArray()); assertThat(mad.getMedianAbsoluteDeviation(), closeToRelative(fromIncrementedSampleMAD)); } @@ -331,13 +298,12 @@ public void testMultiValuedFieldWithValueScriptWithParams() throws Exception { final Map params = new HashMap<>(); params.put("inc", 1); - final SearchResponse response = client() - .prepareSearch("idx") + final SearchResponse response = client().prepareSearch("idx") .setQuery(matchAllQuery()) .addAggregation( - randomBuilder() - .field("values") - .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value + inc", params))) + randomBuilder().field("values") + .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value + inc", params)) + ) .get(); assertHitCount(response, NUMBER_OF_DOCS); @@ -345,20 +311,19 @@ public void testMultiValuedFieldWithValueScriptWithParams() throws Exception { final MedianAbsoluteDeviation mad = response.getAggregations().get("mad"); assertThat(mad, notNullValue()); - final double fromIncrementedSampleMAD = calculateMAD(Arrays.stream(multiValueSample) - .map(point -> point + 1) - .toArray()); + final double fromIncrementedSampleMAD = calculateMAD(Arrays.stream(multiValueSample).map(point -> point + 1).toArray()); assertThat(mad.getMedianAbsoluteDeviation(), closeToRelative(fromIncrementedSampleMAD)); } @Override public void testScriptSingleValued() throws Exception { - final SearchResponse response = client() - .prepareSearch("idx") + final SearchResponse response = client().prepareSearch("idx") .setQuery(matchAllQuery()) .addAggregation( - randomBuilder() - .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "doc['value'].value", Collections.emptyMap()))) + randomBuilder().script( + new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "doc['value'].value", Collections.emptyMap()) + ) + ) .get(); assertHitCount(response, NUMBER_OF_DOCS); @@ -374,12 +339,11 @@ public void testScriptSingleValuedWithParams() throws Exception { final Map params = new HashMap<>(); params.put("inc", 1); - final SearchResponse response = client() - .prepareSearch("idx") + final SearchResponse response = client().prepareSearch("idx") .setQuery(matchAllQuery()) .addAggregation( - randomBuilder() - .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "doc['value'].value + inc", params))) + randomBuilder().script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "doc['value'].value + inc", params)) + ) .get(); assertHitCount(response, NUMBER_OF_DOCS); @@ -388,24 +352,19 @@ public void testScriptSingleValuedWithParams() throws Exception { assertThat(mad, notNullValue()); assertThat(mad.getName(), is("mad")); - final double fromIncrementedSampleMAD = calculateMAD(Arrays.stream(singleValueSample) - .map(point -> point + 1) - .toArray()); + final double fromIncrementedSampleMAD = calculateMAD(Arrays.stream(singleValueSample).map(point -> point + 1).toArray()); assertThat(mad.getMedianAbsoluteDeviation(), 
closeToRelative(fromIncrementedSampleMAD)); } @Override public void testScriptMultiValued() throws Exception { - final SearchResponse response = client() - .prepareSearch("idx") + final SearchResponse response = client().prepareSearch("idx") .setQuery(matchAllQuery()) .addAggregation( - randomBuilder() - .script(new Script( - ScriptType.INLINE, - AggregationTestScriptsPlugin.NAME, - "doc['values']", - Collections.emptyMap()))) + randomBuilder().script( + new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "doc['values']", Collections.emptyMap()) + ) + ) .get(); assertHitCount(response, NUMBER_OF_DOCS); @@ -421,16 +380,18 @@ public void testScriptMultiValuedWithParams() throws Exception { final Map params = new HashMap<>(); params.put("inc", 1); - final SearchResponse response = client() - .prepareSearch("idx") + final SearchResponse response = client().prepareSearch("idx") .setQuery(matchAllQuery()) .addAggregation( - randomBuilder() - .script(new Script( + randomBuilder().script( + new Script( ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "[ doc['value'].value, doc['value'].value + inc ]", - params))) + params + ) + ) + ) .get(); assertHitCount(response, NUMBER_OF_DOCS); @@ -439,25 +400,22 @@ public void testScriptMultiValuedWithParams() throws Exception { assertThat(mad, notNullValue()); assertThat(mad.getName(), is("mad")); - final double fromIncrementedSampleMAD = calculateMAD(Arrays.stream(singleValueSample) - .flatMap(point -> LongStream.of(point, point + 1)) - .toArray()); + final double fromIncrementedSampleMAD = calculateMAD( + Arrays.stream(singleValueSample).flatMap(point -> LongStream.of(point, point + 1)).toArray() + ); assertThat(mad.getMedianAbsoluteDeviation(), closeToRelative(fromIncrementedSampleMAD)); } public void testAsSubAggregation() throws Exception { final int rangeBoundary = (MAX_SAMPLE_VALUE + MIN_SAMPLE_VALUE) / 2; - final SearchResponse response = client() - .prepareSearch("idx") + final SearchResponse response = client().prepareSearch("idx") .setQuery(matchAllQuery()) .addAggregation( - range("range") - .field("value") + range("range").field("value") .addRange(MIN_SAMPLE_VALUE, rangeBoundary) .addRange(rangeBoundary, MAX_SAMPLE_VALUE) - .subAggregation( - randomBuilder() - .field("value"))) + .subAggregation(randomBuilder().field("value")) + ) .get(); assertHitCount(response, NUMBER_OF_DOCS); @@ -494,19 +452,16 @@ public void testAsSubAggregation() throws Exception { @Override public void testOrderByEmptyAggregation() throws Exception { final int numberOfBuckets = 10; - final SearchResponse response = client() - .prepareSearch("idx") + final SearchResponse response = client().prepareSearch("idx") .setQuery(matchAllQuery()) .addAggregation( - terms("terms") - .field("value") + terms("terms").field("value") .size(numberOfBuckets) .order(BucketOrder.compound(BucketOrder.aggregation("filter>mad", true))) .subAggregation( - filter("filter", termQuery("value", MAX_SAMPLE_VALUE + 1)) - .subAggregation( - randomBuilder() - .field("value")))) + filter("filter", termQuery("value", MAX_SAMPLE_VALUE + 1)).subAggregation(randomBuilder().field("value")) + ) + ) .get(); assertHitCount(response, NUMBER_OF_DOCS); @@ -537,55 +492,132 @@ public void testOrderByEmptyAggregation() throws Exception { */ public void testScriptCaching() throws Exception { assertAcked( - prepareCreate("cache_test_idx") - .setMapping("d", "type=long") - .setSettings(Settings.builder() - .put("requests.cache.enable", true) - .put("number_of_shards", 1) - .put("number_of_replicas", 
1)) - .get()); - - indexRandom(true, + prepareCreate("cache_test_idx").setMapping("d", "type=long") + .setSettings(Settings.builder().put("requests.cache.enable", true).put("number_of_shards", 1).put("number_of_replicas", 1)) + .get() + ); + + indexRandom( + true, client().prepareIndex("cache_test_idx").setId("1").setSource("s", 1), - client().prepareIndex("cache_test_idx").setId("2").setSource("s", 2)); + client().prepareIndex("cache_test_idx").setId("2").setSource("s", 2) + ); // Make sure we are starting with a clear cache - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getHitCount(), equalTo(0L)); - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getMissCount(), equalTo(0L)); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getHitCount(), + equalTo(0L) + ); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getMissCount(), + equalTo(0L) + ); // Test that a request using a nondeterministic script does not get cached - SearchResponse r = client().prepareSearch("cache_test_idx").setSize(0) - .addAggregation(randomBuilder() - .field("d") - .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "Math.random()", emptyMap()))).get(); + SearchResponse r = client().prepareSearch("cache_test_idx") + .setSize(0) + .addAggregation( + randomBuilder().field("d") + .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "Math.random()", emptyMap())) + ) + .get(); assertSearchResponse(r); - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getHitCount(), equalTo(0L)); - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getMissCount(), equalTo(0L)); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getHitCount(), + equalTo(0L) + ); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getMissCount(), + equalTo(0L) + ); // Test that a request using a deterministic script gets cached - r = client().prepareSearch("cache_test_idx").setSize(0) - .addAggregation(randomBuilder() - .field("d") - .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value - 1", emptyMap()))).get(); + r = client().prepareSearch("cache_test_idx") + .setSize(0) + .addAggregation( + randomBuilder().field("d") + .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value - 1", emptyMap())) + ) + .get(); assertSearchResponse(r); - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getHitCount(), equalTo(0L)); - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getMissCount(), equalTo(1L)); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getHitCount(), + equalTo(0L) + ); + assertThat( + 
client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getMissCount(), + equalTo(1L) + ); // Ensure that non-scripted requests are cached as normal r = client().prepareSearch("cache_test_idx").setSize(0).addAggregation(randomBuilder().field("d")).get(); assertSearchResponse(r); - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getHitCount(), equalTo(0L)); - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getMissCount(), equalTo(2L)); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getHitCount(), + equalTo(0L) + ); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getMissCount(), + equalTo(2L) + ); } } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricIT.java index d3c8795b0d884..b4ccec896eca4 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricIT.java @@ -79,22 +79,27 @@ public static class CustomScriptPlugin extends MockScriptPlugin { protected Map, Object>> pluginScripts() { Map, Object>> scripts = new HashMap<>(); - scripts.put("state['count'] = 1", vars -> - aggScript(vars, state -> state.put("count", 1))); - - scripts.put("state.list.add(1)", vars -> - aggScript(vars, state -> { - // Lazily populate state.list for tests without an init script - if (state.containsKey("list") == false) { - state.put("list", new ArrayList<>()); - } + scripts.put("state['count'] = 1", vars -> aggScript(vars, state -> state.put("count", 1))); + + scripts.put("state.list.add(1)", vars -> aggScript(vars, state -> { + // Lazily populate state.list for tests without an init script + if (state.containsKey("list") == false) { + state.put("list", new ArrayList<>()); + } - ((List) state.get("list")).add(1); - })); + ((List) state.get("list")).add(1); + })); - scripts.put("state[param1] = param2", vars -> - aggScript(vars, state -> state.put((String) XContentMapValues.extractValue("params.param1", vars), - XContentMapValues.extractValue("params.param2", vars)))); + scripts.put( + "state[param1] = param2", + vars -> aggScript( + vars, + state -> state.put( + (String) XContentMapValues.extractValue("params.param1", vars), + XContentMapValues.extractValue("params.param2", vars) + ) + ) + ); scripts.put("vars.multiplier = 3", vars -> { ((Map) vars.get("vars")).put("multiplier", 3); @@ -105,15 +110,14 @@ protected Map, Object>> pluginScripts() { return state; }); - scripts.put("state.list.add(vars.multiplier)", vars -> - aggScript(vars, state -> { - // Lazily populate state.list for tests without an init script - if (state.containsKey("list") == false) { - state.put("list", new ArrayList<>()); - } + scripts.put("state.list.add(vars.multiplier)", vars -> aggScript(vars, state -> { + // Lazily populate state.list for tests without an init script + if (state.containsKey("list") == false) { + state.put("list", new ArrayList<>()); + } - ((List) 
state.get("list")).add(XContentMapValues.extractValue("vars.multiplier", vars)); - })); + ((List) state.get("list")).add(XContentMapValues.extractValue("vars.multiplier", vars)); + })); /* * Equivalent to: @@ -184,7 +188,7 @@ protected Map, Object>> pluginScripts() { List> states = (List>) vars.get("states"); - if(states == null) { + if (states == null) { return newAggregation; } @@ -240,13 +244,12 @@ protected Map, Object>> pluginScripts() { protected Map, Object>> nonDeterministicPluginScripts() { Map, Object>> scripts = new HashMap<>(); - scripts.put("state.data = Math.random()", vars -> - aggScript(vars, state -> state.put("data", ScriptedMetricIT.randomDouble()))); - - - scripts.put("state['count'] = Math.random() >= 0.5 ? 1 : 0", vars -> - aggScript(vars, state -> state.put("count", ScriptedMetricIT.randomDouble() >= 0.5 ? 1 : 0))); + scripts.put("state.data = Math.random()", vars -> aggScript(vars, state -> state.put("data", ScriptedMetricIT.randomDouble()))); + scripts.put( + "state['count'] = Math.random() >= 0.5 ? 1 : 0", + vars -> aggScript(vars, state -> state.put("count", ScriptedMetricIT.randomDouble() >= 0.5 ? 1 : 0)) + ); scripts.put("return Math.random()", vars -> ScriptedMetricIT.randomDouble()); @@ -269,9 +272,13 @@ public void setupSuiteScopeCluster() throws Exception { numDocs = randomIntBetween(10, 100); for (int i = 0; i < numDocs; i++) { - builders.add(client().prepareIndex("idx").setId("" + i).setSource( - jsonBuilder().startObject().field("value", randomAlphaOfLengthBetween(5, 15)) - .field("l_value", i).endObject())); + builders.add( + client().prepareIndex("idx") + .setId("" + i) + .setSource( + jsonBuilder().startObject().field("value", randomAlphaOfLengthBetween(5, 15)).field("l_value", i).endObject() + ) + ); } indexRandom(true, builders); @@ -286,32 +293,73 @@ public void setupSuiteScopeCluster() throws Exception { prepareCreate("empty_bucket_idx").setMapping("value", "type=integer").get(); builders = new ArrayList<>(); for (int i = 0; i < 2; i++) { - builders.add(client().prepareIndex("empty_bucket_idx").setId("" + i).setSource( - jsonBuilder().startObject().field("value", i * 2).endObject())); + builders.add( + client().prepareIndex("empty_bucket_idx") + .setId("" + i) + .setSource(jsonBuilder().startObject().field("value", i * 2).endObject()) + ); } // When using the MockScriptPlugin we can map Stored scripts to inline scripts: // the id of the stored script is used in test method while the source of the stored script // must match a predefined script from CustomScriptPlugin.pluginScripts() method - assertAcked(client().admin().cluster().preparePutStoredScript() + assertAcked( + client().admin() + .cluster() + .preparePutStoredScript() .setId("initScript_stored") - .setContent(new BytesArray("{\"script\": {\"lang\": \"" + MockScriptPlugin.NAME + "\"," + - " \"source\": \"vars.multiplier = 3\"} }"), XContentType.JSON)); - - assertAcked(client().admin().cluster().preparePutStoredScript() + .setContent( + new BytesArray( + "{\"script\": {\"lang\": \"" + MockScriptPlugin.NAME + "\"," + " \"source\": \"vars.multiplier = 3\"} }" + ), + XContentType.JSON + ) + ); + + assertAcked( + client().admin() + .cluster() + .preparePutStoredScript() .setId("mapScript_stored") - .setContent(new BytesArray("{\"script\": {\"lang\": \"" + MockScriptPlugin.NAME + "\"," + - " \"source\": \"state.list.add(vars.multiplier)\"} }"), XContentType.JSON)); - - assertAcked(client().admin().cluster().preparePutStoredScript() + .setContent( + new BytesArray( + "{\"script\": {\"lang\": \"" 
+ MockScriptPlugin.NAME + "\"," + " \"source\": \"state.list.add(vars.multiplier)\"} }" + ), + XContentType.JSON + ) + ); + + assertAcked( + client().admin() + .cluster() + .preparePutStoredScript() .setId("combineScript_stored") - .setContent(new BytesArray("{\"script\": {\"lang\": \"" + MockScriptPlugin.NAME + "\"," + - " \"source\": \"sum state values as a new aggregation\"} }"), XContentType.JSON)); - - assertAcked(client().admin().cluster().preparePutStoredScript() + .setContent( + new BytesArray( + "{\"script\": {\"lang\": \"" + + MockScriptPlugin.NAME + + "\"," + + " \"source\": \"sum state values as a new aggregation\"} }" + ), + XContentType.JSON + ) + ); + + assertAcked( + client().admin() + .cluster() + .preparePutStoredScript() .setId("reduceScript_stored") - .setContent(new BytesArray("{\"script\": {\"lang\": \"" + MockScriptPlugin.NAME + "\"," + - " \"source\": \"sum all states (lists) values as a new aggregation\"} }"), XContentType.JSON)); + .setContent( + new BytesArray( + "{\"script\": {\"lang\": \"" + + MockScriptPlugin.NAME + + "\"," + + " \"source\": \"sum all states (lists) values as a new aggregation\"} }" + ), + XContentType.JSON + ) + ); indexRandom(true, builders); ensureSearchable(); @@ -333,8 +381,10 @@ public void setUp() throws Exception { Files.write(scripts.resolve("init_script.mockscript"), "vars.multiplier = 3".getBytes("UTF-8")); Files.write(scripts.resolve("map_script.mockscript"), "state.list.add(vars.multiplier)".getBytes("UTF-8")); Files.write(scripts.resolve("combine_script.mockscript"), "sum state values as a new aggregation".getBytes("UTF-8")); - Files.write(scripts.resolve("reduce_script.mockscript"), - "sum all states (lists) values as a new aggregation".getBytes("UTF-8")); + Files.write( + scripts.resolve("reduce_script.mockscript"), + "sum all states (lists) values as a new aggregation".getBytes("UTF-8") + ); } catch (IOException e) { throw new RuntimeException("failed to create scripts"); } @@ -347,15 +397,13 @@ protected Path nodeConfigPath(int nodeOrdinal) { public void testMap() { Script mapScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "state['count'] = 1", Collections.emptyMap()); - Script combineScript = - new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "no-op aggregation", Collections.emptyMap()); - Script reduceScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, - "no-op list aggregation", Collections.emptyMap()); + Script combineScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "no-op aggregation", Collections.emptyMap()); + Script reduceScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "no-op list aggregation", Collections.emptyMap()); SearchResponse response = client().prepareSearch("idx") - .setQuery(matchAllQuery()) - .addAggregation(scriptedMetric("scripted").mapScript(mapScript).combineScript(combineScript).reduceScript(reduceScript)) - .get(); + .setQuery(matchAllQuery()) + .addAggregation(scriptedMetric("scripted").mapScript(mapScript).combineScript(combineScript).reduceScript(reduceScript)) + .get(); assertSearchResponse(response); assertThat(response.getHits().getTotalHits().value, equalTo(numDocs)); @@ -392,18 +440,17 @@ public void testMapWithParams() { Map aggregationParams = Collections.singletonMap("param2", 1); Script mapScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "state[param1] = param2", scriptParams); - Script combineScript = - new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "no-op aggregation", Collections.emptyMap()); - 
Script reduceScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, - "no-op list aggregation", Collections.emptyMap()); + Script combineScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "no-op aggregation", Collections.emptyMap()); + Script reduceScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "no-op list aggregation", Collections.emptyMap()); SearchResponse response = client().prepareSearch("idx") .setQuery(matchAllQuery()) - .addAggregation(scriptedMetric("scripted") - .params(aggregationParams) - .mapScript(mapScript) - .combineScript(combineScript) - .reduceScript(reduceScript)) + .addAggregation( + scriptedMetric("scripted").params(aggregationParams) + .mapScript(mapScript) + .combineScript(combineScript) + .reduceScript(reduceScript) + ) .get(); assertSearchResponse(response); assertThat(response.getHits().getTotalHits().value, equalTo(numDocs)); @@ -421,8 +468,8 @@ public void testMapWithParams() { for (Object object : aggregationList) { assertThat(object, notNullValue()); assertThat(object, instanceOf(Map.class)); - Map map = (Map) object; - for (Map.Entry entry : map.entrySet()) { + Map map = (Map) object; + for (Map.Entry entry : map.entrySet()) { assertThat(entry, notNullValue()); assertThat(entry.getKey(), notNullValue()); assertThat(entry.getKey(), instanceOf(String.class)); @@ -445,21 +492,18 @@ public void testInitMutatesParams() { Map params = new HashMap<>(); params.put("vars", varsMap); - SearchResponse response = client() - .prepareSearch("idx") - .setQuery(matchAllQuery()) - .addAggregation( - scriptedMetric("scripted") - .params(params) - .initScript( - new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "vars.multiplier = 3", Collections.emptyMap())) - .mapScript(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, - "state.list.add(vars.multiplier)", Collections.emptyMap())) - .combineScript(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, - "no-op aggregation", Collections.emptyMap())) - .reduceScript(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, - "no-op list aggregation", Collections.emptyMap()))) - .get(); + SearchResponse response = client().prepareSearch("idx") + .setQuery(matchAllQuery()) + .addAggregation( + scriptedMetric("scripted").params(params) + .initScript(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "vars.multiplier = 3", Collections.emptyMap())) + .mapScript( + new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "state.list.add(vars.multiplier)", Collections.emptyMap()) + ) + .combineScript(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "no-op aggregation", Collections.emptyMap())) + .reduceScript(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "no-op list aggregation", Collections.emptyMap())) + ) + .get(); assertSearchResponse(response); assertThat(response.getHits().getTotalHits().value, equalTo(numDocs)); @@ -500,21 +544,20 @@ public void testMapCombineWithParams() { params.put("vars", varsMap); Script mapScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "state.list.add(1)", Collections.emptyMap()); - Script combineScript = - new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "sum state values as a new aggregation", Collections.emptyMap()); - Script reduceScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, - "no-op list aggregation", Collections.emptyMap()); - - SearchResponse response = client() - .prepareSearch("idx") - .setQuery(matchAllQuery()) - .addAggregation( - scriptedMetric("scripted") - .params(params) - 
.mapScript(mapScript) - .combineScript(combineScript) - .reduceScript(reduceScript)) - .get(); + Script combineScript = new Script( + ScriptType.INLINE, + CustomScriptPlugin.NAME, + "sum state values as a new aggregation", + Collections.emptyMap() + ); + Script reduceScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "no-op list aggregation", Collections.emptyMap()); + + SearchResponse response = client().prepareSearch("idx") + .setQuery(matchAllQuery()) + .addAggregation( + scriptedMetric("scripted").params(params).mapScript(mapScript).combineScript(combineScript).reduceScript(reduceScript) + ) + .get(); assertSearchResponse(response); assertThat(response.getHits().getTotalHits().value, equalTo(numDocs)); @@ -554,24 +597,30 @@ public void testInitMapCombineWithParams() { params.put("vars", varsMap); Script initScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "vars.multiplier = 3", Collections.emptyMap()); - Script mapScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "state.list.add(vars.multiplier)", - Collections.emptyMap()); - Script combineScript = - new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "sum state values as a new aggregation", Collections.emptyMap()); - Script reduceScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, - "no-op list aggregation", Collections.emptyMap()); - - SearchResponse response = client() - .prepareSearch("idx") - .setQuery(matchAllQuery()) - .addAggregation( - scriptedMetric("scripted") - .params(params) - .initScript(initScript) - .mapScript(mapScript) - .combineScript(combineScript) - .reduceScript(reduceScript)) - .get(); + Script mapScript = new Script( + ScriptType.INLINE, + CustomScriptPlugin.NAME, + "state.list.add(vars.multiplier)", + Collections.emptyMap() + ); + Script combineScript = new Script( + ScriptType.INLINE, + CustomScriptPlugin.NAME, + "sum state values as a new aggregation", + Collections.emptyMap() + ); + Script reduceScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "no-op list aggregation", Collections.emptyMap()); + + SearchResponse response = client().prepareSearch("idx") + .setQuery(matchAllQuery()) + .addAggregation( + scriptedMetric("scripted").params(params) + .initScript(initScript) + .mapScript(mapScript) + .combineScript(combineScript) + .reduceScript(reduceScript) + ) + .get(); assertSearchResponse(response); assertThat(response.getHits().getTotalHits().value, equalTo(numDocs)); @@ -611,24 +660,35 @@ public void testInitMapCombineReduceWithParams() { params.put("vars", varsMap); Script initScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "vars.multiplier = 3", Collections.emptyMap()); - Script mapScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "state.list.add(vars.multiplier)", - Collections.emptyMap()); - Script combineScript = - new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "sum state values as a new aggregation", Collections.emptyMap()); - Script reduceScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, - "sum all states (lists) values as a new aggregation", Collections.emptyMap()); - - SearchResponse response = client() - .prepareSearch("idx") - .setQuery(matchAllQuery()) - .addAggregation( - scriptedMetric("scripted") - .params(params) - .initScript(initScript) - .mapScript(mapScript) - .combineScript(combineScript) - .reduceScript(reduceScript)) - .get(); + Script mapScript = new Script( + ScriptType.INLINE, + CustomScriptPlugin.NAME, + "state.list.add(vars.multiplier)", + 
Collections.emptyMap() + ); + Script combineScript = new Script( + ScriptType.INLINE, + CustomScriptPlugin.NAME, + "sum state values as a new aggregation", + Collections.emptyMap() + ); + Script reduceScript = new Script( + ScriptType.INLINE, + CustomScriptPlugin.NAME, + "sum all states (lists) values as a new aggregation", + Collections.emptyMap() + ); + + SearchResponse response = client().prepareSearch("idx") + .setQuery(matchAllQuery()) + .addAggregation( + scriptedMetric("scripted").params(params) + .initScript(initScript) + .mapScript(mapScript) + .combineScript(combineScript) + .reduceScript(reduceScript) + ) + .get(); assertSearchResponse(response); assertThat(response.getHits().getTotalHits().value, equalTo(numDocs)); @@ -656,26 +716,37 @@ public void testInitMapCombineReduceGetProperty() throws Exception { params.put("vars", varsMap); Script initScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "vars.multiplier = 3", Collections.emptyMap()); - Script mapScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "state.list.add(vars.multiplier)", - Collections.emptyMap()); - Script combineScript = - new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "sum state values as a new aggregation", Collections.emptyMap()); - Script reduceScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, - "sum all states (lists) values as a new aggregation", Collections.emptyMap()); - - SearchResponse searchResponse = client() - .prepareSearch("idx") - .setQuery(matchAllQuery()) - .addAggregation( - global("global") - .subAggregation( - scriptedMetric("scripted") - .params(params) - .initScript(initScript) - .mapScript(mapScript) - .combineScript(combineScript) - .reduceScript(reduceScript))) - .get(); + Script mapScript = new Script( + ScriptType.INLINE, + CustomScriptPlugin.NAME, + "state.list.add(vars.multiplier)", + Collections.emptyMap() + ); + Script combineScript = new Script( + ScriptType.INLINE, + CustomScriptPlugin.NAME, + "sum state values as a new aggregation", + Collections.emptyMap() + ); + Script reduceScript = new Script( + ScriptType.INLINE, + CustomScriptPlugin.NAME, + "sum all states (lists) values as a new aggregation", + Collections.emptyMap() + ); + + SearchResponse searchResponse = client().prepareSearch("idx") + .setQuery(matchAllQuery()) + .addAggregation( + global("global").subAggregation( + scriptedMetric("scripted").params(params) + .initScript(initScript) + .mapScript(mapScript) + .combineScript(combineScript) + .reduceScript(reduceScript) + ) + ) + .get(); assertSearchResponse(searchResponse); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(numDocs)); @@ -698,9 +769,9 @@ public void testInitMapCombineReduceGetProperty() throws Exception { assertThat(object, notNullValue()); assertThat(object, instanceOf(Number.class)); assertThat(((Number) object).longValue(), equalTo(numDocs * 3)); - assertThat(((InternalAggregation)global).getProperty("scripted"), sameInstance(scriptedMetricAggregation)); - assertThat((List) ((InternalAggregation)global).getProperty("scripted.value"), sameInstance(aggregationList)); - assertThat((List) ((InternalAggregation)scriptedMetricAggregation).getProperty("value"), sameInstance(aggregationList)); + assertThat(((InternalAggregation) global).getProperty("scripted"), sameInstance(scriptedMetricAggregation)); + assertThat((List) ((InternalAggregation) global).getProperty("scripted.value"), sameInstance(aggregationList)); + assertThat((List) ((InternalAggregation) 
scriptedMetricAggregation).getProperty("value"), sameInstance(aggregationList)); } public void testMapCombineReduceWithParams() { @@ -710,23 +781,31 @@ public void testMapCombineReduceWithParams() { Map params = new HashMap<>(); params.put("vars", varsMap); - Script mapScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "state.list.add(vars.multiplier)", - Collections.emptyMap()); - Script combineScript = - new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "sum state values as a new aggregation", Collections.emptyMap()); - Script reduceScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, - "sum all states (lists) values as a new aggregation", Collections.emptyMap()); - - SearchResponse response = client() - .prepareSearch("idx") - .setQuery(matchAllQuery()) - .addAggregation( - scriptedMetric("scripted") - .params(params) - .mapScript(mapScript) - .combineScript(combineScript) - .reduceScript(reduceScript)) - .get(); + Script mapScript = new Script( + ScriptType.INLINE, + CustomScriptPlugin.NAME, + "state.list.add(vars.multiplier)", + Collections.emptyMap() + ); + Script combineScript = new Script( + ScriptType.INLINE, + CustomScriptPlugin.NAME, + "sum state values as a new aggregation", + Collections.emptyMap() + ); + Script reduceScript = new Script( + ScriptType.INLINE, + CustomScriptPlugin.NAME, + "sum all states (lists) values as a new aggregation", + Collections.emptyMap() + ); + + SearchResponse response = client().prepareSearch("idx") + .setQuery(matchAllQuery()) + .addAggregation( + scriptedMetric("scripted").params(params).mapScript(mapScript).combineScript(combineScript).reduceScript(reduceScript) + ) + .get(); assertSearchResponse(response); assertThat(response.getHits().getTotalHits().value, equalTo(numDocs)); @@ -753,24 +832,30 @@ public void testInitMapReduceWithParams() { params.put("vars", varsMap); Script initScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "vars.multiplier = 3", Collections.emptyMap()); - Script mapScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "state.list.add(vars.multiplier)", - Collections.emptyMap()); - Script combineScript = - new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "no-op aggregation", Collections.emptyMap()); - Script reduceScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, - "sum all states' state.list values as a new aggregation", Collections.emptyMap()); - - SearchResponse response = client() - .prepareSearch("idx") - .setQuery(matchAllQuery()) - .addAggregation( - scriptedMetric("scripted") - .params(params) - .initScript(initScript) - .mapScript(mapScript) - .combineScript(combineScript) - .reduceScript(reduceScript)) - .get(); + Script mapScript = new Script( + ScriptType.INLINE, + CustomScriptPlugin.NAME, + "state.list.add(vars.multiplier)", + Collections.emptyMap() + ); + Script combineScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "no-op aggregation", Collections.emptyMap()); + Script reduceScript = new Script( + ScriptType.INLINE, + CustomScriptPlugin.NAME, + "sum all states' state.list values as a new aggregation", + Collections.emptyMap() + ); + + SearchResponse response = client().prepareSearch("idx") + .setQuery(matchAllQuery()) + .addAggregation( + scriptedMetric("scripted").params(params) + .initScript(initScript) + .mapScript(mapScript) + .combineScript(combineScript) + .reduceScript(reduceScript) + ) + .get(); assertSearchResponse(response); assertThat(response.getHits().getTotalHits().value, equalTo(numDocs)); @@ -795,23 
+880,26 @@ public void testMapReduceWithParams() { Map params = new HashMap<>(); params.put("vars", varsMap); - Script mapScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "state.list.add(vars.multiplier)", - Collections.emptyMap()); - Script combineScript = - new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "no-op aggregation", Collections.emptyMap()); - Script reduceScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, - "sum all states' state.list values as a new aggregation", Collections.emptyMap()); - - SearchResponse response = client() - .prepareSearch("idx") - .setQuery(matchAllQuery()) - .addAggregation( - scriptedMetric("scripted") - .params(params) - .mapScript(mapScript) - .combineScript(combineScript) - .reduceScript(reduceScript)) - .get(); + Script mapScript = new Script( + ScriptType.INLINE, + CustomScriptPlugin.NAME, + "state.list.add(vars.multiplier)", + Collections.emptyMap() + ); + Script combineScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "no-op aggregation", Collections.emptyMap()); + Script reduceScript = new Script( + ScriptType.INLINE, + CustomScriptPlugin.NAME, + "sum all states' state.list values as a new aggregation", + Collections.emptyMap() + ); + + SearchResponse response = client().prepareSearch("idx") + .setQuery(matchAllQuery()) + .addAggregation( + scriptedMetric("scripted").params(params).mapScript(mapScript).combineScript(combineScript).reduceScript(reduceScript) + ) + .get(); assertSearchResponse(response); assertThat(response.getHits().getTotalHits().value, equalTo(numDocs)); @@ -841,24 +929,35 @@ public void testInitMapCombineReduceWithParamsAndReduceParams() { reduceParams.put("multiplier", 4); Script initScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "vars.multiplier = 3", Collections.emptyMap()); - Script mapScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "state.list.add(vars.multiplier)", - Collections.emptyMap()); - Script combineScript = - new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "sum state values as a new aggregation", Collections.emptyMap()); - Script reduceScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, - "multiplied sum all states (lists) values as a new aggregation", reduceParams); - - SearchResponse response = client() - .prepareSearch("idx") - .setQuery(matchAllQuery()) - .addAggregation( - scriptedMetric("scripted") - .params(params) - .initScript(initScript) - .mapScript(mapScript) - .combineScript(combineScript) - .reduceScript(reduceScript)) - .get(); + Script mapScript = new Script( + ScriptType.INLINE, + CustomScriptPlugin.NAME, + "state.list.add(vars.multiplier)", + Collections.emptyMap() + ); + Script combineScript = new Script( + ScriptType.INLINE, + CustomScriptPlugin.NAME, + "sum state values as a new aggregation", + Collections.emptyMap() + ); + Script reduceScript = new Script( + ScriptType.INLINE, + CustomScriptPlugin.NAME, + "multiplied sum all states (lists) values as a new aggregation", + reduceParams + ); + + SearchResponse response = client().prepareSearch("idx") + .setQuery(matchAllQuery()) + .addAggregation( + scriptedMetric("scripted").params(params) + .initScript(initScript) + .mapScript(mapScript) + .combineScript(combineScript) + .reduceScript(reduceScript) + ) + .get(); assertSearchResponse(response); assertThat(response.getHits().getTotalHits().value, equalTo(numDocs)); @@ -884,21 +983,16 @@ public void testInitMapCombineReduceWithParamsStored() { Map params = new HashMap<>(); params.put("vars", 
varsMap); - SearchResponse response = client() - .prepareSearch("idx") - .setQuery(matchAllQuery()) - .addAggregation( - scriptedMetric("scripted") - .params(params) - .initScript( - new Script(ScriptType.STORED, null, "initScript_stored", Collections.emptyMap())) - .mapScript( - new Script(ScriptType.STORED, null, "mapScript_stored", Collections.emptyMap())) - .combineScript( - new Script(ScriptType.STORED, null, "combineScript_stored", Collections.emptyMap())) - .reduceScript( - new Script(ScriptType.STORED, null, "reduceScript_stored", Collections.emptyMap()))) - .get(); + SearchResponse response = client().prepareSearch("idx") + .setQuery(matchAllQuery()) + .addAggregation( + scriptedMetric("scripted").params(params) + .initScript(new Script(ScriptType.STORED, null, "initScript_stored", Collections.emptyMap())) + .mapScript(new Script(ScriptType.STORED, null, "mapScript_stored", Collections.emptyMap())) + .combineScript(new Script(ScriptType.STORED, null, "combineScript_stored", Collections.emptyMap())) + .reduceScript(new Script(ScriptType.STORED, null, "reduceScript_stored", Collections.emptyMap())) + ) + .get(); assertSearchResponse(response); assertThat(response.getHits().getTotalHits().value, equalTo(numDocs)); @@ -925,28 +1019,40 @@ public void testInitMapCombineReduceWithParamsAsSubAgg() { params.put("vars", varsMap); Script initScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "vars.multiplier = 3", Collections.emptyMap()); - Script mapScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "state.list.add(vars.multiplier)", - Collections.emptyMap()); - Script combineScript = - new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "sum state values as a new aggregation", Collections.emptyMap()); - Script reduceScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, - "sum all states (lists) values as a new aggregation", Collections.emptyMap()); - - SearchResponse response = client() - .prepareSearch("idx") - .setQuery(matchAllQuery()).setSize(1000) - .addAggregation( - histogram("histo") - .field("l_value") - .interval(1) - .subAggregation( - scriptedMetric("scripted") - .params(params) - .initScript(initScript) - .mapScript(mapScript) - .combineScript(combineScript) - .reduceScript(reduceScript))) - .get(); + Script mapScript = new Script( + ScriptType.INLINE, + CustomScriptPlugin.NAME, + "state.list.add(vars.multiplier)", + Collections.emptyMap() + ); + Script combineScript = new Script( + ScriptType.INLINE, + CustomScriptPlugin.NAME, + "sum state values as a new aggregation", + Collections.emptyMap() + ); + Script reduceScript = new Script( + ScriptType.INLINE, + CustomScriptPlugin.NAME, + "sum all states (lists) values as a new aggregation", + Collections.emptyMap() + ); + + SearchResponse response = client().prepareSearch("idx") + .setQuery(matchAllQuery()) + .setSize(1000) + .addAggregation( + histogram("histo").field("l_value") + .interval(1) + .subAggregation( + scriptedMetric("scripted").params(params) + .initScript(initScript) + .mapScript(mapScript) + .combineScript(combineScript) + .reduceScript(reduceScript) + ) + ) + .get(); assertSearchResponse(response); assertThat(response.getHits().getTotalHits().value, equalTo(numDocs)); Aggregation aggregation = response.getAggregations().get("histo"); @@ -986,24 +1092,40 @@ public void testEmptyAggregation() throws Exception { params.put("vars", varsMap); Script initScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "vars.multiplier = 3", Collections.emptyMap()); - Script 
mapScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "state.list.add(vars.multiplier)", - Collections.emptyMap()); - Script combineScript = - new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "sum state values as a new aggregation", Collections.emptyMap()); - Script reduceScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, - "sum all states (lists) values as a new aggregation", Collections.emptyMap()); + Script mapScript = new Script( + ScriptType.INLINE, + CustomScriptPlugin.NAME, + "state.list.add(vars.multiplier)", + Collections.emptyMap() + ); + Script combineScript = new Script( + ScriptType.INLINE, + CustomScriptPlugin.NAME, + "sum state values as a new aggregation", + Collections.emptyMap() + ); + Script reduceScript = new Script( + ScriptType.INLINE, + CustomScriptPlugin.NAME, + "sum all states (lists) values as a new aggregation", + Collections.emptyMap() + ); SearchResponse searchResponse = client().prepareSearch("empty_bucket_idx") - .setQuery(matchAllQuery()) - .addAggregation(histogram("histo").field("value").interval(1L).minDocCount(0) - .subAggregation( - scriptedMetric("scripted") - .params(params) - .initScript(initScript) - .mapScript(mapScript) - .combineScript(combineScript) - .reduceScript(reduceScript))) - .get(); + .setQuery(matchAllQuery()) + .addAggregation( + histogram("histo").field("value") + .interval(1L) + .minDocCount(0) + .subAggregation( + scriptedMetric("scripted").params(params) + .initScript(initScript) + .mapScript(mapScript) + .combineScript(combineScript) + .reduceScript(reduceScript) + ) + ) + .get(); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(2L)); Histogram histo = searchResponse.getAggregations().get("histo"); @@ -1022,106 +1144,224 @@ public void testEmptyAggregation() throws Exception { assertThat(aggregationResult.get(0), equalTo(0)); } - /** * Make sure that a request using a deterministic script gets cached and nondeterministic scripts do not get cached. */ public void testScriptCaching() throws Exception { Script mapScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "state['count'] = 1", Collections.emptyMap()); - Script combineScript = - new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "no-op aggregation", Collections.emptyMap()); - Script reduceScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, - "no-op list aggregation", Collections.emptyMap()); + Script combineScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "no-op aggregation", Collections.emptyMap()); + Script reduceScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "no-op list aggregation", Collections.emptyMap()); - Script ndInitScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "state.data = Math.random()", - Collections.emptyMap()); + Script ndInitScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "state.data = Math.random()", Collections.emptyMap()); - Script ndMapScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "state['count'] = Math.random() >= 0.5 ? 1 : 0", - Collections.emptyMap()); + Script ndMapScript = new Script( + ScriptType.INLINE, + CustomScriptPlugin.NAME, + "state['count'] = Math.random() >= 0.5 ? 
1 : 0", + Collections.emptyMap() + ); - Script ndRandom = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "return Math.random()", - Collections.emptyMap()); + Script ndRandom = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "return Math.random()", Collections.emptyMap()); - assertAcked(prepareCreate("cache_test_idx").setMapping("d", "type=long") + assertAcked( + prepareCreate("cache_test_idx").setMapping("d", "type=long") .setSettings(Settings.builder().put("requests.cache.enable", true).put("number_of_shards", 1).put("number_of_replicas", 1)) - .get()); - indexRandom(true, client().prepareIndex("cache_test_idx").setId("1").setSource("s", 1), - client().prepareIndex("cache_test_idx").setId("2").setSource("s", 2)); + .get() + ); + indexRandom( + true, + client().prepareIndex("cache_test_idx").setId("1").setSource("s", 1), + client().prepareIndex("cache_test_idx").setId("2").setSource("s", 2) + ); // Make sure we are starting with a clear cache - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getHitCount(), equalTo(0L)); - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getMissCount(), equalTo(0L)); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getHitCount(), + equalTo(0L) + ); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getMissCount(), + equalTo(0L) + ); // Test that a non-deterministic init script causes the result to not be cached - SearchResponse r = client().prepareSearch("cache_test_idx").setSize(0) - .addAggregation(scriptedMetric("foo").initScript(ndInitScript).mapScript(mapScript).combineScript(combineScript) - .reduceScript(reduceScript)).get(); + SearchResponse r = client().prepareSearch("cache_test_idx") + .setSize(0) + .addAggregation( + scriptedMetric("foo").initScript(ndInitScript).mapScript(mapScript).combineScript(combineScript).reduceScript(reduceScript) + ) + .get(); assertSearchResponse(r); - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getHitCount(), equalTo(0L)); - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getMissCount(), equalTo(0L)); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getHitCount(), + equalTo(0L) + ); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getMissCount(), + equalTo(0L) + ); // Test that a non-deterministic map script causes the result to not be cached - r = client().prepareSearch("cache_test_idx").setSize(0) + r = client().prepareSearch("cache_test_idx") + .setSize(0) .addAggregation(scriptedMetric("foo").mapScript(ndMapScript).combineScript(combineScript).reduceScript(reduceScript)) .get(); assertSearchResponse(r); - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getHitCount(), equalTo(0L)); - 
assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getMissCount(), equalTo(0L)); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getHitCount(), + equalTo(0L) + ); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getMissCount(), + equalTo(0L) + ); // Test that a non-deterministic combine script causes the result to not be cached - r = client().prepareSearch("cache_test_idx").setSize(0) - .addAggregation(scriptedMetric("foo").mapScript(mapScript).combineScript(ndRandom).reduceScript(reduceScript)).get(); + r = client().prepareSearch("cache_test_idx") + .setSize(0) + .addAggregation(scriptedMetric("foo").mapScript(mapScript).combineScript(ndRandom).reduceScript(reduceScript)) + .get(); assertSearchResponse(r); - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getHitCount(), equalTo(0L)); - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getMissCount(), equalTo(0L)); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getHitCount(), + equalTo(0L) + ); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getMissCount(), + equalTo(0L) + ); // NOTE: random reduce scripts don't hit the query shard context (they are done on the coordinator) and so can be cached. 
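The NOTE above is the reason the next request is still expected to be cached even though its reduce script calls Math.random(): the reduce phase runs on the coordinating node, not on the shards, so only the init, map and combine scripts matter for the shard-level request cache. A minimal, hypothetical sketch of that rule follows; the class and method names are invented purely for illustration and are not Elasticsearch's actual caching code.

    // Hypothetical illustration only, not the real implementation.
    final class ShardCacheRule {
        // Shard-level request caching for scripted_metric depends only on the scripts that
        // execute on the shard: init, map and combine. The reduce script runs later on the
        // coordinating node, so its determinism does not affect per-shard cacheability.
        static boolean shardRequestCacheable(boolean initDeterministic,
                                             boolean mapDeterministic,
                                             boolean combineDeterministic,
                                             boolean reduceDeterministic) {
            // reduceDeterministic is deliberately ignored: reduce never touches the shard cache.
            return initDeterministic && mapDeterministic && combineDeterministic;
        }
    }

This is consistent with the assertions around it: the random-reduce request below bumps the miss count to 1 (the shard result was cached and missed once), while the earlier requests with random init, map or combine scripts left both hit and miss counts at 0 because they were never cached.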
- r = client().prepareSearch("cache_test_idx").setSize(0) - .addAggregation(scriptedMetric("foo").mapScript(mapScript).combineScript(combineScript).reduceScript(ndRandom)).get(); + r = client().prepareSearch("cache_test_idx") + .setSize(0) + .addAggregation(scriptedMetric("foo").mapScript(mapScript).combineScript(combineScript).reduceScript(ndRandom)) + .get(); assertSearchResponse(r); - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getHitCount(), equalTo(0L)); - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getMissCount(), equalTo(1L)); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getHitCount(), + equalTo(0L) + ); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getMissCount(), + equalTo(1L) + ); // Test that all deterministic scripts cause the request to be cached - r = client().prepareSearch("cache_test_idx").setSize(0) - .addAggregation(scriptedMetric("foo").mapScript(mapScript).combineScript(combineScript).reduceScript(reduceScript)) - .get(); + r = client().prepareSearch("cache_test_idx") + .setSize(0) + .addAggregation(scriptedMetric("foo").mapScript(mapScript).combineScript(combineScript).reduceScript(reduceScript)) + .get(); assertSearchResponse(r); - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getHitCount(), equalTo(0L)); - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getMissCount(), equalTo(2L)); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getHitCount(), + equalTo(0L) + ); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getMissCount(), + equalTo(2L) + ); } public void testConflictingAggAndScriptParams() { Map params = Collections.singletonMap("param1", "12"); Script mapScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "state.list.add(1)", params); - Script combineScript = - new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "no-op aggregation", Collections.emptyMap()); - Script reduceScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, - "no-op list aggregation", Collections.emptyMap()); + Script combineScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "no-op aggregation", Collections.emptyMap()); + Script reduceScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "no-op list aggregation", Collections.emptyMap()); SearchRequestBuilder builder = client().prepareSearch("idx") .setQuery(matchAllQuery()) - .addAggregation(scriptedMetric("scripted") - .params(params).mapScript(mapScript) - .combineScript(combineScript) - .reduceScript(reduceScript)); + .addAggregation( + scriptedMetric("scripted").params(params).mapScript(mapScript).combineScript(combineScript).reduceScript(reduceScript) + ); SearchPhaseExecutionException ex = expectThrows(SearchPhaseExecutionException.class, builder::get); assertThat(ex.getCause().getMessage(), containsString("Parameter name \"param1\" used in both 
aggregation and script parameters")); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/StatsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/StatsIT.java index f3512e9a7edfa..bbbdc41cadfa2 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/StatsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/StatsIT.java @@ -15,12 +15,12 @@ import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptType; import org.elasticsearch.search.aggregations.AggregationTestScriptsPlugin; +import org.elasticsearch.search.aggregations.BucketOrder; import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.bucket.filter.Filter; import org.elasticsearch.search.aggregations.bucket.global.Global; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; import org.elasticsearch.search.aggregations.bucket.terms.Terms; -import org.elasticsearch.search.aggregations.BucketOrder; import java.util.Collection; import java.util.Collections; @@ -50,9 +50,9 @@ protected Collection> nodePlugins() { @Override public void testEmptyAggregation() throws Exception { SearchResponse searchResponse = client().prepareSearch("empty_bucket_idx") - .setQuery(matchAllQuery()) - .addAggregation(histogram("histo").field("value").interval(1L).minDocCount(0).subAggregation(stats("stats").field("value"))) - .get(); + .setQuery(matchAllQuery()) + .addAggregation(histogram("histo").field("value").interval(1L).minDocCount(0).subAggregation(stats("stats").field("value"))) + .get(); assertShardExecutionState(searchResponse, 0); @@ -75,9 +75,9 @@ public void testEmptyAggregation() throws Exception { @Override public void testSingleValuedField() throws Exception { SearchResponse searchResponse = client().prepareSearch("idx") - .setQuery(matchAllQuery()) - .addAggregation(stats("stats").field("value")) - .get(); + .setQuery(matchAllQuery()) + .addAggregation(stats("stats").field("value")) + .get(); assertShardExecutionState(searchResponse, 0); @@ -86,17 +86,19 @@ public void testSingleValuedField() throws Exception { Stats stats = searchResponse.getAggregations().get("stats"); assertThat(stats, notNullValue()); assertThat(stats.getName(), equalTo("stats")); - assertThat(stats.getAvg(), equalTo((double) (1+2+3+4+5+6+7+8+9+10) / 10)); + assertThat(stats.getAvg(), equalTo((double) (1 + 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10) / 10)); assertThat(stats.getMin(), equalTo(1.0)); assertThat(stats.getMax(), equalTo(10.0)); - assertThat(stats.getSum(), equalTo((double) 1+2+3+4+5+6+7+8+9+10)); + assertThat(stats.getSum(), equalTo((double) 1 + 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10)); assertThat(stats.getCount(), equalTo(10L)); } public void testSingleValuedField_WithFormatter() throws Exception { - SearchResponse searchResponse = client().prepareSearch("idx").setQuery(matchAllQuery()) - .addAggregation(stats("stats").format("0000.0").field("value")).get(); + SearchResponse searchResponse = client().prepareSearch("idx") + .setQuery(matchAllQuery()) + .addAggregation(stats("stats").format("0000.0").field("value")) + .get(); assertHitCount(searchResponse, 10); @@ -116,8 +118,10 @@ public void testSingleValuedField_WithFormatter() throws Exception { @Override public void testSingleValuedFieldGetProperty() throws Exception { - SearchResponse searchResponse = client().prepareSearch("idx").setQuery(matchAllQuery()) - 
.addAggregation(global("global").subAggregation(stats("stats").field("value"))).get(); + SearchResponse searchResponse = client().prepareSearch("idx") + .setQuery(matchAllQuery()) + .addAggregation(global("global").subAggregation(stats("stats").field("value"))) + .get(); assertHitCount(searchResponse, 10); @@ -131,32 +135,32 @@ public void testSingleValuedFieldGetProperty() throws Exception { Stats stats = global.getAggregations().get("stats"); assertThat(stats, notNullValue()); assertThat(stats.getName(), equalTo("stats")); - Stats statsFromProperty = (Stats) ((InternalAggregation)global).getProperty("stats"); + Stats statsFromProperty = (Stats) ((InternalAggregation) global).getProperty("stats"); assertThat(statsFromProperty, notNullValue()); assertThat(statsFromProperty, sameInstance(stats)); double expectedAvgValue = (double) (1 + 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10) / 10; assertThat(stats.getAvg(), equalTo(expectedAvgValue)); - assertThat((double) ((InternalAggregation)global).getProperty("stats.avg"), equalTo(expectedAvgValue)); + assertThat((double) ((InternalAggregation) global).getProperty("stats.avg"), equalTo(expectedAvgValue)); double expectedMinValue = 1.0; assertThat(stats.getMin(), equalTo(expectedMinValue)); - assertThat((double) ((InternalAggregation)global).getProperty("stats.min"), equalTo(expectedMinValue)); + assertThat((double) ((InternalAggregation) global).getProperty("stats.min"), equalTo(expectedMinValue)); double expectedMaxValue = 10.0; assertThat(stats.getMax(), equalTo(expectedMaxValue)); - assertThat((double) ((InternalAggregation)global).getProperty("stats.max"), equalTo(expectedMaxValue)); + assertThat((double) ((InternalAggregation) global).getProperty("stats.max"), equalTo(expectedMaxValue)); double expectedSumValue = 1 + 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10; assertThat(stats.getSum(), equalTo(expectedSumValue)); - assertThat((double) ((InternalAggregation)global).getProperty("stats.sum"), equalTo(expectedSumValue)); + assertThat((double) ((InternalAggregation) global).getProperty("stats.sum"), equalTo(expectedSumValue)); long expectedCountValue = 10; assertThat(stats.getCount(), equalTo(expectedCountValue)); - assertThat((double) ((InternalAggregation)global).getProperty("stats.count"), equalTo((double) expectedCountValue)); + assertThat((double) ((InternalAggregation) global).getProperty("stats.count"), equalTo((double) expectedCountValue)); } @Override public void testMultiValuedField() throws Exception { SearchResponse searchResponse = client().prepareSearch("idx") - .setQuery(matchAllQuery()) - .addAggregation(stats("stats").field("values")) - .get(); + .setQuery(matchAllQuery()) + .addAggregation(stats("stats").field("values")) + .get(); assertShardExecutionState(searchResponse, 0); @@ -165,19 +169,26 @@ public void testMultiValuedField() throws Exception { Stats stats = searchResponse.getAggregations().get("stats"); assertThat(stats, notNullValue()); assertThat(stats.getName(), equalTo("stats")); - assertThat(stats.getAvg(), equalTo((double) (2+3+4+5+6+7+8+9+10+11+3+4+5+6+7+8+9+10+11+12) / 20)); + assertThat( + stats.getAvg(), + equalTo((double) (2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10 + 11 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10 + 11 + 12) / 20) + ); assertThat(stats.getMin(), equalTo(2.0)); assertThat(stats.getMax(), equalTo(12.0)); - assertThat(stats.getSum(), equalTo((double) 2+3+4+5+6+7+8+9+10+11+3+4+5+6+7+8+9+10+11+12)); + assertThat(stats.getSum(), equalTo((double) 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10 + 11 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10 + 11 + 12)); 
assertThat(stats.getCount(), equalTo(20L)); } @Override public void testOrderByEmptyAggregation() throws Exception { - SearchResponse searchResponse = client().prepareSearch("idx").setQuery(matchAllQuery()) - .addAggregation(terms("terms").field("value").order(BucketOrder.compound(BucketOrder.aggregation("filter>stats.avg", true))) - .subAggregation(filter("filter", termQuery("value", 100)).subAggregation(stats("stats").field("value")))) - .get(); + SearchResponse searchResponse = client().prepareSearch("idx") + .setQuery(matchAllQuery()) + .addAggregation( + terms("terms").field("value") + .order(BucketOrder.compound(BucketOrder.aggregation("filter>stats.avg", true))) + .subAggregation(filter("filter", termQuery("value", 100)).subAggregation(stats("stats").field("value"))) + ) + .get(); assertHitCount(searchResponse, 10); @@ -222,47 +233,132 @@ private void assertShardExecutionState(SearchResponse response, int expectedFail * Ensure requests using nondeterministic scripts do not get cached. */ public void testScriptCaching() throws Exception { - assertAcked(prepareCreate("cache_test_idx").setMapping("d", "type=long") + assertAcked( + prepareCreate("cache_test_idx").setMapping("d", "type=long") .setSettings(Settings.builder().put("requests.cache.enable", true).put("number_of_shards", 1).put("number_of_replicas", 1)) - .get()); - indexRandom(true, client().prepareIndex("cache_test_idx").setId("1").setSource("s", 1), - client().prepareIndex("cache_test_idx").setId("2").setSource("s", 2)); + .get() + ); + indexRandom( + true, + client().prepareIndex("cache_test_idx").setId("1").setSource("s", 1), + client().prepareIndex("cache_test_idx").setId("2").setSource("s", 2) + ); // Make sure we are starting with a clear cache - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getHitCount(), equalTo(0L)); - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getMissCount(), equalTo(0L)); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getHitCount(), + equalTo(0L) + ); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getMissCount(), + equalTo(0L) + ); // Test that a request using a nondeterministic script does not get cached - SearchResponse r = client().prepareSearch("cache_test_idx").setSize(0).addAggregation( - stats("foo").field("d").script( - new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "Math.random()", Collections.emptyMap()))).get(); + SearchResponse r = client().prepareSearch("cache_test_idx") + .setSize(0) + .addAggregation( + stats("foo").field("d") + .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "Math.random()", Collections.emptyMap())) + ) + .get(); assertSearchResponse(r); - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getHitCount(), equalTo(0L)); - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getMissCount(), equalTo(0L)); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getHitCount(), + equalTo(0L) + ); + assertThat( + 
client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getMissCount(), + equalTo(0L) + ); // Test that a request using a deterministic script gets cached - r = client().prepareSearch("cache_test_idx").setSize(0).addAggregation( - stats("foo").field("d").script( - new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value + 1", Collections.emptyMap()))).get(); + r = client().prepareSearch("cache_test_idx") + .setSize(0) + .addAggregation( + stats("foo").field("d") + .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value + 1", Collections.emptyMap())) + ) + .get(); assertSearchResponse(r); - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getHitCount(), equalTo(0L)); - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getMissCount(), equalTo(1L)); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getHitCount(), + equalTo(0L) + ); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getMissCount(), + equalTo(1L) + ); // Ensure that non-scripted requests are cached as normal r = client().prepareSearch("cache_test_idx").setSize(0).addAggregation(stats("foo").field("d")).get(); assertSearchResponse(r); - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getHitCount(), equalTo(0L)); - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getMissCount(), equalTo(2L)); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getHitCount(), + equalTo(0L) + ); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getMissCount(), + equalTo(2L) + ); } } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/SumIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/SumIT.java index 097bd20deb1c4..efca97589e8e1 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/SumIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/SumIT.java @@ -55,17 +55,15 @@ public void setupSuiteScopeCluster() throws Exception { // Create two indices and add the field 'route_length_miles' as an alias in // one, and a concrete field in the other. 
- prepareCreate("old_index") - .setMapping( - "transit_mode", "type=keyword", - "distance", "type=double", - "route_length_miles", "type=alias,path=distance") - .get(); - prepareCreate("new_index") - .setMapping( - "transit_mode", "type=keyword", - "route_length_miles", "type=double") - .get(); + prepareCreate("old_index").setMapping( + "transit_mode", + "type=keyword", + "distance", + "type=double", + "route_length_miles", + "type=alias,path=distance" + ).get(); + prepareCreate("new_index").setMapping("transit_mode", "type=keyword", "route_length_miles", "type=double").get(); List builders = new ArrayList<>(); builders.add(client().prepareIndex("old_index").setSource("transit_mode", "train", "distance", 42.0)); @@ -80,9 +78,9 @@ public void setupSuiteScopeCluster() throws Exception { public void testEmptyAggregation() throws Exception { SearchResponse searchResponse = client().prepareSearch("empty_bucket_idx") - .setQuery(matchAllQuery()) - .addAggregation(histogram("histo").field("value").interval(1L).minDocCount(0).subAggregation(sum("sum").field("value"))) - .get(); + .setQuery(matchAllQuery()) + .addAggregation(histogram("histo").field("value").interval(1L).minDocCount(0).subAggregation(sum("sum").field("value"))) + .get(); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(2L)); Histogram histo = searchResponse.getAggregations().get("histo"); @@ -103,21 +101,23 @@ public void testUnmapped() throws Exception {} @Override public void testSingleValuedField() throws Exception { SearchResponse searchResponse = client().prepareSearch("idx") - .setQuery(matchAllQuery()) - .addAggregation(sum("sum").field("value")) - .get(); + .setQuery(matchAllQuery()) + .addAggregation(sum("sum").field("value")) + .get(); assertHitCount(searchResponse, 10); Sum sum = searchResponse.getAggregations().get("sum"); assertThat(sum, notNullValue()); assertThat(sum.getName(), equalTo("sum")); - assertThat(sum.getValue(), equalTo((double) 1+2+3+4+5+6+7+8+9+10)); + assertThat(sum.getValue(), equalTo((double) 1 + 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10)); } public void testSingleValuedFieldWithFormatter() throws Exception { - SearchResponse searchResponse = client().prepareSearch("idx").setQuery(matchAllQuery()) - .addAggregation(sum("sum").format("0000.0").field("value")).get(); + SearchResponse searchResponse = client().prepareSearch("idx") + .setQuery(matchAllQuery()) + .addAggregation(sum("sum").format("0000.0").field("value")) + .get(); assertHitCount(searchResponse, 10); @@ -131,8 +131,10 @@ public void testSingleValuedFieldWithFormatter() throws Exception { @Override public void testSingleValuedFieldGetProperty() throws Exception { - SearchResponse searchResponse = client().prepareSearch("idx").setQuery(matchAllQuery()) - .addAggregation(global("global").subAggregation(sum("sum").field("value"))).get(); + SearchResponse searchResponse = client().prepareSearch("idx") + .setQuery(matchAllQuery()) + .addAggregation(global("global").subAggregation(sum("sum").field("value"))) + .get(); assertHitCount(searchResponse, 10); @@ -148,33 +150,37 @@ public void testSingleValuedFieldGetProperty() throws Exception { assertThat(sum.getName(), equalTo("sum")); double expectedSumValue = (double) 1 + 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10; assertThat(sum.getValue(), equalTo(expectedSumValue)); - assertThat((Sum) ((InternalAggregation)global).getProperty("sum"), equalTo(sum)); - assertThat((double) ((InternalAggregation)global).getProperty("sum.value"), equalTo(expectedSumValue)); - assertThat((double) 
((InternalAggregation)sum).getProperty("value"), equalTo(expectedSumValue)); + assertThat((Sum) ((InternalAggregation) global).getProperty("sum"), equalTo(sum)); + assertThat((double) ((InternalAggregation) global).getProperty("sum.value"), equalTo(expectedSumValue)); + assertThat((double) ((InternalAggregation) sum).getProperty("value"), equalTo(expectedSumValue)); } @Override public void testMultiValuedField() throws Exception { SearchResponse searchResponse = client().prepareSearch("idx") - .setQuery(matchAllQuery()) - .addAggregation(sum("sum").field("values")) - .get(); + .setQuery(matchAllQuery()) + .addAggregation(sum("sum").field("values")) + .get(); assertHitCount(searchResponse, 10); Sum sum = searchResponse.getAggregations().get("sum"); assertThat(sum, notNullValue()); assertThat(sum.getName(), equalTo("sum")); - assertThat(sum.getValue(), equalTo((double) 2+3+3+4+4+5+5+6+6+7+7+8+8+9+9+10+10+11+11+12)); + assertThat(sum.getValue(), equalTo((double) 2 + 3 + 3 + 4 + 4 + 5 + 5 + 6 + 6 + 7 + 7 + 8 + 8 + 9 + 9 + 10 + 10 + 11 + 11 + 12)); } @Override public void testOrderByEmptyAggregation() throws Exception { - SearchResponse searchResponse = client().prepareSearch("idx").setQuery(matchAllQuery()) - .addAggregation(terms("terms").field("value").order(BucketOrder.compound(BucketOrder.aggregation("filter>sum", true))) - .subAggregation(filter("filter", termQuery("value", 100)).subAggregation(sum("sum").field("value")))) - .get(); + SearchResponse searchResponse = client().prepareSearch("idx") + .setQuery(matchAllQuery()) + .addAggregation( + terms("terms").field("value") + .order(BucketOrder.compound(BucketOrder.aggregation("filter>sum", true))) + .subAggregation(filter("filter", termQuery("value", 100)).subAggregation(sum("sum").field("value"))) + ) + .get(); assertHitCount(searchResponse, 10); @@ -204,54 +210,136 @@ public void testOrderByEmptyAggregation() throws Exception { * Ensure requests using nondeterministic scripts do not get cached. 
*/ public void testScriptCaching() throws Exception { - assertAcked(prepareCreate("cache_test_idx").setMapping("d", "type=long") + assertAcked( + prepareCreate("cache_test_idx").setMapping("d", "type=long") .setSettings(Settings.builder().put("requests.cache.enable", true).put("number_of_shards", 1).put("number_of_replicas", 1)) - .get()); - indexRandom(true, client().prepareIndex("cache_test_idx").setId("1").setSource("s", 1), - client().prepareIndex("cache_test_idx").setId("2").setSource("s", 2)); + .get() + ); + indexRandom( + true, + client().prepareIndex("cache_test_idx").setId("1").setSource("s", 1), + client().prepareIndex("cache_test_idx").setId("2").setSource("s", 2) + ); // Make sure we are starting with a clear cache - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getHitCount(), equalTo(0L)); - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getMissCount(), equalTo(0L)); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getHitCount(), + equalTo(0L) + ); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getMissCount(), + equalTo(0L) + ); // Test that a request using a nondeterministic script does not get cached - SearchResponse r = client().prepareSearch("cache_test_idx").setSize(0) - .addAggregation(sum("foo").field("d").script( - new Script(ScriptType.INLINE, METRIC_SCRIPT_ENGINE, RANDOM_SCRIPT, Collections.emptyMap()))).get(); + SearchResponse r = client().prepareSearch("cache_test_idx") + .setSize(0) + .addAggregation( + sum("foo").field("d").script(new Script(ScriptType.INLINE, METRIC_SCRIPT_ENGINE, RANDOM_SCRIPT, Collections.emptyMap())) + ) + .get(); assertSearchResponse(r); - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getHitCount(), equalTo(0L)); - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getMissCount(), equalTo(0L)); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getHitCount(), + equalTo(0L) + ); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getMissCount(), + equalTo(0L) + ); // Test that a request using a deterministic script gets cached - r = client().prepareSearch("cache_test_idx").setSize(0) - .addAggregation(sum("foo").field("d").script( - new Script(ScriptType.INLINE, METRIC_SCRIPT_ENGINE, VALUE_SCRIPT, Collections.emptyMap()))).get(); + r = client().prepareSearch("cache_test_idx") + .setSize(0) + .addAggregation( + sum("foo").field("d").script(new Script(ScriptType.INLINE, METRIC_SCRIPT_ENGINE, VALUE_SCRIPT, Collections.emptyMap())) + ) + .get(); assertSearchResponse(r); - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getHitCount(), equalTo(0L)); - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getMissCount(), equalTo(1L)); + assertThat( + client().admin() + 
.indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getHitCount(), + equalTo(0L) + ); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getMissCount(), + equalTo(1L) + ); // Ensure that non-scripted requests are cached as normal r = client().prepareSearch("cache_test_idx").setSize(0).addAggregation(sum("foo").field("d")).get(); assertSearchResponse(r); - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getHitCount(), equalTo(0L)); - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getMissCount(), equalTo(2L)); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getHitCount(), + equalTo(0L) + ); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getMissCount(), + equalTo(2L) + ); } public void testFieldAlias() { SearchResponse response = client().prepareSearch("old_index", "new_index") - .addAggregation(sum("sum") - .field("route_length_miles")) + .addAggregation(sum("sum").field("route_length_miles")) .get(); assertSearchResponse(response); @@ -262,12 +350,9 @@ public void testFieldAlias() { assertThat(sum.getValue(), equalTo(192.7)); } - public void testFieldAliasInSubAggregation() { + public void testFieldAliasInSubAggregation() { SearchResponse response = client().prepareSearch("old_index", "new_index") - .addAggregation(terms("terms") - .field("transit_mode") - .subAggregation(sum("sum") - .field("route_length_miles"))) + .addAggregation(terms("terms").field("transit_mode").subAggregation(sum("sum").field("route_length_miles"))) .get(); assertSearchResponse(response); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentileRanksIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentileRanksIT.java index 21db90ca9ebfb..7a8cb1fc0d1e4 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentileRanksIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentileRanksIT.java @@ -15,12 +15,12 @@ import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptType; import org.elasticsearch.search.aggregations.AggregationTestScriptsPlugin; +import org.elasticsearch.search.aggregations.BucketOrder; import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.bucket.filter.Filter; import org.elasticsearch.search.aggregations.bucket.global.Global; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; import org.elasticsearch.search.aggregations.bucket.terms.Terms; -import org.elasticsearch.search.aggregations.BucketOrder; import java.util.Arrays; import java.util.Collection; @@ -58,15 +58,15 @@ private static double[] randomPercents(long minValue, long maxValue) { final double[] percents = new double[length]; for (int i = 0; i < percents.length; ++i) { switch (randomInt(20)) { - case 0: - percents[i] = minValue; - break; - case 1: - percents[i] = maxValue; - break; - default: - percents[i] = 
(randomDouble() * (maxValue - minValue)) + minValue; - break; + case 0: + percents[i] = minValue; + break; + case 1: + percents[i] = maxValue; + break; + default: + percents[i] = (randomDouble() * (maxValue - minValue)) + minValue; + break; } } Arrays.sort(percents); @@ -103,10 +103,14 @@ private void assertConsistent(double[] pcts, PercentileRanks values, long minVal @Override public void testEmptyAggregation() throws Exception { SearchResponse searchResponse = client().prepareSearch("empty_bucket_idx") - .setQuery(matchAllQuery()) - .addAggregation(histogram("histo").field("value").interval(1L).minDocCount(0) - .subAggregation(randomCompression(percentileRanks("percentile_ranks", new double[]{10,15}).field("value")))) - .get(); + .setQuery(matchAllQuery()) + .addAggregation( + histogram("histo").field("value") + .interval(1L) + .minDocCount(0) + .subAggregation(randomCompression(percentileRanks("percentile_ranks", new double[] { 10, 15 }).field("value"))) + ) + .get(); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(2L)); Histogram histo = searchResponse.getAggregations().get("histo"); @@ -123,33 +127,34 @@ public void testEmptyAggregation() throws Exception { public void testNullValuesField() throws Exception { final double[] pcts = null; - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> client() - .prepareSearch("idx") - .setQuery(matchAllQuery()) - .addAggregation( - percentileRanks("percentile_ranks", pcts).method(PercentilesMethod.TDIGEST).field("value")) - .get()); + IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, + () -> client().prepareSearch("idx") + .setQuery(matchAllQuery()) + .addAggregation(percentileRanks("percentile_ranks", pcts).method(PercentilesMethod.TDIGEST).field("value")) + .get() + ); assertThat(e.getMessage(), equalTo("[values] must not be null: [percentile_ranks]")); } public void testEmptyValuesField() throws Exception { final double[] pcts = new double[0]; - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> client() - .prepareSearch("idx") - .setQuery(matchAllQuery()) - .addAggregation( - percentileRanks("percentile_ranks", pcts).method(PercentilesMethod.TDIGEST).field("value")) - .get()); + IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, + () -> client().prepareSearch("idx") + .setQuery(matchAllQuery()) + .addAggregation(percentileRanks("percentile_ranks", pcts).method(PercentilesMethod.TDIGEST).field("value")) + .get() + ); assertThat(e.getMessage(), equalTo("[values] must not be an empty array: [percentile_ranks]")); } @Override public void testUnmapped() throws Exception { SearchResponse searchResponse = client().prepareSearch("idx_unmapped") - .setQuery(matchAllQuery()) - .addAggregation(randomCompression(percentileRanks("percentile_ranks", new double[]{0, 10, 15, 100})) - .field("value")) - .get(); + .setQuery(matchAllQuery()) + .addAggregation(randomCompression(percentileRanks("percentile_ranks", new double[] { 0, 10, 15, 100 })).field("value")) + .get(); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(0L)); @@ -166,10 +171,9 @@ public void testUnmapped() throws Exception { public void testSingleValuedField() throws Exception { final double[] pcts = randomPercents(minValue, maxValue); SearchResponse searchResponse = client().prepareSearch("idx") - .setQuery(matchAllQuery()) - .addAggregation(randomCompression(percentileRanks("percentile_ranks", pcts)) - .field("value")) - .get(); + 
.setQuery(matchAllQuery()) + .addAggregation(randomCompression(percentileRanks("percentile_ranks", pcts)).field("value")) + .get(); assertHitCount(searchResponse, 10); @@ -180,12 +184,10 @@ public void testSingleValuedField() throws Exception { @Override public void testSingleValuedFieldGetProperty() throws Exception { final double[] pcts = randomPercents(minValue, maxValue); - SearchResponse searchResponse = client() - .prepareSearch("idx") - .setQuery(matchAllQuery()) - .addAggregation( - global("global").subAggregation( - randomCompression(percentileRanks("percentile_ranks", pcts)).field("value"))).get(); + SearchResponse searchResponse = client().prepareSearch("idx") + .setQuery(matchAllQuery()) + .addAggregation(global("global").subAggregation(randomCompression(percentileRanks("percentile_ranks", pcts)).field("value"))) + .get(); assertHitCount(searchResponse, 10); @@ -199,16 +201,15 @@ public void testSingleValuedFieldGetProperty() throws Exception { PercentileRanks values = global.getAggregations().get("percentile_ranks"); assertThat(values, notNullValue()); assertThat(values.getName(), equalTo("percentile_ranks")); - assertThat(((InternalAggregation)global).getProperty("percentile_ranks"), sameInstance(values)); + assertThat(((InternalAggregation) global).getProperty("percentile_ranks"), sameInstance(values)); } public void testSingleValuedFieldOutsideRange() throws Exception { - final double[] pcts = new double[] {minValue - 1, maxValue + 1}; + final double[] pcts = new double[] { minValue - 1, maxValue + 1 }; SearchResponse searchResponse = client().prepareSearch("idx") - .setQuery(matchAllQuery()) - .addAggregation(randomCompression(percentileRanks("percentile_ranks", pcts)) - .field("value")) - .get(); + .setQuery(matchAllQuery()) + .addAggregation(randomCompression(percentileRanks("percentile_ranks", pcts)).field("value")) + .get(); assertHitCount(searchResponse, 10); @@ -220,10 +221,9 @@ public void testSingleValuedFieldOutsideRange() throws Exception { public void testSingleValuedFieldPartiallyUnmapped() throws Exception { final double[] pcts = randomPercents(minValue, maxValue); SearchResponse searchResponse = client().prepareSearch("idx", "idx_unmapped") - .setQuery(matchAllQuery()) - .addAggregation(randomCompression(percentileRanks("percentile_ranks", pcts)) - .field("value")) - .get(); + .setQuery(matchAllQuery()) + .addAggregation(randomCompression(percentileRanks("percentile_ranks", pcts)).field("value")) + .get(); assertHitCount(searchResponse, 10); @@ -235,13 +235,12 @@ public void testSingleValuedFieldPartiallyUnmapped() throws Exception { public void testSingleValuedFieldWithValueScript() throws Exception { final double[] pcts = randomPercents(minValue - 1, maxValue - 1); SearchResponse searchResponse = client().prepareSearch("idx") - .setQuery(matchAllQuery()) - .addAggregation( - randomCompression( - percentileRanks("percentile_ranks", pcts)) - .field("value") - .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value - 1", emptyMap()))) - .get(); + .setQuery(matchAllQuery()) + .addAggregation( + randomCompression(percentileRanks("percentile_ranks", pcts)).field("value") + .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value - 1", emptyMap())) + ) + .get(); assertHitCount(searchResponse, 10); @@ -255,13 +254,12 @@ public void testSingleValuedFieldWithValueScriptWithParams() throws Exception { params.put("dec", 1); final double[] pcts = randomPercents(minValue - 1, maxValue - 1); SearchResponse searchResponse = 
client().prepareSearch("idx") - .setQuery(matchAllQuery()) - .addAggregation( - randomCompression( - percentileRanks("percentile_ranks", pcts)) - .field("value") - .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value - dec", params))) - .get(); + .setQuery(matchAllQuery()) + .addAggregation( + randomCompression(percentileRanks("percentile_ranks", pcts)).field("value") + .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value - dec", params)) + ) + .get(); assertHitCount(searchResponse, 10); @@ -273,10 +271,9 @@ public void testSingleValuedFieldWithValueScriptWithParams() throws Exception { public void testMultiValuedField() throws Exception { final double[] pcts = randomPercents(minValues, maxValues); SearchResponse searchResponse = client().prepareSearch("idx") - .setQuery(matchAllQuery()) - .addAggregation(randomCompression(percentileRanks("percentile_ranks", pcts)) - .field("values")) - .get(); + .setQuery(matchAllQuery()) + .addAggregation(randomCompression(percentileRanks("percentile_ranks", pcts)).field("values")) + .get(); assertHitCount(searchResponse, 10); @@ -288,13 +285,12 @@ public void testMultiValuedField() throws Exception { public void testMultiValuedFieldWithValueScript() throws Exception { final double[] pcts = randomPercents(minValues - 1, maxValues - 1); SearchResponse searchResponse = client().prepareSearch("idx") - .setQuery(matchAllQuery()) - .addAggregation( - randomCompression( - percentileRanks("percentile_ranks", pcts)) - .field("values") - .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value - 1", emptyMap()))) - .get(); + .setQuery(matchAllQuery()) + .addAggregation( + randomCompression(percentileRanks("percentile_ranks", pcts)).field("values") + .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value - 1", emptyMap())) + ) + .get(); assertHitCount(searchResponse, 10); @@ -305,13 +301,12 @@ public void testMultiValuedFieldWithValueScript() throws Exception { public void testMultiValuedFieldWithValueScriptReverse() throws Exception { final double[] pcts = randomPercents(-maxValues, -minValues); SearchResponse searchResponse = client().prepareSearch("idx") - .setQuery(matchAllQuery()) - .addAggregation( - randomCompression( - percentileRanks("percentile_ranks", pcts)) - .field("values") - .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value * -1", emptyMap()))) - .get(); + .setQuery(matchAllQuery()) + .addAggregation( + randomCompression(percentileRanks("percentile_ranks", pcts)).field("values") + .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value * -1", emptyMap())) + ) + .get(); assertHitCount(searchResponse, 10); @@ -325,13 +320,12 @@ public void testMultiValuedFieldWithValueScriptWithParams() throws Exception { params.put("dec", 1); final double[] pcts = randomPercents(minValues - 1, maxValues - 1); SearchResponse searchResponse = client().prepareSearch("idx") - .setQuery(matchAllQuery()) - .addAggregation( - randomCompression( - percentileRanks("percentile_ranks", pcts)) - .field("values") - .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value - dec", params))) - .get(); + .setQuery(matchAllQuery()) + .addAggregation( + randomCompression(percentileRanks("percentile_ranks", pcts)).field("values") + .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value - dec", params)) + ) + .get(); assertHitCount(searchResponse, 10); @@ -343,12 +337,13 @@ 
public void testMultiValuedFieldWithValueScriptWithParams() throws Exception { public void testScriptSingleValued() throws Exception { final double[] pcts = randomPercents(minValue, maxValue); SearchResponse searchResponse = client().prepareSearch("idx") - .setQuery(matchAllQuery()) - .addAggregation( - randomCompression( - percentileRanks("percentile_ranks", pcts)) - .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "doc['value'].value", emptyMap()))) - .get(); + .setQuery(matchAllQuery()) + .addAggregation( + randomCompression(percentileRanks("percentile_ranks", pcts)).script( + new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "doc['value'].value", emptyMap()) + ) + ) + .get(); assertHitCount(searchResponse, 10); @@ -365,11 +360,9 @@ public void testScriptSingleValuedWithParams() throws Exception { final double[] pcts = randomPercents(minValue - 1, maxValue - 1); SearchResponse searchResponse = client().prepareSearch("idx") - .setQuery(matchAllQuery()) - .addAggregation( - randomCompression( - percentileRanks("percentile_ranks", pcts)).script(script)) - .get(); + .setQuery(matchAllQuery()) + .addAggregation(randomCompression(percentileRanks("percentile_ranks", pcts)).script(script)) + .get(); assertHitCount(searchResponse, 10); @@ -382,12 +375,9 @@ public void testScriptMultiValued() throws Exception { final double[] pcts = randomPercents(minValues, maxValues); Script script = new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "doc['values']", emptyMap()); SearchResponse searchResponse = client().prepareSearch("idx") - .setQuery(matchAllQuery()) - .addAggregation( - randomCompression( - percentileRanks("percentile_ranks", pcts)) - .script(script)) - .get(); + .setQuery(matchAllQuery()) + .addAggregation(randomCompression(percentileRanks("percentile_ranks", pcts)).script(script)) + .get(); assertHitCount(searchResponse, 10); @@ -401,12 +391,9 @@ public void testScriptMultiValuedWithParams() throws Exception { final double[] pcts = randomPercents(minValues - 1, maxValues - 1); SearchResponse searchResponse = client().prepareSearch("idx") - .setQuery(matchAllQuery()) - .addAggregation( - randomCompression( - percentileRanks("percentile_ranks", pcts)) - .script(script)) - .get(); + .setQuery(matchAllQuery()) + .addAggregation(randomCompression(percentileRanks("percentile_ranks", pcts)).script(script)) + .get(); assertHitCount(searchResponse, 10); @@ -417,12 +404,14 @@ public void testScriptMultiValuedWithParams() throws Exception { public void testOrderBySubAggregation() { boolean asc = randomBoolean(); SearchResponse searchResponse = client().prepareSearch("idx") - .setQuery(matchAllQuery()) - .addAggregation( - histogram("histo").field("value").interval(2L) - .subAggregation(randomCompression(percentileRanks("percentile_ranks", new double[]{99}).field("value"))) - .order(BucketOrder.aggregation("percentile_ranks", "99", asc))) - .get(); + .setQuery(matchAllQuery()) + .addAggregation( + histogram("histo").field("value") + .interval(2L) + .subAggregation(randomCompression(percentileRanks("percentile_ranks", new double[] { 99 }).field("value"))) + .order(BucketOrder.aggregation("percentile_ranks", "99", asc)) + ) + .get(); assertHitCount(searchResponse, 10); @@ -442,12 +431,18 @@ public void testOrderBySubAggregation() { @Override public void testOrderByEmptyAggregation() throws Exception { - SearchResponse searchResponse = client().prepareSearch("idx").setQuery(matchAllQuery()) - 
.addAggregation(terms("terms").field("value").order(BucketOrder.compound(BucketOrder.aggregation("filter>ranks.99", true))) - .subAggregation(filter("filter", termQuery("value", 100)) - .subAggregation(percentileRanks("ranks", new double[]{99}) - .method(PercentilesMethod.TDIGEST).field("value")))) - .get(); + SearchResponse searchResponse = client().prepareSearch("idx") + .setQuery(matchAllQuery()) + .addAggregation( + terms("terms").field("value") + .order(BucketOrder.compound(BucketOrder.aggregation("filter>ranks.99", true))) + .subAggregation( + filter("filter", termQuery("value", 100)).subAggregation( + percentileRanks("ranks", new double[] { 99 }).method(PercentilesMethod.TDIGEST).field("value") + ) + ) + ) + .get(); assertHitCount(searchResponse, 10); @@ -477,50 +472,136 @@ public void testOrderByEmptyAggregation() throws Exception { * Ensure requests using nondeterministic scripts do not get cached. */ public void testScriptCaching() throws Exception { - assertAcked(prepareCreate("cache_test_idx").setMapping("d", "type=long") + assertAcked( + prepareCreate("cache_test_idx").setMapping("d", "type=long") .setSettings(Settings.builder().put("requests.cache.enable", true).put("number_of_shards", 1).put("number_of_replicas", 1)) - .get()); - indexRandom(true, client().prepareIndex("cache_test_idx").setId("1").setSource("s", 1), - client().prepareIndex("cache_test_idx").setId("2").setSource("s", 2)); + .get() + ); + indexRandom( + true, + client().prepareIndex("cache_test_idx").setId("1").setSource("s", 1), + client().prepareIndex("cache_test_idx").setId("2").setSource("s", 2) + ); // Make sure we are starting with a clear cache - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getHitCount(), equalTo(0L)); - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getMissCount(), equalTo(0L)); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getHitCount(), + equalTo(0L) + ); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getMissCount(), + equalTo(0L) + ); // Test that a request using a nondeterministic script does not get cached - SearchResponse r = client().prepareSearch("cache_test_idx").setSize(0) - .addAggregation(percentileRanks("foo", new double[]{50.0}) - .field("d") - .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "Math.random()", emptyMap()))).get(); + SearchResponse r = client().prepareSearch("cache_test_idx") + .setSize(0) + .addAggregation( + percentileRanks("foo", new double[] { 50.0 }).field("d") + .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "Math.random()", emptyMap())) + ) + .get(); assertSearchResponse(r); - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getHitCount(), equalTo(0L)); - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getMissCount(), equalTo(0L)); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getHitCount(), + equalTo(0L) + ); + assertThat( + client().admin() + .indices() + 
.prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getMissCount(), + equalTo(0L) + ); // Test that a request using a deterministic script gets cached - r = client().prepareSearch("cache_test_idx").setSize(0) - .addAggregation(percentileRanks("foo", new double[]{50.0}) - .field("d") - .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value - 1", emptyMap()))).get(); + r = client().prepareSearch("cache_test_idx") + .setSize(0) + .addAggregation( + percentileRanks("foo", new double[] { 50.0 }).field("d") + .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value - 1", emptyMap())) + ) + .get(); assertSearchResponse(r); - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getHitCount(), equalTo(0L)); - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getMissCount(), equalTo(1L)); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getHitCount(), + equalTo(0L) + ); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getMissCount(), + equalTo(1L) + ); // Ensure that non-scripted requests are cached as normal - r = client().prepareSearch("cache_test_idx").setSize(0).addAggregation(percentileRanks("foo", new double[]{50.0}).field("d")).get(); + r = client().prepareSearch("cache_test_idx") + .setSize(0) + .addAggregation(percentileRanks("foo", new double[] { 50.0 }).field("d")) + .get(); assertSearchResponse(r); - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getHitCount(), equalTo(0L)); - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getMissCount(), equalTo(2L)); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getHitCount(), + equalTo(0L) + ); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getMissCount(), + equalTo(2L) + ); } } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentilesIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentilesIT.java index d397243ecb8ba..d1d89d7c02740 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentilesIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentilesIT.java @@ -60,18 +60,18 @@ private static double[] randomPercentiles() { final Set uniquedPercentiles = new HashSet<>(); for (int i = 0; i < length; ++i) { switch (randomInt(20)) { - case 0: - uniquedPercentiles.add(0.0); - break; - case 1: - uniquedPercentiles.add(100.0); - break; - default: - uniquedPercentiles.add(randomDouble() * 100); - break; + case 0: + uniquedPercentiles.add(0.0); + break; + case 1: + uniquedPercentiles.add(100.0); + break; + default: + uniquedPercentiles.add(randomDouble() * 100); + break; } } - double[] percentiles= 
uniquedPercentiles.stream().mapToDouble(Double::doubleValue).sorted().toArray(); + double[] percentiles = uniquedPercentiles.stream().mapToDouble(Double::doubleValue).sorted().toArray(); LogManager.getLogger(TDigestPercentilesIT.class).info("Using percentiles={}", Arrays.toString(percentiles)); return percentiles; } @@ -109,11 +109,14 @@ private void assertConsistent(double[] pcts, Percentiles percentiles, long minVa @Override public void testEmptyAggregation() throws Exception { SearchResponse searchResponse = client().prepareSearch("empty_bucket_idx") - .setQuery(matchAllQuery()) - .addAggregation(histogram("histo").field("value").interval(1L).minDocCount(0) - .subAggregation(randomCompression(percentiles("percentiles").field("value")) - .percentiles(10, 15))) - .get(); + .setQuery(matchAllQuery()) + .addAggregation( + histogram("histo").field("value") + .interval(1L) + .minDocCount(0) + .subAggregation(randomCompression(percentiles("percentiles").field("value")).percentiles(10, 15)) + ) + .get(); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(2L)); Histogram histo = searchResponse.getAggregations().get("histo"); @@ -131,11 +134,9 @@ public void testEmptyAggregation() throws Exception { @Override public void testUnmapped() throws Exception { SearchResponse searchResponse = client().prepareSearch("idx_unmapped") - .setQuery(matchAllQuery()) - .addAggregation(randomCompression(percentiles("percentiles")) - .field("value") - .percentiles(0, 10, 15, 100)) - .get(); + .setQuery(matchAllQuery()) + .addAggregation(randomCompression(percentiles("percentiles")).field("value").percentiles(0, 10, 15, 100)) + .get(); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(0L)); @@ -152,11 +153,9 @@ public void testUnmapped() throws Exception { public void testSingleValuedField() throws Exception { final double[] pcts = randomPercentiles(); SearchResponse searchResponse = client().prepareSearch("idx") - .setQuery(matchAllQuery()) - .addAggregation(randomCompression(percentiles("percentiles")) - .field("value") - .percentiles(pcts)) - .get(); + .setQuery(matchAllQuery()) + .addAggregation(randomCompression(percentiles("percentiles")).field("value").percentiles(pcts)) + .get(); assertHitCount(searchResponse, 10); @@ -167,12 +166,10 @@ public void testSingleValuedField() throws Exception { @Override public void testSingleValuedFieldGetProperty() throws Exception { final double[] pcts = randomPercentiles(); - SearchResponse searchResponse = client() - .prepareSearch("idx") - .setQuery(matchAllQuery()) - .addAggregation( - global("global").subAggregation(randomCompression(percentiles("percentiles")).field("value").percentiles(pcts))) - .get(); + SearchResponse searchResponse = client().prepareSearch("idx") + .setQuery(matchAllQuery()) + .addAggregation(global("global").subAggregation(randomCompression(percentiles("percentiles")).field("value").percentiles(pcts))) + .get(); assertHitCount(searchResponse, 10); @@ -186,18 +183,16 @@ public void testSingleValuedFieldGetProperty() throws Exception { Percentiles percentiles = global.getAggregations().get("percentiles"); assertThat(percentiles, notNullValue()); assertThat(percentiles.getName(), equalTo("percentiles")); - assertThat(((InternalAggregation)global).getProperty("percentiles"), sameInstance(percentiles)); + assertThat(((InternalAggregation) global).getProperty("percentiles"), sameInstance(percentiles)); } @Override public void testSingleValuedFieldPartiallyUnmapped() throws Exception { final double[] pcts = 
randomPercentiles(); SearchResponse searchResponse = client().prepareSearch("idx", "idx_unmapped") - .setQuery(matchAllQuery()) - .addAggregation(randomCompression(percentiles("percentiles")) - .field("value") - .percentiles(pcts)) - .get(); + .setQuery(matchAllQuery()) + .addAggregation(randomCompression(percentiles("percentiles")).field("value").percentiles(pcts)) + .get(); assertHitCount(searchResponse, 10); @@ -209,14 +204,13 @@ public void testSingleValuedFieldPartiallyUnmapped() throws Exception { public void testSingleValuedFieldWithValueScript() throws Exception { final double[] pcts = randomPercentiles(); SearchResponse searchResponse = client().prepareSearch("idx") - .setQuery(matchAllQuery()) - .addAggregation( - randomCompression( - percentiles("percentiles")) - .field("value") - .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value - 1", emptyMap())) - .percentiles(pcts)) - .get(); + .setQuery(matchAllQuery()) + .addAggregation( + randomCompression(percentiles("percentiles")).field("value") + .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value - 1", emptyMap())) + .percentiles(pcts) + ) + .get(); assertHitCount(searchResponse, 10); @@ -230,14 +224,13 @@ public void testSingleValuedFieldWithValueScriptWithParams() throws Exception { params.put("dec", 1); final double[] pcts = randomPercentiles(); SearchResponse searchResponse = client().prepareSearch("idx") - .setQuery(matchAllQuery()) - .addAggregation( - randomCompression( - percentiles("percentiles")) - .field("value") - .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value - dec", params)) - .percentiles(pcts)) - .get(); + .setQuery(matchAllQuery()) + .addAggregation( + randomCompression(percentiles("percentiles")).field("value") + .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value - dec", params)) + .percentiles(pcts) + ) + .get(); assertHitCount(searchResponse, 10); @@ -249,9 +242,9 @@ public void testSingleValuedFieldWithValueScriptWithParams() throws Exception { public void testMultiValuedField() throws Exception { final double[] pcts = randomPercentiles(); SearchResponse searchResponse = client().prepareSearch("idx") - .setQuery(matchAllQuery()) - .addAggregation(randomCompression(percentiles("percentiles")).field("values").percentiles(pcts)) - .get(); + .setQuery(matchAllQuery()) + .addAggregation(randomCompression(percentiles("percentiles")).field("values").percentiles(pcts)) + .get(); assertHitCount(searchResponse, 10); @@ -263,14 +256,13 @@ public void testMultiValuedField() throws Exception { public void testMultiValuedFieldWithValueScript() throws Exception { final double[] pcts = randomPercentiles(); SearchResponse searchResponse = client().prepareSearch("idx") - .setQuery(matchAllQuery()) - .addAggregation( - randomCompression( - percentiles("percentiles")) - .field("values") - .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value - 1", emptyMap())) - .percentiles(pcts)) - .get(); + .setQuery(matchAllQuery()) + .addAggregation( + randomCompression(percentiles("percentiles")).field("values") + .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value - 1", emptyMap())) + .percentiles(pcts) + ) + .get(); assertHitCount(searchResponse, 10); @@ -281,14 +273,13 @@ public void testMultiValuedFieldWithValueScript() throws Exception { public void testMultiValuedFieldWithValueScriptReverse() throws Exception { final double[] pcts = randomPercentiles(); 
SearchResponse searchResponse = client().prepareSearch("idx") - .setQuery(matchAllQuery()) - .addAggregation( - randomCompression( - percentiles("percentiles")) - .field("values") - .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value * -1", emptyMap())) - .percentiles(pcts)) - .get(); + .setQuery(matchAllQuery()) + .addAggregation( + randomCompression(percentiles("percentiles")).field("values") + .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value * -1", emptyMap())) + .percentiles(pcts) + ) + .get(); assertHitCount(searchResponse, 10); @@ -302,14 +293,13 @@ public void testMultiValuedFieldWithValueScriptWithParams() throws Exception { params.put("dec", 1); final double[] pcts = randomPercentiles(); SearchResponse searchResponse = client().prepareSearch("idx") - .setQuery(matchAllQuery()) - .addAggregation( - randomCompression( - percentiles("percentiles")) - .field("values") - .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value - dec", params)) - .percentiles(pcts)) - .get(); + .setQuery(matchAllQuery()) + .addAggregation( + randomCompression(percentiles("percentiles")).field("values") + .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value - dec", params)) + .percentiles(pcts) + ) + .get(); assertHitCount(searchResponse, 10); @@ -322,13 +312,9 @@ public void testScriptSingleValued() throws Exception { Script script = new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "doc['value'].value", emptyMap()); final double[] pcts = randomPercentiles(); SearchResponse searchResponse = client().prepareSearch("idx") - .setQuery(matchAllQuery()) - .addAggregation( - randomCompression( - percentiles("percentiles")) - .script(script) - .percentiles(pcts)) - .get(); + .setQuery(matchAllQuery()) + .addAggregation(randomCompression(percentiles("percentiles")).script(script).percentiles(pcts)) + .get(); assertHitCount(searchResponse, 10); @@ -345,13 +331,9 @@ public void testScriptSingleValuedWithParams() throws Exception { final double[] pcts = randomPercentiles(); SearchResponse searchResponse = client().prepareSearch("idx") - .setQuery(matchAllQuery()) - .addAggregation( - randomCompression( - percentiles("percentiles")) - .script(script) - .percentiles(pcts)) - .get(); + .setQuery(matchAllQuery()) + .addAggregation(randomCompression(percentiles("percentiles")).script(script).percentiles(pcts)) + .get(); assertHitCount(searchResponse, 10); @@ -365,13 +347,9 @@ public void testScriptMultiValued() throws Exception { Script script = new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "doc['values']", emptyMap()); SearchResponse searchResponse = client().prepareSearch("idx") - .setQuery(matchAllQuery()) - .addAggregation( - randomCompression( - percentiles("percentiles")) - .script(script) - .percentiles(pcts)) - .get(); + .setQuery(matchAllQuery()) + .addAggregation(randomCompression(percentiles("percentiles")).script(script).percentiles(pcts)) + .get(); assertHitCount(searchResponse, 10); @@ -385,13 +363,9 @@ public void testScriptMultiValuedWithParams() throws Exception { final double[] pcts = randomPercentiles(); SearchResponse searchResponse = client().prepareSearch("idx") - .setQuery(matchAllQuery()) - .addAggregation( - randomCompression( - percentiles("percentiles")) - .script(script) - .percentiles(pcts)) - .get(); + .setQuery(matchAllQuery()) + .addAggregation(randomCompression(percentiles("percentiles")).script(script).percentiles(pcts)) + .get(); 
assertHitCount(searchResponse, 10); @@ -402,12 +376,14 @@ public void testScriptMultiValuedWithParams() throws Exception { public void testOrderBySubAggregation() { boolean asc = randomBoolean(); SearchResponse searchResponse = client().prepareSearch("idx") - .setQuery(matchAllQuery()) - .addAggregation( - histogram("histo").field("value").interval(2L) - .subAggregation(randomCompression(percentiles("percentiles").field("value").percentiles(99))) - .order(BucketOrder.aggregation("percentiles", "99", asc))) - .get(); + .setQuery(matchAllQuery()) + .addAggregation( + histogram("histo").field("value") + .interval(2L) + .subAggregation(randomCompression(percentiles("percentiles").field("value").percentiles(99))) + .order(BucketOrder.aggregation("percentiles", "99", asc)) + ) + .get(); assertHitCount(searchResponse, 10); @@ -427,12 +403,18 @@ public void testOrderBySubAggregation() { @Override public void testOrderByEmptyAggregation() throws Exception { - SearchResponse searchResponse = client().prepareSearch("idx").setQuery(matchAllQuery()) - .addAggregation( - terms("terms").field("value").order(BucketOrder.compound(BucketOrder.aggregation("filter>percentiles.99", true))) - .subAggregation(filter("filter", termQuery("value", 100)) - .subAggregation(percentiles("percentiles").method(PercentilesMethod.TDIGEST).field("value")))) - .get(); + SearchResponse searchResponse = client().prepareSearch("idx") + .setQuery(matchAllQuery()) + .addAggregation( + terms("terms").field("value") + .order(BucketOrder.compound(BucketOrder.aggregation("filter>percentiles.99", true))) + .subAggregation( + filter("filter", termQuery("value", 100)).subAggregation( + percentiles("percentiles").method(PercentilesMethod.TDIGEST).field("value") + ) + ) + ) + .get(); assertHitCount(searchResponse, 10); @@ -462,47 +444,134 @@ public void testOrderByEmptyAggregation() throws Exception { * Ensure requests using nondeterministic scripts do not get cached. 
*/ public void testScriptCaching() throws Exception { - assertAcked(prepareCreate("cache_test_idx").setMapping("d", "type=long") + assertAcked( + prepareCreate("cache_test_idx").setMapping("d", "type=long") .setSettings(Settings.builder().put("requests.cache.enable", true).put("number_of_shards", 1).put("number_of_replicas", 1)) - .get()); - indexRandom(true, client().prepareIndex("cache_test_idx").setId("1").setSource("s", 1), - client().prepareIndex("cache_test_idx").setId("2").setSource("s", 2)); + .get() + ); + indexRandom( + true, + client().prepareIndex("cache_test_idx").setId("1").setSource("s", 1), + client().prepareIndex("cache_test_idx").setId("2").setSource("s", 2) + ); // Make sure we are starting with a clear cache - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getHitCount(), equalTo(0L)); - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getMissCount(), equalTo(0L)); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getHitCount(), + equalTo(0L) + ); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getMissCount(), + equalTo(0L) + ); // Test that a request using a nondeterministic script does not get cached - SearchResponse r = client().prepareSearch("cache_test_idx").setSize(0).addAggregation(percentiles("foo").field("d") - .percentiles(50.0).script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "Math.random()", emptyMap()))) - .get(); + SearchResponse r = client().prepareSearch("cache_test_idx") + .setSize(0) + .addAggregation( + percentiles("foo").field("d") + .percentiles(50.0) + .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "Math.random()", emptyMap())) + ) + .get(); assertSearchResponse(r); - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getHitCount(), equalTo(0L)); - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getMissCount(), equalTo(0L)); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getHitCount(), + equalTo(0L) + ); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getMissCount(), + equalTo(0L) + ); // Test that a request using a deterministic script gets cached - r = client().prepareSearch("cache_test_idx").setSize(0).addAggregation(percentiles("foo").field("d") - .percentiles(50.0).script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value - 1", emptyMap()))) - .get(); + r = client().prepareSearch("cache_test_idx") + .setSize(0) + .addAggregation( + percentiles("foo").field("d") + .percentiles(50.0) + .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value - 1", emptyMap())) + ) + .get(); assertSearchResponse(r); - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getHitCount(), equalTo(0L)); - 
assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getMissCount(), equalTo(1L)); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getHitCount(), + equalTo(0L) + ); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getMissCount(), + equalTo(1L) + ); // Ensure that non-scripted requests are cached as normal r = client().prepareSearch("cache_test_idx").setSize(0).addAggregation(percentiles("foo").field("d").percentiles(50.0)).get(); assertSearchResponse(r); - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getHitCount(), equalTo(0L)); - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getMissCount(), equalTo(2L)); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getHitCount(), + equalTo(0L) + ); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getMissCount(), + equalTo(2L) + ); } } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/TopHitsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/TopHitsIT.java index 86431a7711d08..ef7d24495abd2 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/TopHitsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/TopHitsIT.java @@ -114,99 +114,108 @@ public void setupSuiteScopeCluster() throws Exception { assertAcked(prepareCreate("idx").setMapping(TERMS_AGGS_FIELD, "type=keyword")); assertAcked(prepareCreate("field-collapsing").setMapping("group", "type=keyword")); createIndex("empty"); - assertAcked(prepareCreate("articles").setMapping( - jsonBuilder().startObject().startObject("_doc").startObject("properties") - .startObject(TERMS_AGGS_FIELD) + assertAcked( + prepareCreate("articles").setMapping( + jsonBuilder().startObject() + .startObject("_doc") + .startObject("properties") + .startObject(TERMS_AGGS_FIELD) .field("type", "keyword") - .endObject() - .startObject("comments") + .endObject() + .startObject("comments") .field("type", "nested") .startObject("properties") - .startObject("user") - .field("type", "keyword") - .endObject() - .startObject("date") - .field("type", "long") - .endObject() - .startObject("message") - .field("type", "text") - .field("store", true) - .field("term_vector", "with_positions_offsets") - .field("index_options", "offsets") - .endObject() - .startObject("reviewers") - .field("type", "nested") - .startObject("properties") - .startObject("name") - .field("type", "keyword") - .endObject() - .endObject() - .endObject() + .startObject("user") + .field("type", "keyword") + .endObject() + .startObject("date") + .field("type", "long") + .endObject() + .startObject("message") + .field("type", "text") + .field("store", true) + .field("term_vector", "with_positions_offsets") + .field("index_options", "offsets") + .endObject() + .startObject("reviewers") + .field("type", "nested") + .startObject("properties") + .startObject("name") 
+ .field("type", "keyword") + .endObject() .endObject() - .endObject() - .endObject().endObject().endObject())); + .endObject() + .endObject() + .endObject() + .endObject() + .endObject() + .endObject() + ) + ); ensureGreen("idx", "empty", "articles"); List builders = new ArrayList<>(); for (int i = 0; i < 50; i++) { - builders.add(client().prepareIndex("idx").setId(Integer.toString(i)).setSource(jsonBuilder() - .startObject() - .field(TERMS_AGGS_FIELD, "val" + (i / 10)) - .field(SORT_FIELD, i + 1) - .field("text", "some text to entertain") - .field("field1", 5) - .field("field2", 2.71) - .endObject())); + builders.add( + client().prepareIndex("idx") + .setId(Integer.toString(i)) + .setSource( + jsonBuilder().startObject() + .field(TERMS_AGGS_FIELD, "val" + (i / 10)) + .field(SORT_FIELD, i + 1) + .field("text", "some text to entertain") + .field("field1", 5) + .field("field2", 2.71) + .endObject() + ) + ); } - builders.add(client().prepareIndex("field-collapsing").setId("1").setSource(jsonBuilder() - .startObject() - .field("group", "a") - .field("text", "term x y z b") - .endObject())); - builders.add(client().prepareIndex("field-collapsing").setId("2").setSource(jsonBuilder() - .startObject() - .field("group", "a") - .field("text", "term x y z n rare") - .field("value", 1) - .endObject())); - builders.add(client().prepareIndex("field-collapsing").setId("3").setSource(jsonBuilder() - .startObject() - .field("group", "b") - .field("text", "x y z term") - .endObject())); - builders.add(client().prepareIndex("field-collapsing").setId("4").setSource(jsonBuilder() - .startObject() - .field("group", "b") - .field("text", "x y term") - .endObject())); - builders.add(client().prepareIndex("field-collapsing").setId("5").setSource(jsonBuilder() - .startObject() - .field("group", "b") - .field("text", "x term") - .endObject())); - builders.add(client().prepareIndex("field-collapsing").setId("6").setSource(jsonBuilder() - .startObject() - .field("group", "b") - .field("text", "term rare") - .field("value", 3) - .endObject())); - builders.add(client().prepareIndex("field-collapsing").setId("7").setSource(jsonBuilder() - .startObject() - .field("group", "c") - .field("text", "x y z term") - .endObject())); - builders.add(client().prepareIndex("field-collapsing").setId("8").setSource(jsonBuilder() - .startObject() - .field("group", "c") - .field("text", "x y term b") - .endObject())); - builders.add(client().prepareIndex("field-collapsing").setId("9").setSource(jsonBuilder() - .startObject() - .field("group", "c") - .field("text", "rare x term") - .field("value", 2) - .endObject())); + builders.add( + client().prepareIndex("field-collapsing") + .setId("1") + .setSource(jsonBuilder().startObject().field("group", "a").field("text", "term x y z b").endObject()) + ); + builders.add( + client().prepareIndex("field-collapsing") + .setId("2") + .setSource(jsonBuilder().startObject().field("group", "a").field("text", "term x y z n rare").field("value", 1).endObject()) + ); + builders.add( + client().prepareIndex("field-collapsing") + .setId("3") + .setSource(jsonBuilder().startObject().field("group", "b").field("text", "x y z term").endObject()) + ); + builders.add( + client().prepareIndex("field-collapsing") + .setId("4") + .setSource(jsonBuilder().startObject().field("group", "b").field("text", "x y term").endObject()) + ); + builders.add( + client().prepareIndex("field-collapsing") + .setId("5") + .setSource(jsonBuilder().startObject().field("group", "b").field("text", "x term").endObject()) + ); + 
builders.add( + client().prepareIndex("field-collapsing") + .setId("6") + .setSource(jsonBuilder().startObject().field("group", "b").field("text", "term rare").field("value", 3).endObject()) + ); + builders.add( + client().prepareIndex("field-collapsing") + .setId("7") + .setSource(jsonBuilder().startObject().field("group", "c").field("text", "x y z term").endObject()) + ); + builders.add( + client().prepareIndex("field-collapsing") + .setId("8") + .setSource(jsonBuilder().startObject().field("group", "c").field("text", "x y term b").endObject()) + ); + builders.add( + client().prepareIndex("field-collapsing") + .setId("9") + .setSource(jsonBuilder().startObject().field("group", "c").field("text", "rare x term").field("value", 2).endObject()) + ); numArticles = scaledRandomIntBetween(10, 100); numArticles -= (numArticles % 5); @@ -219,48 +228,79 @@ public void setupSuiteScopeCluster() throws Exception { } builder.endArray().endObject(); - builders.add( - client().prepareIndex("articles").setSource(builder) - ); + builders.add(client().prepareIndex("articles").setSource(builder)); } builders.add( - client().prepareIndex("articles").setId("1") - .setSource(jsonBuilder().startObject().field("title", "title 1").field("body", "some text").startArray("comments") - .startObject() - .field("user", "a").field("date", 1L).field("message", "some comment") - .startArray("reviewers") - .startObject().field("name", "user a").endObject() - .startObject().field("name", "user b").endObject() - .startObject().field("name", "user c").endObject() - .endArray() - .endObject() - .startObject() - .field("user", "b").field("date", 2L).field("message", "some other comment") - .startArray("reviewers") - .startObject().field("name", "user c").endObject() - .startObject().field("name", "user d").endObject() - .startObject().field("name", "user e").endObject() - .endArray() - .endObject() - .endArray().endObject()) + client().prepareIndex("articles") + .setId("1") + .setSource( + jsonBuilder().startObject() + .field("title", "title 1") + .field("body", "some text") + .startArray("comments") + .startObject() + .field("user", "a") + .field("date", 1L) + .field("message", "some comment") + .startArray("reviewers") + .startObject() + .field("name", "user a") + .endObject() + .startObject() + .field("name", "user b") + .endObject() + .startObject() + .field("name", "user c") + .endObject() + .endArray() + .endObject() + .startObject() + .field("user", "b") + .field("date", 2L) + .field("message", "some other comment") + .startArray("reviewers") + .startObject() + .field("name", "user c") + .endObject() + .startObject() + .field("name", "user d") + .endObject() + .startObject() + .field("name", "user e") + .endObject() + .endArray() + .endObject() + .endArray() + .endObject() + ) ); builders.add( - client().prepareIndex("articles").setId("2") - .setSource(jsonBuilder().startObject().field("title", "title 2").field("body", "some different text") - .startArray("comments") - .startObject() - .field("user", "b").field("date", 3L).field("message", "some comment") - .startArray("reviewers") - .startObject().field("name", "user f").endObject() - .endArray() - .endObject() - .startObject() - .field("user", "c") - .field("date", 4L) - .field("message", "some other comment") - .endObject() - .endArray().endObject()) + client().prepareIndex("articles") + .setId("2") + .setSource( + jsonBuilder().startObject() + .field("title", "title 2") + .field("body", "some different text") + .startArray("comments") + .startObject() + 
.field("user", "b") + .field("date", 3L) + .field("message", "some comment") + .startArray("reviewers") + .startObject() + .field("name", "user f") + .endObject() + .endArray() + .endObject() + .startObject() + .field("user", "c") + .field("date", 4L) + .field("message", "some other comment") + .endObject() + .endArray() + .endObject() + ) ); indexRandom(true, builders); @@ -272,16 +312,13 @@ private String key(Terms.Bucket bucket) { } public void testBasics() throws Exception { - SearchResponse response = client() - .prepareSearch("idx") - .addAggregation(terms("terms") - .executionHint(randomExecutionHint()) - .field(TERMS_AGGS_FIELD) - .subAggregation( - topHits("hits").sort(SortBuilders.fieldSort(SORT_FIELD).order(SortOrder.DESC)) - ) - ) - .get(); + SearchResponse response = client().prepareSearch("idx") + .addAggregation( + terms("terms").executionHint(randomExecutionHint()) + .field(TERMS_AGGS_FIELD) + .subAggregation(topHits("hits").sort(SortBuilders.fieldSort(SORT_FIELD).order(SortOrder.DESC))) + ) + .get(); assertSearchResponse(response); @@ -311,13 +348,11 @@ public void testBasics() throws Exception { public void testIssue11119() throws Exception { // Test that top_hits aggregation is fed scores if query results size=0 - SearchResponse response = client() - .prepareSearch("field-collapsing") - .setSize(0) - .setQuery(matchQuery("text", "x y z")) - .addAggregation(terms("terms") - .executionHint(randomExecutionHint()).field("group").subAggregation(topHits("hits"))) - .get(); + SearchResponse response = client().prepareSearch("field-collapsing") + .setSize(0) + .setQuery(matchQuery("text", "x y z")) + .addAggregation(terms("terms").executionHint(randomExecutionHint()).field("group").subAggregation(topHits("hits"))) + .get(); assertSearchResponse(response); @@ -335,7 +370,7 @@ public void testIssue11119() throws Exception { SearchHits hits = topHits.getHits(); float bestScore = Float.MAX_VALUE; for (int h = 0; h < hits.getHits().length; h++) { - float score=hits.getAt(h).getScore(); + float score = hits.getAt(h).getScore(); assertThat(score, lessThanOrEqualTo(bestScore)); assertThat(score, greaterThan(0f)); bestScore = hits.getAt(h).getScore(); @@ -346,13 +381,12 @@ public void testIssue11119() throws Exception { // (technically not a test of top_hits but implementation details are // tied up with the need to feed scores into the agg tree even when // users don't want ranked set of query results.) 
- response = client() - .prepareSearch("field-collapsing") - .setSize(0) - .setMinScore(0.0001f) - .setQuery(matchQuery("text", "x y z")) - .addAggregation(terms("terms").executionHint(randomExecutionHint()).field("group")) - .get(); + response = client().prepareSearch("field-collapsing") + .setSize(0) + .setMinScore(0.0001f) + .setQuery(matchQuery("text", "x y z")) + .addAggregation(terms("terms").executionHint(randomExecutionHint()).field("group")) + .get(); assertSearchResponse(response); @@ -365,15 +399,15 @@ public void testIssue11119() throws Exception { assertThat(terms.getBuckets().size(), equalTo(3)); } - public void testBreadthFirstWithScoreNeeded() throws Exception { SearchResponse response = client().prepareSearch("idx") - .addAggregation(terms("terms") - .executionHint(randomExecutionHint()) - .collectMode(SubAggCollectionMode.BREADTH_FIRST) - .field(TERMS_AGGS_FIELD) - .subAggregation(topHits("hits").size(3)) - ).get(); + .addAggregation( + terms("terms").executionHint(randomExecutionHint()) + .collectMode(SubAggCollectionMode.BREADTH_FIRST) + .field(TERMS_AGGS_FIELD) + .subAggregation(topHits("hits").size(3)) + ) + .get(); assertSearchResponse(response); @@ -398,14 +432,15 @@ public void testBreadthFirstWithScoreNeeded() throws Exception { public void testBreadthFirstWithAggOrderAndScoreNeeded() throws Exception { SearchResponse response = client().prepareSearch("idx") - .addAggregation(terms("terms") - .executionHint(randomExecutionHint()) - .collectMode(SubAggCollectionMode.BREADTH_FIRST) - .field(TERMS_AGGS_FIELD) - .order(BucketOrder.aggregation("max", false)) - .subAggregation(max("max").field(SORT_FIELD)) - .subAggregation(topHits("hits").size(3)) - ).get(); + .addAggregation( + terms("terms").executionHint(randomExecutionHint()) + .collectMode(SubAggCollectionMode.BREADTH_FIRST) + .field(TERMS_AGGS_FIELD) + .order(BucketOrder.aggregation("max", false)) + .subAggregation(max("max").field(SORT_FIELD)) + .subAggregation(topHits("hits").size(3)) + ) + .get(); assertSearchResponse(response); @@ -429,8 +464,10 @@ public void testBreadthFirstWithAggOrderAndScoreNeeded() throws Exception { } public void testBasicsGetProperty() throws Exception { - SearchResponse searchResponse = client().prepareSearch("idx").setQuery(matchAllQuery()) - .addAggregation(global("global").subAggregation(topHits("hits"))).get(); + SearchResponse searchResponse = client().prepareSearch("idx") + .setQuery(matchAllQuery()) + .addAggregation(global("global").subAggregation(topHits("hits"))) + .get(); assertSearchResponse(searchResponse); @@ -443,7 +480,7 @@ public void testBasicsGetProperty() throws Exception { TopHits topHits = global.getAggregations().get("hits"); assertThat(topHits, notNullValue()); assertThat(topHits.getName(), equalTo("hits")); - assertThat((TopHits) ((InternalAggregation)global).getProperty("hits"), sameInstance(topHits)); + assertThat((TopHits) ((InternalAggregation) global).getProperty("hits"), sameInstance(topHits)); } @@ -451,25 +488,20 @@ public void testPagination() throws Exception { int size = randomIntBetween(1, 10); int from = randomIntBetween(0, 10); SearchResponse response = client().prepareSearch("idx") - .addAggregation(terms("terms") - .executionHint(randomExecutionHint()) - .field(TERMS_AGGS_FIELD) - .subAggregation( - topHits("hits") - .sort(SortBuilders.fieldSort(SORT_FIELD).order(SortOrder.DESC)) - .from(from) - .size(size) - ) - ) - .get(); + .addAggregation( + terms("terms").executionHint(randomExecutionHint()) + .field(TERMS_AGGS_FIELD) + 
.subAggregation(topHits("hits").sort(SortBuilders.fieldSort(SORT_FIELD).order(SortOrder.DESC)).from(from).size(size)) + ) + .get(); assertSearchResponse(response); SearchResponse control = client().prepareSearch("idx") - .setFrom(from) - .setSize(size) - .setPostFilter(QueryBuilders.termQuery(TERMS_AGGS_FIELD, "val0")) - .addSort(SORT_FIELD, SortOrder.DESC) - .get(); + .setFrom(from) + .setSize(size) + .setPostFilter(QueryBuilders.termQuery(TERMS_AGGS_FIELD, "val0")) + .addSort(SORT_FIELD, SortOrder.DESC) + .get(); assertSearchResponse(control); SearchHits controlHits = control.getHits(); @@ -486,8 +518,14 @@ public void testPagination() throws Exception { assertThat(hits.getTotalHits().value, equalTo(controlHits.getTotalHits().value)); assertThat(hits.getHits().length, equalTo(controlHits.getHits().length)); for (int i = 0; i < hits.getHits().length; i++) { - logger.info("{}: top_hits: [{}][{}] control: [{}][{}]", i, hits.getAt(i).getId(), hits.getAt(i).getSortValues()[0], - controlHits.getAt(i).getId(), controlHits.getAt(i).getSortValues()[0]); + logger.info( + "{}: top_hits: [{}][{}] control: [{}][{}]", + i, + hits.getAt(i).getId(), + hits.getAt(i).getSortValues()[0], + controlHits.getAt(i).getId(), + controlHits.getAt(i).getSortValues()[0] + ); assertThat(hits.getAt(i).getId(), equalTo(controlHits.getAt(i).getId())); assertThat(hits.getAt(i).getSortValues()[0], equalTo(controlHits.getAt(i).getSortValues()[0])); } @@ -495,18 +533,14 @@ public void testPagination() throws Exception { public void testSortByBucket() throws Exception { SearchResponse response = client().prepareSearch("idx") - .addAggregation(terms("terms") - .executionHint(randomExecutionHint()) - .field(TERMS_AGGS_FIELD) - .order(BucketOrder.aggregation("max_sort", false)) - .subAggregation( - topHits("hits").sort(SortBuilders.fieldSort(SORT_FIELD).order(SortOrder.DESC)).trackScores(true) - ) - .subAggregation( - max("max_sort").field(SORT_FIELD) - ) - ) - .get(); + .addAggregation( + terms("terms").executionHint(randomExecutionHint()) + .field(TERMS_AGGS_FIELD) + .order(BucketOrder.aggregation("max_sort", false)) + .subAggregation(topHits("hits").sort(SortBuilders.fieldSort(SORT_FIELD).order(SortOrder.DESC)).trackScores(true)) + .subAggregation(max("max_sort").field(SORT_FIELD)) + ) + .get(); assertSearchResponse(response); Terms terms = response.getAggregations().get("terms"); @@ -533,14 +567,17 @@ public void testSortByBucket() throws Exception { } public void testFieldCollapsing() throws Exception { - SearchResponse response = client() - .prepareSearch("field-collapsing") - .setSearchType(SearchType.DFS_QUERY_THEN_FETCH) - .setQuery(matchQuery("text", "term rare")) - .addAggregation( - terms("terms").executionHint(randomExecutionHint()).field("group") - .order(BucketOrder.aggregation("max_score", false)).subAggregation(topHits("hits").size(1)) - .subAggregation(max("max_score").field("value"))).get(); + SearchResponse response = client().prepareSearch("field-collapsing") + .setSearchType(SearchType.DFS_QUERY_THEN_FETCH) + .setQuery(matchQuery("text", "term rare")) + .addAggregation( + terms("terms").executionHint(randomExecutionHint()) + .field("group") + .order(BucketOrder.aggregation("max_score", false)) + .subAggregation(topHits("hits").size(1)) + .subAggregation(max("max_score").field("value")) + ) + .get(); assertSearchResponse(response); Terms terms = response.getAggregations().get("terms"); @@ -577,25 +614,24 @@ public void testFieldCollapsing() throws Exception { public void testFetchFeatures() { final boolean 
seqNoAndTerm = randomBoolean(); SearchResponse response = client().prepareSearch("idx") - .setQuery(matchQuery("text", "text").queryName("test")) - .addAggregation(terms("terms") - .executionHint(randomExecutionHint()) - .field(TERMS_AGGS_FIELD) - .subAggregation( - topHits("hits").size(1) - .highlighter(new HighlightBuilder().field("text")) - .explain(true) - .storedField("text") - .docValueField("field1") - .fetchField("field2") - .scriptField("script", - new Script(ScriptType.INLINE, MockScriptEngine.NAME, "5", Collections.emptyMap())) - .fetchSource("text", null) - .version(true) - .seqNoAndPrimaryTerm(seqNoAndTerm) - ) - ) - .get(); + .setQuery(matchQuery("text", "text").queryName("test")) + .addAggregation( + terms("terms").executionHint(randomExecutionHint()) + .field(TERMS_AGGS_FIELD) + .subAggregation( + topHits("hits").size(1) + .highlighter(new HighlightBuilder().field("text")) + .explain(true) + .storedField("text") + .docValueField("field1") + .fetchField("field2") + .scriptField("script", new Script(ScriptType.INLINE, MockScriptEngine.NAME, "5", Collections.emptyMap())) + .fetchSource("text", null) + .version(true) + .seqNoAndPrimaryTerm(seqNoAndTerm) + ) + ) + .get(); assertSearchResponse(response); Terms terms = response.getAggregations().get("terms"); @@ -649,13 +685,12 @@ public void testFetchFeatures() { public void testInvalidSortField() throws Exception { try { client().prepareSearch("idx") - .addAggregation(terms("terms") - .executionHint(randomExecutionHint()) - .field(TERMS_AGGS_FIELD) - .subAggregation( - topHits("hits").sort(SortBuilders.fieldSort("xyz").order(SortOrder.DESC)) - ) - ).get(); + .addAggregation( + terms("terms").executionHint(randomExecutionHint()) + .field(TERMS_AGGS_FIELD) + .subAggregation(topHits("hits").sort(SortBuilders.fieldSort("xyz").order(SortOrder.DESC))) + ) + .get(); fail(); } catch (SearchPhaseExecutionException e) { assertThat(e.toString(), containsString("No mapping found for [xyz] in order to sort on")); @@ -663,9 +698,7 @@ public void testInvalidSortField() throws Exception { } public void testEmptyIndex() throws Exception { - SearchResponse response = client().prepareSearch("empty") - .addAggregation(topHits("hits")) - .get(); + SearchResponse response = client().prepareSearch("empty").addAggregation(topHits("hits")).get(); assertSearchResponse(response); TopHits hits = response.getAggregations().get("hits"); @@ -675,21 +708,16 @@ public void testEmptyIndex() throws Exception { } public void testTrackScores() throws Exception { - boolean[] trackScores = new boolean[]{true, false}; + boolean[] trackScores = new boolean[] { true, false }; for (boolean trackScore : trackScores) { logger.info("Track score={}", trackScore); SearchResponse response = client().prepareSearch("field-collapsing") - .setQuery(matchQuery("text", "term rare")) - .addAggregation(terms("terms") - .field("group") - .subAggregation( - topHits("hits") - .trackScores(trackScore) - .size(1) - .sort("_index", SortOrder.DESC) - ) - ) - .get(); + .setQuery(matchQuery("text", "term rare")) + .addAggregation( + terms("terms").field("group") + .subAggregation(topHits("hits").trackScores(trackScore).size(1).sort("_index", SortOrder.DESC)) + ) + .get(); assertSearchResponse(response); Terms terms = response.getAggregations().get("terms"); @@ -722,18 +750,13 @@ public void testTrackScores() throws Exception { public void testTopHitsInNestedSimple() throws Exception { SearchResponse searchResponse = client().prepareSearch("articles") - .setQuery(matchQuery("title", "title")) - 
.addAggregation( - nested("to-comments", "comments") - .subAggregation( - terms("users") - .field("comments.user") - .subAggregation( - topHits("top-comments").sort("comments.date", SortOrder.ASC) - ) - ) + .setQuery(matchQuery("title", "title")) + .addAggregation( + nested("to-comments", "comments").subAggregation( + terms("users").field("comments.user").subAggregation(topHits("top-comments").sort("comments.date", SortOrder.ASC)) ) - .get(); + ) + .get(); Nested nested = searchResponse.getAggregations().get("to-comments"); assertThat(nested.getDocCount(), equalTo(4L)); @@ -772,18 +795,16 @@ public void testTopHitsInNestedSimple() throws Exception { public void testTopHitsInSecondLayerNested() throws Exception { SearchResponse searchResponse = client().prepareSearch("articles") - .setQuery(matchQuery("title", "title")) - .addAggregation( - nested("to-comments", "comments") - .subAggregation( - nested("to-reviewers", "comments.reviewers").subAggregation( - // Also need to sort on _doc because there are two reviewers with the same name - topHits("top-reviewers") - .sort("comments.reviewers.name", SortOrder.ASC).sort("_doc", SortOrder.DESC).size(7) - ) - ) - .subAggregation(topHits("top-comments").sort("comments.date", SortOrder.DESC).size(4)) - ).get(); + .setQuery(matchQuery("title", "title")) + .addAggregation( + nested("to-comments", "comments").subAggregation( + nested("to-reviewers", "comments.reviewers").subAggregation( + // Also need to sort on _doc because there are two reviewers with the same name + topHits("top-reviewers").sort("comments.reviewers.name", SortOrder.ASC).sort("_doc", SortOrder.DESC).size(7) + ) + ).subAggregation(topHits("top-comments").sort("comments.date", SortOrder.DESC).size(4)) + ) + .get(); assertNoFailures(searchResponse); Nested toComments = searchResponse.getAggregations().get("to-comments"); @@ -872,22 +893,27 @@ public void testTopHitsInSecondLayerNested() throws Exception { public void testNestedFetchFeatures() { String hlType = randomFrom("plain", "fvh", "unified"); - HighlightBuilder.Field hlField = new HighlightBuilder.Field("comments.message") - .highlightQuery(matchQuery("comments.message", "comment")) - .forceSource(randomBoolean()) // randomly from stored field or _source - .highlighterType(hlType); - - SearchResponse searchResponse = client() - .prepareSearch("articles") - .setQuery(nestedQuery("comments", matchQuery("comments.message", "comment").queryName("test"), ScoreMode.Avg)) - .addAggregation( - nested("to-comments", "comments").subAggregation( - topHits("top-comments").size(1).highlighter(new HighlightBuilder().field(hlField)).explain(true) - .docValueField("comments.user") - .scriptField("script", - new Script(ScriptType.INLINE, MockScriptEngine.NAME, "5", Collections.emptyMap())) - .fetchSource("comments.message", null) - .version(true).sort("comments.date", SortOrder.ASC))).get(); + HighlightBuilder.Field hlField = new HighlightBuilder.Field("comments.message").highlightQuery( + matchQuery("comments.message", "comment") + ) + .forceSource(randomBoolean()) // randomly from stored field or _source + .highlighterType(hlType); + + SearchResponse searchResponse = client().prepareSearch("articles") + .setQuery(nestedQuery("comments", matchQuery("comments.message", "comment").queryName("test"), ScoreMode.Avg)) + .addAggregation( + nested("to-comments", "comments").subAggregation( + topHits("top-comments").size(1) + .highlighter(new HighlightBuilder().field(hlField)) + .explain(true) + .docValueField("comments.user") + .scriptField("script", 
new Script(ScriptType.INLINE, MockScriptEngine.NAME, "5", Collections.emptyMap())) + .fetchSource("comments.message", null) + .version(true) + .sort("comments.date", SortOrder.ASC) + ) + ) + .get(); assertHitCount(searchResponse, 2); Nested nested = searchResponse.getAggregations().get("to-comments"); assertThat(nested.getDocCount(), equalTo(4L)); @@ -927,24 +953,20 @@ public void testNestedFetchFeatures() { public void testTopHitsInNested() throws Exception { SearchResponse searchResponse = client().prepareSearch("articles") - .addAggregation( - histogram("dates") - .field("date") - .interval(5) - .subAggregation( - nested("to-comments", "comments") - .subAggregation(topHits("comments") - .highlighter( - new HighlightBuilder() - .field( - new HighlightBuilder.Field("comments.message") - .highlightQuery(matchQuery("comments.message", "text")) - ) - ) - .sort("comments.id", SortOrder.ASC)) + .addAggregation( + histogram("dates").field("date") + .interval(5) + .subAggregation( + nested("to-comments", "comments").subAggregation( + topHits("comments").highlighter( + new HighlightBuilder().field( + new HighlightBuilder.Field("comments.message").highlightQuery(matchQuery("comments.message", "text")) ) - ) - .get(); + ).sort("comments.id", SortOrder.ASC) + ) + ) + ) + .get(); Histogram histogram = searchResponse.getAggregations().get("dates"); for (int i = 0; i < numArticles; i += 5) { @@ -971,93 +993,101 @@ public void testTopHitsInNested() throws Exception { } public void testUseMaxDocInsteadOfSize() throws Exception { - client().admin().indices().prepareUpdateSettings("idx") + client().admin() + .indices() + .prepareUpdateSettings("idx") .setSettings(Collections.singletonMap(IndexSettings.MAX_INNER_RESULT_WINDOW_SETTING.getKey(), ArrayUtil.MAX_ARRAY_LENGTH)) .get(); - SearchResponse response = client() - .prepareSearch("idx") - .addAggregation(terms("terms") - .executionHint(randomExecutionHint()) - .field(TERMS_AGGS_FIELD) - .subAggregation( - topHits("hits").size(ArrayUtil.MAX_ARRAY_LENGTH - 1) - .sort(SortBuilders.fieldSort(SORT_FIELD).order(SortOrder.DESC)) - ) - ) - .get(); + SearchResponse response = client().prepareSearch("idx") + .addAggregation( + terms("terms").executionHint(randomExecutionHint()) + .field(TERMS_AGGS_FIELD) + .subAggregation( + topHits("hits").size(ArrayUtil.MAX_ARRAY_LENGTH - 1).sort(SortBuilders.fieldSort(SORT_FIELD).order(SortOrder.DESC)) + ) + ) + .get(); assertNoFailures(response); - client().admin().indices().prepareUpdateSettings("idx") + client().admin() + .indices() + .prepareUpdateSettings("idx") .setSettings(Collections.singletonMap(IndexSettings.MAX_INNER_RESULT_WINDOW_SETTING.getKey(), null)) .get(); } public void testTooHighResultWindow() throws Exception { - SearchResponse response = client() - .prepareSearch("idx") - .addAggregation(terms("terms") - .executionHint(randomExecutionHint()) - .field(TERMS_AGGS_FIELD) - .subAggregation( - topHits("hits").from(50).size(10).sort(SortBuilders.fieldSort(SORT_FIELD).order(SortOrder.DESC)) - ) + SearchResponse response = client().prepareSearch("idx") + .addAggregation( + terms("terms").executionHint(randomExecutionHint()) + .field(TERMS_AGGS_FIELD) + .subAggregation(topHits("hits").from(50).size(10).sort(SortBuilders.fieldSort(SORT_FIELD).order(SortOrder.DESC))) ) .get(); assertNoFailures(response); - Exception e = expectThrows(SearchPhaseExecutionException.class, () -> client().prepareSearch("idx") - .addAggregation(terms("terms") - .executionHint(randomExecutionHint()) - .field(TERMS_AGGS_FIELD) - 
.subAggregation( - topHits("hits").from(100).size(10).sort(SortBuilders.fieldSort(SORT_FIELD).order(SortOrder.DESC)) + Exception e = expectThrows( + SearchPhaseExecutionException.class, + () -> client().prepareSearch("idx") + .addAggregation( + terms("terms").executionHint(randomExecutionHint()) + .field(TERMS_AGGS_FIELD) + .subAggregation(topHits("hits").from(100).size(10).sort(SortBuilders.fieldSort(SORT_FIELD).order(SortOrder.DESC))) ) - ).get()); - assertThat(e.getCause().getMessage(), - containsString("the top hits aggregator [hits]'s from + size must be less than or equal to: [100] but was [110]")); - e = expectThrows(SearchPhaseExecutionException.class, () -> client().prepareSearch("idx") - .addAggregation(terms("terms") - .executionHint(randomExecutionHint()) - .field(TERMS_AGGS_FIELD) - .subAggregation( - topHits("hits").from(10).size(100).sort(SortBuilders.fieldSort(SORT_FIELD).order(SortOrder.DESC)) + .get() + ); + assertThat( + e.getCause().getMessage(), + containsString("the top hits aggregator [hits]'s from + size must be less than or equal to: [100] but was [110]") + ); + e = expectThrows( + SearchPhaseExecutionException.class, + () -> client().prepareSearch("idx") + .addAggregation( + terms("terms").executionHint(randomExecutionHint()) + .field(TERMS_AGGS_FIELD) + .subAggregation(topHits("hits").from(10).size(100).sort(SortBuilders.fieldSort(SORT_FIELD).order(SortOrder.DESC))) ) - ).get()); - assertThat(e.getCause().getMessage(), - containsString("the top hits aggregator [hits]'s from + size must be less than or equal to: [100] but was [110]")); + .get() + ); + assertThat( + e.getCause().getMessage(), + containsString("the top hits aggregator [hits]'s from + size must be less than or equal to: [100] but was [110]") + ); - client().admin().indices().prepareUpdateSettings("idx") + client().admin() + .indices() + .prepareUpdateSettings("idx") .setSettings(Collections.singletonMap(IndexSettings.MAX_INNER_RESULT_WINDOW_SETTING.getKey(), 110)) .get(); response = client().prepareSearch("idx") - .addAggregation(terms("terms") - .executionHint(randomExecutionHint()) - .field(TERMS_AGGS_FIELD) - .subAggregation( - topHits("hits").from(100).size(10).sort(SortBuilders.fieldSort(SORT_FIELD).order(SortOrder.DESC)) - )).get(); + .addAggregation( + terms("terms").executionHint(randomExecutionHint()) + .field(TERMS_AGGS_FIELD) + .subAggregation(topHits("hits").from(100).size(10).sort(SortBuilders.fieldSort(SORT_FIELD).order(SortOrder.DESC))) + ) + .get(); assertNoFailures(response); response = client().prepareSearch("idx") - .addAggregation(terms("terms") - .executionHint(randomExecutionHint()) - .field(TERMS_AGGS_FIELD) - .subAggregation( - topHits("hits").from(10).size(100).sort(SortBuilders.fieldSort(SORT_FIELD).order(SortOrder.DESC)) - )).get(); + .addAggregation( + terms("terms").executionHint(randomExecutionHint()) + .field(TERMS_AGGS_FIELD) + .subAggregation(topHits("hits").from(10).size(100).sort(SortBuilders.fieldSort(SORT_FIELD).order(SortOrder.DESC))) + ) + .get(); assertNoFailures(response); - client().admin().indices().prepareUpdateSettings("idx") + client().admin() + .indices() + .prepareUpdateSettings("idx") .setSettings(Collections.singletonMap(IndexSettings.MAX_INNER_RESULT_WINDOW_SETTING.getKey(), null)) .get(); } public void testNoStoredFields() throws Exception { - SearchResponse response = client() - .prepareSearch("idx") - .addAggregation(terms("terms") - .executionHint(randomExecutionHint()) - .field(TERMS_AGGS_FIELD) - .subAggregation( - 
topHits("hits").storedField("_none_") - ) + SearchResponse response = client().prepareSearch("idx") + .addAggregation( + terms("terms").executionHint(randomExecutionHint()) + .field(TERMS_AGGS_FIELD) + .subAggregation(topHits("hits").storedField("_none_")) ) .get(); @@ -1090,79 +1120,210 @@ public void testNoStoredFields() throws Exception { */ public void testScriptCaching() throws Exception { try { - assertAcked(prepareCreate("cache_test_idx").setMapping("d", "type=long") - .setSettings( - Settings.builder() - .put("requests.cache.enable", true) - .put("number_of_shards", 1) - .put("number_of_replicas", 1)) - .get()); - indexRandom(true, client().prepareIndex("cache_test_idx").setId("1").setSource("s", 1), - client().prepareIndex("cache_test_idx").setId("2").setSource("s", 2)); + assertAcked( + prepareCreate("cache_test_idx").setMapping("d", "type=long") + .setSettings( + Settings.builder().put("requests.cache.enable", true).put("number_of_shards", 1).put("number_of_replicas", 1) + ) + .get() + ); + indexRandom( + true, + client().prepareIndex("cache_test_idx").setId("1").setSource("s", 1), + client().prepareIndex("cache_test_idx").setId("2").setSource("s", 2) + ); // Make sure we are starting with a clear cache - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getHitCount(), equalTo(0L)); - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getMissCount(), equalTo(0L)); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getHitCount(), + equalTo(0L) + ); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getMissCount(), + equalTo(0L) + ); // Test that a request using a nondeterministic script field does not get cached - SearchResponse r = client().prepareSearch("cache_test_idx").setSize(0) - .addAggregation(topHits("foo").scriptField("bar", - new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "Math.random()", Collections.emptyMap()))).get(); + SearchResponse r = client().prepareSearch("cache_test_idx") + .setSize(0) + .addAggregation( + topHits("foo").scriptField( + "bar", + new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "Math.random()", Collections.emptyMap()) + ) + ) + .get(); assertSearchResponse(r); - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getHitCount(), equalTo(0L)); - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getMissCount(), equalTo(0L)); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getHitCount(), + equalTo(0L) + ); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getMissCount(), + equalTo(0L) + ); // Test that a request using a nondeterministic script sort does not get cached - r = client().prepareSearch("cache_test_idx").setSize(0) - .addAggregation(topHits("foo").sort( - SortBuilders.scriptSort( - new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "Math.random()", Collections.emptyMap()), - ScriptSortType.STRING))) + r = 
client().prepareSearch("cache_test_idx") + .setSize(0) + .addAggregation( + topHits("foo").sort( + SortBuilders.scriptSort( + new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "Math.random()", Collections.emptyMap()), + ScriptSortType.STRING + ) + ) + ) .get(); assertSearchResponse(r); - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getHitCount(), equalTo(0L)); - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getMissCount(), equalTo(0L)); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getHitCount(), + equalTo(0L) + ); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getMissCount(), + equalTo(0L) + ); // Test that a request using a deterministic script field does not get cached - r = client().prepareSearch("cache_test_idx").setSize(0) - .addAggregation(topHits("foo").scriptField("bar", - new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "5", Collections.emptyMap()))).get(); + r = client().prepareSearch("cache_test_idx") + .setSize(0) + .addAggregation( + topHits("foo").scriptField("bar", new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "5", Collections.emptyMap())) + ) + .get(); assertSearchResponse(r); - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getHitCount(), equalTo(0L)); - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getMissCount(), equalTo(1L)); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getHitCount(), + equalTo(0L) + ); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getMissCount(), + equalTo(1L) + ); // Test that a request using a deterministic script sort does not get cached - r = client().prepareSearch("cache_test_idx").setSize(0) - .addAggregation(topHits("foo").sort( - SortBuilders.scriptSort( - new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "5", Collections.emptyMap()), ScriptSortType.STRING))) + r = client().prepareSearch("cache_test_idx") + .setSize(0) + .addAggregation( + topHits("foo").sort( + SortBuilders.scriptSort( + new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "5", Collections.emptyMap()), + ScriptSortType.STRING + ) + ) + ) .get(); assertSearchResponse(r); - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getHitCount(), equalTo(0L)); - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getMissCount(), equalTo(2L)); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getHitCount(), + equalTo(0L) + ); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getMissCount(), + equalTo(2L) + ); // Ensure that non-scripted requests are cached as normal 
r = client().prepareSearch("cache_test_idx").setSize(0).addAggregation(topHits("foo")).get(); assertSearchResponse(r); - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getHitCount(), equalTo(0L)); - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getMissCount(), equalTo(3L)); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getHitCount(), + equalTo(0L) + ); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getMissCount(), + equalTo(3L) + ); } finally { assertAcked(client().admin().indices().prepareDelete("cache_test_idx")); // delete this - if we use tests.iters it would fail } @@ -1171,17 +1332,9 @@ public void testScriptCaching() throws Exception { public void testWithRescore() { // Rescore with default sort on relevancy (score) { - SearchResponse response = client() - .prepareSearch("idx") - .addRescorer( - new QueryRescorerBuilder(new MatchAllQueryBuilder().boost(3.0f)) - ) - .addAggregation(terms("terms") - .field(TERMS_AGGS_FIELD) - .subAggregation( - topHits("hits") - ) - ) + SearchResponse response = client().prepareSearch("idx") + .addRescorer(new QueryRescorerBuilder(new MatchAllQueryBuilder().boost(3.0f))) + .addAggregation(terms("terms").field(TERMS_AGGS_FIELD).subAggregation(topHits("hits"))) .get(); Terms terms = response.getAggregations().get("terms"); for (Terms.Bucket bucket : terms.getBuckets()) { @@ -1193,17 +1346,9 @@ public void testWithRescore() { } { - SearchResponse response = client() - .prepareSearch("idx") - .addRescorer( - new QueryRescorerBuilder(new MatchAllQueryBuilder().boost(3.0f)) - ) - .addAggregation(terms("terms") - .field(TERMS_AGGS_FIELD) - .subAggregation( - topHits("hits").sort(SortBuilders.scoreSort()) - ) - ) + SearchResponse response = client().prepareSearch("idx") + .addRescorer(new QueryRescorerBuilder(new MatchAllQueryBuilder().boost(3.0f))) + .addAggregation(terms("terms").field(TERMS_AGGS_FIELD).subAggregation(topHits("hits").sort(SortBuilders.scoreSort()))) .get(); Terms terms = response.getAggregations().get("terms"); for (Terms.Bucket bucket : terms.getBuckets()) { @@ -1216,16 +1361,10 @@ public void testWithRescore() { // Rescore should not be applied if the sort order is not relevancy { - SearchResponse response = client() - .prepareSearch("idx") - .addRescorer( - new QueryRescorerBuilder(new MatchAllQueryBuilder().boost(3.0f)) - ) - .addAggregation(terms("terms") - .field(TERMS_AGGS_FIELD) - .subAggregation( - topHits("hits").sort(SortBuilders.fieldSort("_index")) - ) + SearchResponse response = client().prepareSearch("idx") + .addRescorer(new QueryRescorerBuilder(new MatchAllQueryBuilder().boost(3.0f))) + .addAggregation( + terms("terms").field(TERMS_AGGS_FIELD).subAggregation(topHits("hits").sort(SortBuilders.fieldSort("_index"))) ) .get(); Terms terms = response.getAggregations().get("terms"); @@ -1238,16 +1377,11 @@ public void testWithRescore() { } { - SearchResponse response = client() - .prepareSearch("idx") - .addRescorer( - new QueryRescorerBuilder(new MatchAllQueryBuilder().boost(3.0f)) - ) - .addAggregation(terms("terms") - .field(TERMS_AGGS_FIELD) - .subAggregation( - topHits("hits").sort(SortBuilders.scoreSort()).sort(SortBuilders.fieldSort("_index")) - ) + 
SearchResponse response = client().prepareSearch("idx") + .addRescorer(new QueryRescorerBuilder(new MatchAllQueryBuilder().boost(3.0f))) + .addAggregation( + terms("terms").field(TERMS_AGGS_FIELD) + .subAggregation(topHits("hits").sort(SortBuilders.scoreSort()).sort(SortBuilders.fieldSort("_index"))) ) .get(); Terms terms = response.getAggregations().get("terms"); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/ValueCountIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/ValueCountIT.java index 516a14c50a47f..d5659586a24fd 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/ValueCountIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/ValueCountIT.java @@ -49,12 +49,12 @@ public void setupSuiteScopeCluster() throws Exception { createIndex("idx"); createIndex("idx_unmapped"); for (int i = 0; i < 10; i++) { - client().prepareIndex("idx").setId(""+i).setSource(jsonBuilder() - .startObject() - .field("value", i+1) - .startArray("values").value(i+2).value(i+3).endArray() - .endObject()) - .get(); + client().prepareIndex("idx") + .setId("" + i) + .setSource( + jsonBuilder().startObject().field("value", i + 1).startArray("values").value(i + 2).value(i + 3).endArray().endObject() + ) + .get(); } client().admin().indices().prepareFlush().get(); client().admin().indices().prepareRefresh().get(); @@ -68,9 +68,9 @@ protected Collection> nodePlugins() { public void testUnmapped() throws Exception { SearchResponse searchResponse = client().prepareSearch("idx_unmapped") - .setQuery(matchAllQuery()) - .addAggregation(count("count").field("value")) - .get(); + .setQuery(matchAllQuery()) + .addAggregation(count("count").field("value")) + .get(); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(0L)); @@ -82,9 +82,9 @@ public void testUnmapped() throws Exception { public void testSingleValuedField() throws Exception { SearchResponse searchResponse = client().prepareSearch("idx") - .setQuery(matchAllQuery()) - .addAggregation(count("count").field("value")) - .get(); + .setQuery(matchAllQuery()) + .addAggregation(count("count").field("value")) + .get(); assertHitCount(searchResponse, 10); @@ -95,8 +95,10 @@ public void testSingleValuedField() throws Exception { } public void testSingleValuedFieldGetProperty() throws Exception { - SearchResponse searchResponse = client().prepareSearch("idx").setQuery(matchAllQuery()) - .addAggregation(global("global").subAggregation(count("count").field("value"))).get(); + SearchResponse searchResponse = client().prepareSearch("idx") + .setQuery(matchAllQuery()) + .addAggregation(global("global").subAggregation(count("count").field("value"))) + .get(); assertHitCount(searchResponse, 10); @@ -111,16 +113,16 @@ public void testSingleValuedFieldGetProperty() throws Exception { assertThat(valueCount, notNullValue()); assertThat(valueCount.getName(), equalTo("count")); assertThat(valueCount.getValue(), equalTo(10L)); - assertThat((ValueCount) ((InternalAggregation)global).getProperty("count"), equalTo(valueCount)); - assertThat((double) ((InternalAggregation)global).getProperty("count.value"), equalTo(10d)); - assertThat((double) ((InternalAggregation)valueCount).getProperty("value"), equalTo(10d)); + assertThat((ValueCount) ((InternalAggregation) global).getProperty("count"), equalTo(valueCount)); + assertThat((double) ((InternalAggregation) global).getProperty("count.value"), equalTo(10d)); + 
assertThat((double) ((InternalAggregation) valueCount).getProperty("value"), equalTo(10d)); } public void testSingleValuedFieldPartiallyUnmapped() throws Exception { SearchResponse searchResponse = client().prepareSearch("idx", "idx_unmapped") - .setQuery(matchAllQuery()) - .addAggregation(count("count").field("value")) - .get(); + .setQuery(matchAllQuery()) + .addAggregation(count("count").field("value")) + .get(); assertHitCount(searchResponse, 10); @@ -132,9 +134,9 @@ public void testSingleValuedFieldPartiallyUnmapped() throws Exception { public void testMultiValuedField() throws Exception { SearchResponse searchResponse = client().prepareSearch("idx") - .setQuery(matchAllQuery()) - .addAggregation(count("count").field("values")) - .get(); + .setQuery(matchAllQuery()) + .addAggregation(count("count").field("values")) + .get(); assertHitCount(searchResponse, 10); @@ -145,9 +147,11 @@ public void testMultiValuedField() throws Exception { } public void testSingleValuedScript() throws Exception { - SearchResponse searchResponse = client().prepareSearch("idx").setQuery(matchAllQuery()) - .addAggregation(count("count").script( - new Script(ScriptType.INLINE, METRIC_SCRIPT_ENGINE, VALUE_FIELD_SCRIPT, Collections.emptyMap()))) + SearchResponse searchResponse = client().prepareSearch("idx") + .setQuery(matchAllQuery()) + .addAggregation( + count("count").script(new Script(ScriptType.INLINE, METRIC_SCRIPT_ENGINE, VALUE_FIELD_SCRIPT, Collections.emptyMap())) + ) .get(); assertHitCount(searchResponse, 10); @@ -159,9 +163,11 @@ public void testSingleValuedScript() throws Exception { } public void testMultiValuedScript() throws Exception { - SearchResponse searchResponse = client().prepareSearch("idx").setQuery(matchAllQuery()) - .addAggregation(count("count").script( - new Script(ScriptType.INLINE, METRIC_SCRIPT_ENGINE, SUM_VALUES_FIELD_SCRIPT, Collections.emptyMap()))) + SearchResponse searchResponse = client().prepareSearch("idx") + .setQuery(matchAllQuery()) + .addAggregation( + count("count").script(new Script(ScriptType.INLINE, METRIC_SCRIPT_ENGINE, SUM_VALUES_FIELD_SCRIPT, Collections.emptyMap())) + ) .get(); assertHitCount(searchResponse, 10); @@ -174,7 +180,8 @@ public void testMultiValuedScript() throws Exception { public void testSingleValuedScriptWithParams() throws Exception { Map params = Collections.singletonMap("field", "value"); - SearchResponse searchResponse = client().prepareSearch("idx").setQuery(matchAllQuery()) + SearchResponse searchResponse = client().prepareSearch("idx") + .setQuery(matchAllQuery()) .addAggregation(count("count").script(new Script(ScriptType.INLINE, METRIC_SCRIPT_ENGINE, SUM_FIELD_PARAMS_SCRIPT, params))) .get(); @@ -188,9 +195,10 @@ public void testSingleValuedScriptWithParams() throws Exception { public void testMultiValuedScriptWithParams() throws Exception { Map params = Collections.singletonMap("field", "values"); - SearchResponse searchResponse = client().prepareSearch("idx").setQuery(matchAllQuery()) - .addAggregation(count("count").script( - new Script(ScriptType.INLINE, METRIC_SCRIPT_ENGINE, SUM_FIELD_PARAMS_SCRIPT, params))).get(); + SearchResponse searchResponse = client().prepareSearch("idx") + .setQuery(matchAllQuery()) + .addAggregation(count("count").script(new Script(ScriptType.INLINE, METRIC_SCRIPT_ENGINE, SUM_FIELD_PARAMS_SCRIPT, params))) + .get(); assertHitCount(searchResponse, 10); @@ -205,57 +213,143 @@ public void testMultiValuedScriptWithParams() throws Exception { * Ensure requests using nondeterministic scripts do not get cached. 
*/ public void testScriptCaching() throws Exception { - assertAcked(prepareCreate("cache_test_idx").setMapping("d", "type=long") + assertAcked( + prepareCreate("cache_test_idx").setMapping("d", "type=long") .setSettings(Settings.builder().put("requests.cache.enable", true).put("number_of_shards", 1).put("number_of_replicas", 1)) - .get()); - indexRandom(true, client().prepareIndex("cache_test_idx").setId("1").setSource("s", 1), - client().prepareIndex("cache_test_idx").setId("2").setSource("s", 2)); + .get() + ); + indexRandom( + true, + client().prepareIndex("cache_test_idx").setId("1").setSource("s", 1), + client().prepareIndex("cache_test_idx").setId("2").setSource("s", 2) + ); // Make sure we are starting with a clear cache - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getHitCount(), equalTo(0L)); - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getMissCount(), equalTo(0L)); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getHitCount(), + equalTo(0L) + ); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getMissCount(), + equalTo(0L) + ); // Test that a request using a nondeterministic script does not get cached - SearchResponse r = client().prepareSearch("cache_test_idx").setSize(0) - .addAggregation(count("foo").field("d").script( - new Script(ScriptType.INLINE, METRIC_SCRIPT_ENGINE, RANDOM_SCRIPT, Collections.emptyMap()))) - .get(); + SearchResponse r = client().prepareSearch("cache_test_idx") + .setSize(0) + .addAggregation( + count("foo").field("d").script(new Script(ScriptType.INLINE, METRIC_SCRIPT_ENGINE, RANDOM_SCRIPT, Collections.emptyMap())) + ) + .get(); assertSearchResponse(r); - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getHitCount(), equalTo(0L)); - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getMissCount(), equalTo(0L)); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getHitCount(), + equalTo(0L) + ); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getMissCount(), + equalTo(0L) + ); // Test that a request using a deterministic script gets cached - r = client().prepareSearch("cache_test_idx").setSize(0) - .addAggregation(count("foo").field("d").script( - new Script(ScriptType.INLINE, METRIC_SCRIPT_ENGINE, VALUE_FIELD_SCRIPT, Collections.emptyMap()))) - .get(); + r = client().prepareSearch("cache_test_idx") + .setSize(0) + .addAggregation( + count("foo").field("d") + .script(new Script(ScriptType.INLINE, METRIC_SCRIPT_ENGINE, VALUE_FIELD_SCRIPT, Collections.emptyMap())) + ) + .get(); assertSearchResponse(r); - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getHitCount(), equalTo(0L)); - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getMissCount(), equalTo(1L)); + 
assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getHitCount(), + equalTo(0L) + ); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getMissCount(), + equalTo(1L) + ); // Ensure that non-scripted requests are cached as normal r = client().prepareSearch("cache_test_idx").setSize(0).addAggregation(count("foo").field("d")).get(); assertSearchResponse(r); - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getHitCount(), equalTo(0L)); - assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() - .getMissCount(), equalTo(2L)); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getHitCount(), + equalTo(0L) + ); + assertThat( + client().admin() + .indices() + .prepareStats("cache_test_idx") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getMissCount(), + equalTo(2L) + ); } public void testOrderByEmptyAggregation() throws Exception { - SearchResponse searchResponse = client().prepareSearch("idx").setQuery(matchAllQuery()) - .addAggregation(terms("terms").field("value").order(BucketOrder.compound(BucketOrder.aggregation("filter>count", true))) - .subAggregation(filter("filter", termQuery("value", 100)).subAggregation(count("count").field("value")))) - .get(); + SearchResponse searchResponse = client().prepareSearch("idx") + .setQuery(matchAllQuery()) + .addAggregation( + terms("terms").field("value") + .order(BucketOrder.compound(BucketOrder.aggregation("filter>count", true))) + .subAggregation(filter("filter", termQuery("value", 100)).subAggregation(count("count").field("value"))) + ) + .get(); assertHitCount(searchResponse, 10); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/pipeline/AvgBucketIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/pipeline/AvgBucketIT.java index 57fe539ad0bd6..d7a1d63311c1c 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/pipeline/AvgBucketIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/pipeline/AvgBucketIT.java @@ -47,8 +47,7 @@ public class AvgBucketIT extends ESIntegTestCase { @Override public void setupSuiteScopeCluster() throws Exception { - assertAcked(client().admin().indices().prepareCreate("idx") - .setMapping("tag", "type=keyword").get()); + assertAcked(client().admin().indices().prepareCreate("idx").setMapping("tag", "type=keyword").get()); createIndex("idx_unmapped"); numDocs = randomIntBetween(6, 20); @@ -64,17 +63,26 @@ public void setupSuiteScopeCluster() throws Exception { for (int i = 0; i < numDocs; i++) { int fieldValue = randomIntBetween(minRandomValue, maxRandomValue); - builders.add(client().prepareIndex("idx").setSource( - jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, fieldValue).field("tag", "tag" + (i % interval)) - .endObject())); + builders.add( + client().prepareIndex("idx") + .setSource( + jsonBuilder().startObject() + .field(SINGLE_VALUED_FIELD_NAME, fieldValue) + .field("tag", "tag" + (i % interval)) + .endObject() + ) + ); final int bucket = (fieldValue / interval); // + (fieldValue < 0 ? 
-1 : 0) - (minRandomValue / interval - 1); valueCounts[bucket]++; } assertAcked(prepareCreate("empty_bucket_idx").setMapping(SINGLE_VALUED_FIELD_NAME, "type=integer")); for (int i = 0; i < 2; i++) { - builders.add(client().prepareIndex("empty_bucket_idx").setId("" + i).setSource( - jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, i * 2).endObject())); + builders.add( + client().prepareIndex("empty_bucket_idx") + .setId("" + i) + .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, i * 2).endObject()) + ); } indexRandom(true, builders); ensureSearchable(); @@ -82,9 +90,11 @@ public void setupSuiteScopeCluster() throws Exception { public void testDocCountTopLevel() throws Exception { SearchResponse response = client().prepareSearch("idx") - .addAggregation(histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval) - .extendedBounds(minRandomValue, maxRandomValue)) - .addAggregation(avgBucket("avg_bucket", "histo>_count")).get(); + .addAggregation( + histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval).extendedBounds(minRandomValue, maxRandomValue) + ) + .addAggregation(avgBucket("avg_bucket", "histo>_count")) + .get(); assertSearchResponse(response); @@ -113,16 +123,16 @@ public void testDocCountTopLevel() throws Exception { } public void testDocCountAsSubAgg() throws Exception { - SearchResponse response = client() - .prepareSearch("idx") - .addAggregation( - terms("terms") - .field("tag") - .order(BucketOrder.key(true)) - .subAggregation( - histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval) - .extendedBounds(minRandomValue, maxRandomValue)) - .subAggregation(avgBucket("avg_bucket", "histo>_count"))).get(); + SearchResponse response = client().prepareSearch("idx") + .addAggregation( + terms("terms").field("tag") + .order(BucketOrder.key(true)) + .subAggregation( + histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval).extendedBounds(minRandomValue, maxRandomValue) + ) + .subAggregation(avgBucket("avg_bucket", "histo>_count")) + ) + .get(); assertSearchResponse(response); @@ -161,10 +171,10 @@ public void testDocCountAsSubAgg() throws Exception { } public void testMetricTopLevel() throws Exception { - SearchResponse response = client() - .prepareSearch("idx") - .addAggregation(terms("terms").field("tag").subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME))) - .addAggregation(avgBucket("avg_bucket", "terms>sum")).get(); + SearchResponse response = client().prepareSearch("idx") + .addAggregation(terms("terms").field("tag").subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME))) + .addAggregation(avgBucket("avg_bucket", "terms>sum")) + .get(); assertSearchResponse(response); @@ -195,17 +205,19 @@ public void testMetricTopLevel() throws Exception { } public void testMetricAsSubAgg() throws Exception { - SearchResponse response = client() - .prepareSearch("idx") - .addAggregation( - terms("terms") - .field("tag") - .order(BucketOrder.key(true)) - .subAggregation( - histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval) - .extendedBounds(minRandomValue, maxRandomValue) - .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME))) - .subAggregation(avgBucket("avg_bucket", "histo>sum"))).get(); + SearchResponse response = client().prepareSearch("idx") + .addAggregation( + terms("terms").field("tag") + .order(BucketOrder.key(true)) + .subAggregation( + histogram("histo").field(SINGLE_VALUED_FIELD_NAME) + .interval(interval) + .extendedBounds(minRandomValue, maxRandomValue) + 
.subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME)) + ) + .subAggregation(avgBucket("avg_bucket", "histo>sum")) + ) + .get(); assertSearchResponse(response); @@ -248,18 +260,19 @@ public void testMetricAsSubAgg() throws Exception { } public void testMetricAsSubAggWithInsertZeros() throws Exception { - SearchResponse response = client() - .prepareSearch("idx") - .addAggregation( - terms("terms") - .field("tag") - .order(BucketOrder.key(true)) - .subAggregation( - histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval) - .extendedBounds(minRandomValue, maxRandomValue) - .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME))) - .subAggregation(avgBucket("avg_bucket", "histo>sum").gapPolicy(GapPolicy.INSERT_ZEROS))) - .get(); + SearchResponse response = client().prepareSearch("idx") + .addAggregation( + terms("terms").field("tag") + .order(BucketOrder.key(true)) + .subAggregation( + histogram("histo").field(SINGLE_VALUED_FIELD_NAME) + .interval(interval) + .extendedBounds(minRandomValue, maxRandomValue) + .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME)) + ) + .subAggregation(avgBucket("avg_bucket", "histo>sum").gapPolicy(GapPolicy.INSERT_ZEROS)) + ) + .get(); assertSearchResponse(response); @@ -302,9 +315,13 @@ public void testMetricAsSubAggWithInsertZeros() throws Exception { public void testNoBuckets() throws Exception { SearchResponse response = client().prepareSearch("idx") - .addAggregation(terms("terms").field("tag").includeExclude(new IncludeExclude(null, "tag.*")) - .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME))) - .addAggregation(avgBucket("avg_bucket", "terms>sum")).get(); + .addAggregation( + terms("terms").field("tag") + .includeExclude(new IncludeExclude(null, "tag.*")) + .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME)) + ) + .addAggregation(avgBucket("avg_bucket", "terms>sum")) + .get(); assertSearchResponse(response); @@ -321,17 +338,17 @@ public void testNoBuckets() throws Exception { } public void testNested() throws Exception { - SearchResponse response = client() - .prepareSearch("idx") - .addAggregation( - terms("terms") - .field("tag") - .order(BucketOrder.key(true)) - .subAggregation( - histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval) - .extendedBounds(minRandomValue, maxRandomValue)) - .subAggregation(avgBucket("avg_histo_bucket", "histo>_count"))) - .addAggregation(avgBucket("avg_terms_bucket", "terms>avg_histo_bucket")).get(); + SearchResponse response = client().prepareSearch("idx") + .addAggregation( + terms("terms").field("tag") + .order(BucketOrder.key(true)) + .subAggregation( + histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval).extendedBounds(minRandomValue, maxRandomValue) + ) + .subAggregation(avgBucket("avg_histo_bucket", "histo>_count")) + ) + .addAggregation(avgBucket("avg_terms_bucket", "terms>avg_histo_bucket")) + .get(); assertSearchResponse(response); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/pipeline/BucketScriptIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/pipeline/BucketScriptIT.java index ab51b4eeace4d..79186b851799f 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/pipeline/BucketScriptIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/pipeline/BucketScriptIT.java @@ -151,21 +151,24 @@ private XContentBuilder newDocBuilder() throws IOException { } public void testInlineScript() { - 
SearchResponse response = client() - .prepareSearch("idx") - .addAggregation( - histogram("histo") - .field(FIELD_1_NAME) - .interval(interval) - .subAggregation(sum("field2Sum").field(FIELD_2_NAME)) - .subAggregation(sum("field3Sum").field(FIELD_3_NAME)) - .subAggregation(sum("field4Sum").field(FIELD_4_NAME)) - .subAggregation( - bucketScript("seriesArithmetic", - new Script(ScriptType.INLINE, - CustomScriptPlugin.NAME, "_value0 + _value1 + _value2", Collections.emptyMap()), - "field2Sum", "field3Sum", "field4Sum"))) - .get(); + SearchResponse response = client().prepareSearch("idx") + .addAggregation( + histogram("histo").field(FIELD_1_NAME) + .interval(interval) + .subAggregation(sum("field2Sum").field(FIELD_2_NAME)) + .subAggregation(sum("field3Sum").field(FIELD_3_NAME)) + .subAggregation(sum("field4Sum").field(FIELD_4_NAME)) + .subAggregation( + bucketScript( + "seriesArithmetic", + new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_value0 + _value1 + _value2", Collections.emptyMap()), + "field2Sum", + "field3Sum", + "field4Sum" + ) + ) + ) + .get(); assertSearchResponse(response); @@ -198,21 +201,24 @@ public void testInlineScript() { } public void testInlineScript2() { - SearchResponse response = client() - .prepareSearch("idx") - .addAggregation( - histogram("histo") - .field(FIELD_1_NAME) - .interval(interval) - .subAggregation(sum("field2Sum").field(FIELD_2_NAME)) - .subAggregation(sum("field3Sum").field(FIELD_3_NAME)) - .subAggregation(sum("field4Sum").field(FIELD_4_NAME)) - .subAggregation( - bucketScript("seriesArithmetic", - new Script(ScriptType.INLINE, - CustomScriptPlugin.NAME, "_value0 + _value1 / _value2", Collections.emptyMap()), - "field2Sum", "field3Sum", "field4Sum"))) - .get(); + SearchResponse response = client().prepareSearch("idx") + .addAggregation( + histogram("histo").field(FIELD_1_NAME) + .interval(interval) + .subAggregation(sum("field2Sum").field(FIELD_2_NAME)) + .subAggregation(sum("field3Sum").field(FIELD_3_NAME)) + .subAggregation(sum("field4Sum").field(FIELD_4_NAME)) + .subAggregation( + bucketScript( + "seriesArithmetic", + new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_value0 + _value1 / _value2", Collections.emptyMap()), + "field2Sum", + "field3Sum", + "field4Sum" + ) + ) + ) + .get(); assertSearchResponse(response); @@ -245,19 +251,23 @@ public void testInlineScript2() { } public void testInlineScriptWithDateRange() { - SearchResponse response = client() - .prepareSearch("idx") + SearchResponse response = client().prepareSearch("idx") .addAggregation( - dateRange("range") - .field(FIELD_5_NAME) + dateRange("range").field(FIELD_5_NAME) .addUnboundedFrom(date) .subAggregation(sum("field2Sum").field(FIELD_2_NAME)) .subAggregation(sum("field3Sum").field(FIELD_3_NAME)) .subAggregation(sum("field4Sum").field(FIELD_4_NAME)) .subAggregation( - bucketScript("seriesArithmetic", - new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_value0 + _value1 + _value2", Collections.emptyMap()) - , "field2Sum", "field3Sum", "field4Sum"))) + bucketScript( + "seriesArithmetic", + new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_value0 + _value1 + _value2", Collections.emptyMap()), + "field2Sum", + "field3Sum", + "field4Sum" + ) + ) + ) .get(); assertSearchResponse(response); @@ -291,18 +301,20 @@ public void testInlineScriptWithDateRange() { } public void testInlineScriptSingleVariable() { - SearchResponse response = client() - .prepareSearch("idx") - .addAggregation( - histogram("histo") - .field(FIELD_1_NAME) - .interval(interval) - 
.subAggregation(sum("field2Sum").field(FIELD_2_NAME)) - .subAggregation( - bucketScript("seriesArithmetic", - new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_value0", Collections.emptyMap()), - "field2Sum"))) - .get(); + SearchResponse response = client().prepareSearch("idx") + .addAggregation( + histogram("histo").field(FIELD_1_NAME) + .interval(interval) + .subAggregation(sum("field2Sum").field(FIELD_2_NAME)) + .subAggregation( + bucketScript( + "seriesArithmetic", + new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_value0", Collections.emptyMap()), + "field2Sum" + ) + ) + ) + .get(); assertSearchResponse(response); @@ -333,20 +345,22 @@ public void testInlineScriptNamedVars() { bucketsPathsMap.put("foo", "field2Sum"); bucketsPathsMap.put("bar", "field3Sum"); bucketsPathsMap.put("baz", "field4Sum"); - SearchResponse response = client() - .prepareSearch("idx") - .addAggregation( - histogram("histo") - .field(FIELD_1_NAME) - .interval(interval) - .subAggregation(sum("field2Sum").field(FIELD_2_NAME)) - .subAggregation(sum("field3Sum").field(FIELD_3_NAME)) - .subAggregation(sum("field4Sum").field(FIELD_4_NAME)) - .subAggregation( - bucketScript("seriesArithmetic", bucketsPathsMap, - new Script(ScriptType.INLINE, - CustomScriptPlugin.NAME, "foo + bar + baz", Collections.emptyMap())))) - .get(); + SearchResponse response = client().prepareSearch("idx") + .addAggregation( + histogram("histo").field(FIELD_1_NAME) + .interval(interval) + .subAggregation(sum("field2Sum").field(FIELD_2_NAME)) + .subAggregation(sum("field3Sum").field(FIELD_3_NAME)) + .subAggregation(sum("field4Sum").field(FIELD_4_NAME)) + .subAggregation( + bucketScript( + "seriesArithmetic", + bucketsPathsMap, + new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "foo + bar + baz", Collections.emptyMap()) + ) + ) + ) + .get(); assertSearchResponse(response); @@ -384,17 +398,16 @@ public void testInlineScriptWithParams() { Script script = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "(_value0 + _value1 + _value2) * factor", params); - SearchResponse response = client() - .prepareSearch("idx") - .addAggregation( - histogram("histo") - .field(FIELD_1_NAME) - .interval(interval) - .subAggregation(sum("field2Sum").field(FIELD_2_NAME)) - .subAggregation(sum("field3Sum").field(FIELD_3_NAME)) - .subAggregation(sum("field4Sum").field(FIELD_4_NAME)) - .subAggregation(bucketScript("seriesArithmetic", script, "field2Sum", "field3Sum", "field4Sum"))) - .get(); + SearchResponse response = client().prepareSearch("idx") + .addAggregation( + histogram("histo").field(FIELD_1_NAME) + .interval(interval) + .subAggregation(sum("field2Sum").field(FIELD_2_NAME)) + .subAggregation(sum("field3Sum").field(FIELD_3_NAME)) + .subAggregation(sum("field4Sum").field(FIELD_4_NAME)) + .subAggregation(bucketScript("seriesArithmetic", script, "field2Sum", "field3Sum", "field4Sum")) + ) + .get(); assertSearchResponse(response); @@ -427,21 +440,24 @@ public void testInlineScriptWithParams() { } public void testInlineScriptInsertZeros() { - SearchResponse response = client() - .prepareSearch("idx") - .addAggregation( - histogram("histo") - .field(FIELD_1_NAME) - .interval(interval) - .subAggregation(sum("field2Sum").field(FIELD_2_NAME)) - .subAggregation(sum("field3Sum").field(FIELD_3_NAME)) - .subAggregation(sum("field4Sum").field(FIELD_4_NAME)) - .subAggregation( - bucketScript("seriesArithmetic", - new Script(ScriptType.INLINE, - CustomScriptPlugin.NAME, "_value0 + _value1 + _value2", Collections.emptyMap()), - "field2Sum", "field3Sum", 
"field4Sum").gapPolicy(GapPolicy.INSERT_ZEROS))) - .get(); + SearchResponse response = client().prepareSearch("idx") + .addAggregation( + histogram("histo").field(FIELD_1_NAME) + .interval(interval) + .subAggregation(sum("field2Sum").field(FIELD_2_NAME)) + .subAggregation(sum("field3Sum").field(FIELD_3_NAME)) + .subAggregation(sum("field4Sum").field(FIELD_4_NAME)) + .subAggregation( + bucketScript( + "seriesArithmetic", + new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_value0 + _value1 + _value2", Collections.emptyMap()), + "field2Sum", + "field3Sum", + "field4Sum" + ).gapPolicy(GapPolicy.INSERT_ZEROS) + ) + ) + .get(); assertSearchResponse(response); @@ -476,18 +492,18 @@ public void testInlineScriptInsertZeros() { } public void testInlineScriptReturnNull() { - SearchResponse response = client() - .prepareSearch("idx") + SearchResponse response = client().prepareSearch("idx") .addAggregation( - histogram("histo") - .field(FIELD_1_NAME).interval(interval) + histogram("histo").field(FIELD_1_NAME) + .interval(interval) .subAggregation( bucketScript( "nullField", new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "return null", Collections.emptyMap()) ) ) - ).get(); + ) + .get(); assertSearchResponse(response); @@ -503,25 +519,36 @@ public void testInlineScriptReturnNull() { } public void testStoredScript() { - assertAcked(client().admin().cluster().preparePutStoredScript() + assertAcked( + client().admin() + .cluster() + .preparePutStoredScript() .setId("my_script") // Script source is not interpreted but it references a pre-defined script from CustomScriptPlugin - .setContent(new BytesArray("{ \"script\": {\"lang\": \"" + CustomScriptPlugin.NAME + "\"," + - " \"source\": \"my_script\" } }"), XContentType.JSON)); - - SearchResponse response = client() - .prepareSearch("idx") - .addAggregation( - histogram("histo") - .field(FIELD_1_NAME) - .interval(interval) - .subAggregation(sum("field2Sum").field(FIELD_2_NAME)) - .subAggregation(sum("field3Sum").field(FIELD_3_NAME)) - .subAggregation(sum("field4Sum").field(FIELD_4_NAME)) - .subAggregation( - bucketScript("seriesArithmetic", - new Script(ScriptType.STORED, null, "my_script", Collections.emptyMap()), - "field2Sum", "field3Sum", "field4Sum"))).get(); + .setContent( + new BytesArray("{ \"script\": {\"lang\": \"" + CustomScriptPlugin.NAME + "\"," + " \"source\": \"my_script\" } }"), + XContentType.JSON + ) + ); + + SearchResponse response = client().prepareSearch("idx") + .addAggregation( + histogram("histo").field(FIELD_1_NAME) + .interval(interval) + .subAggregation(sum("field2Sum").field(FIELD_2_NAME)) + .subAggregation(sum("field3Sum").field(FIELD_3_NAME)) + .subAggregation(sum("field4Sum").field(FIELD_4_NAME)) + .subAggregation( + bucketScript( + "seriesArithmetic", + new Script(ScriptType.STORED, null, "my_script", Collections.emptyMap()), + "field2Sum", + "field3Sum", + "field4Sum" + ) + ) + ) + .get(); assertSearchResponse(response); @@ -554,21 +581,24 @@ public void testStoredScript() { } public void testUnmapped() throws Exception { - SearchResponse response = client() - .prepareSearch("idx_unmapped") - .addAggregation( - histogram("histo") - .field(FIELD_1_NAME) - .interval(interval) - .subAggregation(sum("field2Sum").field(FIELD_2_NAME)) - .subAggregation(sum("field3Sum").field(FIELD_3_NAME)) - .subAggregation(sum("field4Sum").field(FIELD_4_NAME)) - .subAggregation( - bucketScript("seriesArithmetic", - new Script(ScriptType.INLINE, - CustomScriptPlugin.NAME, "_value0 + _value1 + _value2", Collections.emptyMap()), - 
"field2Sum", "field3Sum", "field4Sum"))) - .get(); + SearchResponse response = client().prepareSearch("idx_unmapped") + .addAggregation( + histogram("histo").field(FIELD_1_NAME) + .interval(interval) + .subAggregation(sum("field2Sum").field(FIELD_2_NAME)) + .subAggregation(sum("field3Sum").field(FIELD_3_NAME)) + .subAggregation(sum("field4Sum").field(FIELD_4_NAME)) + .subAggregation( + bucketScript( + "seriesArithmetic", + new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_value0 + _value1 + _value2", Collections.emptyMap()), + "field2Sum", + "field3Sum", + "field4Sum" + ) + ) + ) + .get(); assertSearchResponse(response); @@ -579,20 +609,24 @@ public void testUnmapped() throws Exception { } public void testPartiallyUnmapped() throws Exception { - SearchResponse response = client() - .prepareSearch("idx", "idx_unmapped") - .addAggregation( - histogram("histo") - .field(FIELD_1_NAME) - .interval(interval) - .subAggregation(sum("field2Sum").field(FIELD_2_NAME)) - .subAggregation(sum("field3Sum").field(FIELD_3_NAME)) - .subAggregation(sum("field4Sum").field(FIELD_4_NAME)) - .subAggregation( - bucketScript("seriesArithmetic", - new Script(ScriptType.INLINE, - CustomScriptPlugin.NAME, "_value0 + _value1 + _value2", Collections.emptyMap()), - "field2Sum", "field3Sum", "field4Sum"))).get(); + SearchResponse response = client().prepareSearch("idx", "idx_unmapped") + .addAggregation( + histogram("histo").field(FIELD_1_NAME) + .interval(interval) + .subAggregation(sum("field2Sum").field(FIELD_2_NAME)) + .subAggregation(sum("field3Sum").field(FIELD_3_NAME)) + .subAggregation(sum("field4Sum").field(FIELD_4_NAME)) + .subAggregation( + bucketScript( + "seriesArithmetic", + new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_value0 + _value1 + _value2", Collections.emptyMap()), + "field2Sum", + "field3Sum", + "field4Sum" + ) + ) + ) + .get(); assertSearchResponse(response); @@ -633,17 +667,19 @@ public void testSingleBucketPathAgg() throws Exception { .field("lang", CustomScriptPlugin.NAME) .endObject() .endObject(); - BucketScriptPipelineAggregationBuilder bucketScriptAgg = - BucketScriptPipelineAggregationBuilder.PARSER.parse(createParser(content), "seriesArithmetic"); + BucketScriptPipelineAggregationBuilder bucketScriptAgg = BucketScriptPipelineAggregationBuilder.PARSER.parse( + createParser(content), + "seriesArithmetic" + ); - SearchResponse response = client() - .prepareSearch("idx", "idx_unmapped") + SearchResponse response = client().prepareSearch("idx", "idx_unmapped") .addAggregation( - histogram("histo") - .field(FIELD_1_NAME) + histogram("histo").field(FIELD_1_NAME) .interval(interval) .subAggregation(sum("field2Sum").field(FIELD_2_NAME)) - .subAggregation(bucketScriptAgg)).get(); + .subAggregation(bucketScriptAgg) + ) + .get(); assertSearchResponse(response); @@ -678,19 +714,21 @@ public void testArrayBucketPathAgg() throws Exception { .field("lang", CustomScriptPlugin.NAME) .endObject() .endObject(); - BucketScriptPipelineAggregationBuilder bucketScriptAgg = - BucketScriptPipelineAggregationBuilder.PARSER.parse(createParser(content), "seriesArithmetic"); + BucketScriptPipelineAggregationBuilder bucketScriptAgg = BucketScriptPipelineAggregationBuilder.PARSER.parse( + createParser(content), + "seriesArithmetic" + ); - SearchResponse response = client() - .prepareSearch("idx", "idx_unmapped") + SearchResponse response = client().prepareSearch("idx", "idx_unmapped") .addAggregation( - histogram("histo") - .field(FIELD_1_NAME) + histogram("histo").field(FIELD_1_NAME) 
.interval(interval) .subAggregation(sum("field2Sum").field(FIELD_2_NAME)) .subAggregation(sum("field3Sum").field(FIELD_3_NAME)) .subAggregation(sum("field4Sum").field(FIELD_4_NAME)) - .subAggregation(bucketScriptAgg)).get(); + .subAggregation(bucketScriptAgg) + ) + .get(); assertSearchResponse(response); @@ -726,28 +764,30 @@ public void testObjectBucketPathAgg() throws Exception { XContentBuilder content = XContentFactory.jsonBuilder() .startObject() .startObject("buckets_path") - .field("_value0", "field2Sum") - .field("_value1", "field3Sum") - .field("_value2", "field4Sum") + .field("_value0", "field2Sum") + .field("_value1", "field3Sum") + .field("_value2", "field4Sum") .endObject() .startObject("script") .field("source", "_value0 + _value1 + _value2") .field("lang", CustomScriptPlugin.NAME) .endObject() .endObject(); - BucketScriptPipelineAggregationBuilder bucketScriptAgg = - BucketScriptPipelineAggregationBuilder.PARSER.parse(createParser(content), "seriesArithmetic"); + BucketScriptPipelineAggregationBuilder bucketScriptAgg = BucketScriptPipelineAggregationBuilder.PARSER.parse( + createParser(content), + "seriesArithmetic" + ); - SearchResponse response = client() - .prepareSearch("idx", "idx_unmapped") + SearchResponse response = client().prepareSearch("idx", "idx_unmapped") .addAggregation( - histogram("histo") - .field(FIELD_1_NAME) + histogram("histo").field(FIELD_1_NAME) .interval(interval) .subAggregation(sum("field2Sum").field(FIELD_2_NAME)) .subAggregation(sum("field3Sum").field(FIELD_3_NAME)) .subAggregation(sum("field4Sum").field(FIELD_4_NAME)) - .subAggregation(bucketScriptAgg)).get(); + .subAggregation(bucketScriptAgg) + ) + .get(); assertSearchResponse(response); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/pipeline/BucketSelectorIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/pipeline/BucketSelectorIT.java index 04a2a3b2e836b..f5337d0f85639 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/pipeline/BucketSelectorIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/pipeline/BucketSelectorIT.java @@ -150,8 +150,12 @@ public void setupSuiteScopeCluster() throws Exception { } private XContentBuilder newDocBuilder() throws IOException { - return newDocBuilder(randomIntBetween(minNumber, maxNumber), randomIntBetween(minNumber, maxNumber), - randomIntBetween(minNumber, maxNumber), randomIntBetween(minNumber, maxNumber)); + return newDocBuilder( + randomIntBetween(minNumber, maxNumber), + randomIntBetween(minNumber, maxNumber), + randomIntBetween(minNumber, maxNumber), + randomIntBetween(minNumber, maxNumber) + ); } private XContentBuilder newDocBuilder(int field1Value, int field2Value, int field3Value, int field4Value) throws IOException { @@ -166,14 +170,22 @@ private XContentBuilder newDocBuilder(int field1Value, int field2Value, int fiel } public void testInlineScript() { - Script script = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, - "Double.isNaN(_value0) ? false : (_value0 + _value1 > 100)", Collections.emptyMap()); + Script script = new Script( + ScriptType.INLINE, + CustomScriptPlugin.NAME, + "Double.isNaN(_value0) ? 
false : (_value0 + _value1 > 100)", + Collections.emptyMap() + ); SearchResponse response = client().prepareSearch("idx") - .addAggregation(histogram("histo").field(FIELD_1_NAME).interval(interval) - .subAggregation(sum("field2Sum").field(FIELD_2_NAME)).subAggregation(sum("field3Sum").field(FIELD_3_NAME)) - .subAggregation(bucketSelector("bucketSelector", script, "field2Sum", "field3Sum"))) - .get(); + .addAggregation( + histogram("histo").field(FIELD_1_NAME) + .interval(interval) + .subAggregation(sum("field2Sum").field(FIELD_2_NAME)) + .subAggregation(sum("field3Sum").field(FIELD_3_NAME)) + .subAggregation(bucketSelector("bucketSelector", script, "field2Sum", "field3Sum")) + ) + .get(); assertSearchResponse(response); @@ -195,19 +207,22 @@ public void testInlineScript() { } public void testInlineScriptNoBucketsPruned() { - Script script = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, - "Double.isNaN(_value0) ? true : (_value0 < 10000)", Collections.emptyMap()); - - SearchResponse response = client() - .prepareSearch("idx") - .addAggregation( - histogram("histo") - .field(FIELD_1_NAME) - .interval(interval) - .subAggregation(sum("field2Sum").field(FIELD_2_NAME)) - .subAggregation(sum("field3Sum").field(FIELD_3_NAME)) - .subAggregation(bucketSelector("bucketSelector", script, "field2Sum", "field3Sum"))) - .get(); + Script script = new Script( + ScriptType.INLINE, + CustomScriptPlugin.NAME, + "Double.isNaN(_value0) ? true : (_value0 < 10000)", + Collections.emptyMap() + ); + + SearchResponse response = client().prepareSearch("idx") + .addAggregation( + histogram("histo").field(FIELD_1_NAME) + .interval(interval) + .subAggregation(sum("field2Sum").field(FIELD_2_NAME)) + .subAggregation(sum("field3Sum").field(FIELD_3_NAME)) + .subAggregation(bucketSelector("bucketSelector", script, "field2Sum", "field3Sum")) + ) + .get(); assertSearchResponse(response); @@ -229,19 +244,22 @@ public void testInlineScriptNoBucketsPruned() { } public void testInlineScriptNoBucketsLeft() { - Script script = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, - "Double.isNaN(_value0) ? false : (_value0 > 10000)", Collections.emptyMap()); - - SearchResponse response = client() - .prepareSearch("idx") - .addAggregation( - histogram("histo") - .field(FIELD_1_NAME) - .interval(interval) - .subAggregation(sum("field2Sum").field(FIELD_2_NAME)) - .subAggregation(sum("field3Sum").field(FIELD_3_NAME)) - .subAggregation(bucketSelector("bucketSelector", script, "field2Sum", "field3Sum"))) - .get(); + Script script = new Script( + ScriptType.INLINE, + CustomScriptPlugin.NAME, + "Double.isNaN(_value0) ? false : (_value0 > 10000)", + Collections.emptyMap() + ); + + SearchResponse response = client().prepareSearch("idx") + .addAggregation( + histogram("histo").field(FIELD_1_NAME) + .interval(interval) + .subAggregation(sum("field2Sum").field(FIELD_2_NAME)) + .subAggregation(sum("field3Sum").field(FIELD_3_NAME)) + .subAggregation(bucketSelector("bucketSelector", script, "field2Sum", "field3Sum")) + ) + .get(); assertSearchResponse(response); @@ -253,19 +271,22 @@ public void testInlineScriptNoBucketsLeft() { } public void testInlineScript2() { - Script script = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, - "Double.isNaN(_value0) ? 
false : (_value0 < _value1)", Collections.emptyMap()); - - SearchResponse response = client() - .prepareSearch("idx") - .addAggregation( - histogram("histo") - .field(FIELD_1_NAME) - .interval(interval) - .subAggregation(sum("field2Sum").field(FIELD_2_NAME)) - .subAggregation(sum("field3Sum").field(FIELD_3_NAME)) - .subAggregation(bucketSelector("bucketSelector", script, "field2Sum", "field3Sum"))) - .get(); + Script script = new Script( + ScriptType.INLINE, + CustomScriptPlugin.NAME, + "Double.isNaN(_value0) ? false : (_value0 < _value1)", + Collections.emptyMap() + ); + + SearchResponse response = client().prepareSearch("idx") + .addAggregation( + histogram("histo").field(FIELD_1_NAME) + .interval(interval) + .subAggregation(sum("field2Sum").field(FIELD_2_NAME)) + .subAggregation(sum("field3Sum").field(FIELD_3_NAME)) + .subAggregation(bucketSelector("bucketSelector", script, "field2Sum", "field3Sum")) + ) + .get(); assertSearchResponse(response); @@ -287,18 +308,21 @@ public void testInlineScript2() { } public void testInlineScriptSingleVariable() { - Script script = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, - "Double.isNaN(_value0) ? false : (_value0 > 100)", Collections.emptyMap()); - - SearchResponse response = client() - .prepareSearch("idx") - .addAggregation( - histogram("histo") - .field(FIELD_1_NAME) - .interval(interval) - .subAggregation(sum("field2Sum").field(FIELD_2_NAME)) - .subAggregation(bucketSelector("bucketSelector", script, "field2Sum"))) - .get(); + Script script = new Script( + ScriptType.INLINE, + CustomScriptPlugin.NAME, + "Double.isNaN(_value0) ? false : (_value0 > 100)", + Collections.emptyMap() + ); + + SearchResponse response = client().prepareSearch("idx") + .addAggregation( + histogram("histo").field(FIELD_1_NAME) + .interval(interval) + .subAggregation(sum("field2Sum").field(FIELD_2_NAME)) + .subAggregation(bucketSelector("bucketSelector", script, "field2Sum")) + ) + .get(); assertSearchResponse(response); @@ -317,22 +341,26 @@ public void testInlineScriptSingleVariable() { } public void testInlineScriptNamedVars() { - Script script = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, - "Double.isNaN(my_value1) ? false : (my_value1 + my_value2 > 100)", Collections.emptyMap()); + Script script = new Script( + ScriptType.INLINE, + CustomScriptPlugin.NAME, + "Double.isNaN(my_value1) ? false : (my_value1 + my_value2 > 100)", + Collections.emptyMap() + ); Map bucketPathsMap = new HashMap<>(); bucketPathsMap.put("my_value1", "field2Sum"); bucketPathsMap.put("my_value2", "field3Sum"); SearchResponse response = client().prepareSearch("idx") - .addAggregation( - histogram("histo") - .field(FIELD_1_NAME) - .interval(interval) - .subAggregation(sum("field2Sum").field(FIELD_2_NAME)) - .subAggregation(sum("field3Sum").field(FIELD_3_NAME)) - .subAggregation(bucketSelector("bucketSelector", bucketPathsMap, script))) - .get(); + .addAggregation( + histogram("histo").field(FIELD_1_NAME) + .interval(interval) + .subAggregation(sum("field2Sum").field(FIELD_2_NAME)) + .subAggregation(sum("field3Sum").field(FIELD_3_NAME)) + .subAggregation(bucketSelector("bucketSelector", bucketPathsMap, script)) + ) + .get(); assertSearchResponse(response); @@ -354,18 +382,22 @@ public void testInlineScriptNamedVars() { } public void testInlineScriptWithParams() { - Script script = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, - "Double.isNaN(_value0) ? 
false : (_value0 + _value1 > threshold)", Collections.singletonMap("threshold", 100)); + Script script = new Script( + ScriptType.INLINE, + CustomScriptPlugin.NAME, + "Double.isNaN(_value0) ? false : (_value0 + _value1 > threshold)", + Collections.singletonMap("threshold", 100) + ); SearchResponse response = client().prepareSearch("idx") - .addAggregation( - histogram("histo") - .field(FIELD_1_NAME) - .interval(interval) - .subAggregation(sum("field2Sum").field(FIELD_2_NAME)) - .subAggregation(sum("field3Sum").field(FIELD_3_NAME)) - .subAggregation(bucketSelector("bucketSelector", script, "field2Sum", "field3Sum"))) - .get(); + .addAggregation( + histogram("histo").field(FIELD_1_NAME) + .interval(interval) + .subAggregation(sum("field2Sum").field(FIELD_2_NAME)) + .subAggregation(sum("field3Sum").field(FIELD_3_NAME)) + .subAggregation(bucketSelector("bucketSelector", script, "field2Sum", "field3Sum")) + ) + .get(); assertSearchResponse(response); @@ -390,15 +422,14 @@ public void testInlineScriptInsertZeros() { Script script = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_value0 + _value1 > 100", Collections.emptyMap()); SearchResponse response = client().prepareSearch("idx") - .addAggregation( - histogram("histo") - .field(FIELD_1_NAME) - .interval(interval) - .subAggregation(sum("field2Sum").field(FIELD_2_NAME)) - .subAggregation(sum("field3Sum").field(FIELD_3_NAME)) - .subAggregation(bucketSelector("bucketSelector", script , "field2Sum", "field3Sum") - .gapPolicy(GapPolicy.INSERT_ZEROS))) - .get(); + .addAggregation( + histogram("histo").field(FIELD_1_NAME) + .interval(interval) + .subAggregation(sum("field2Sum").field(FIELD_2_NAME)) + .subAggregation(sum("field3Sum").field(FIELD_3_NAME)) + .subAggregation(bucketSelector("bucketSelector", script, "field2Sum", "field3Sum").gapPolicy(GapPolicy.INSERT_ZEROS)) + ) + .get(); assertSearchResponse(response); @@ -420,25 +451,34 @@ public void testInlineScriptInsertZeros() { } public void testStoredScript() { - assertAcked(client().admin().cluster().preparePutStoredScript() + assertAcked( + client().admin() + .cluster() + .preparePutStoredScript() .setId("my_script") // Source is not interpreted but my_script is defined in CustomScriptPlugin - .setContent(new BytesArray("{ \"script\": { \"lang\": \"" + CustomScriptPlugin.NAME + "\", " + - "\"source\": \"Double.isNaN(_value0) ? false : (_value0 + _value1 > 100)\" } }"), - XContentType.JSON)); + .setContent( + new BytesArray( + "{ \"script\": { \"lang\": \"" + + CustomScriptPlugin.NAME + + "\", " + + "\"source\": \"Double.isNaN(_value0) ? 
false : (_value0 + _value1 > 100)\" } }" + ), + XContentType.JSON + ) + ); Script script = new Script(ScriptType.STORED, null, "my_script", Collections.emptyMap()); - SearchResponse response = client() - .prepareSearch("idx") - .addAggregation( - histogram("histo") - .field(FIELD_1_NAME) - .interval(interval) - .subAggregation(sum("field2Sum").field(FIELD_2_NAME)) - .subAggregation(sum("field3Sum").field(FIELD_3_NAME)) - .subAggregation(bucketSelector("bucketSelector", script, "field2Sum", "field3Sum"))) - .get(); + SearchResponse response = client().prepareSearch("idx") + .addAggregation( + histogram("histo").field(FIELD_1_NAME) + .interval(interval) + .subAggregation(sum("field2Sum").field(FIELD_2_NAME)) + .subAggregation(sum("field3Sum").field(FIELD_3_NAME)) + .subAggregation(bucketSelector("bucketSelector", script, "field2Sum", "field3Sum")) + ) + .get(); assertSearchResponse(response); @@ -460,18 +500,22 @@ public void testStoredScript() { } public void testUnmapped() throws Exception { - Script script = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, - "Double.isNaN(_value0) ? false : (_value0 + _value1 > 100)", Collections.emptyMap()); + Script script = new Script( + ScriptType.INLINE, + CustomScriptPlugin.NAME, + "Double.isNaN(_value0) ? false : (_value0 + _value1 > 100)", + Collections.emptyMap() + ); SearchResponse response = client().prepareSearch("idx_unmapped") - .addAggregation( - histogram("histo") - .field(FIELD_1_NAME) - .interval(interval) - .subAggregation(sum("field2Sum").field(FIELD_2_NAME)) - .subAggregation(sum("field3Sum").field(FIELD_3_NAME)) - .subAggregation(bucketSelector("bucketSelector", script, "field2Sum", "field3Sum"))) - .get(); + .addAggregation( + histogram("histo").field(FIELD_1_NAME) + .interval(interval) + .subAggregation(sum("field2Sum").field(FIELD_2_NAME)) + .subAggregation(sum("field3Sum").field(FIELD_3_NAME)) + .subAggregation(bucketSelector("bucketSelector", script, "field2Sum", "field3Sum")) + ) + .get(); assertSearchResponse(response); @@ -482,18 +526,22 @@ public void testUnmapped() throws Exception { } public void testPartiallyUnmapped() throws Exception { - Script script = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, - "Double.isNaN(_value0) ? false : (_value0 + _value1 > 100)", Collections.emptyMap()); + Script script = new Script( + ScriptType.INLINE, + CustomScriptPlugin.NAME, + "Double.isNaN(_value0) ? 
false : (_value0 + _value1 > 100)", + Collections.emptyMap() + ); SearchResponse response = client().prepareSearch("idx", "idx_unmapped") - .addAggregation( - histogram("histo") - .field(FIELD_1_NAME) - .interval(interval) - .subAggregation(sum("field2Sum").field(FIELD_2_NAME)) - .subAggregation(sum("field3Sum").field(FIELD_3_NAME)) - .subAggregation(bucketSelector("bucketSelector", script, "field2Sum", "field3Sum"))) - .get(); + .addAggregation( + histogram("histo").field(FIELD_1_NAME) + .interval(interval) + .subAggregation(sum("field2Sum").field(FIELD_2_NAME)) + .subAggregation(sum("field3Sum").field(FIELD_3_NAME)) + .subAggregation(bucketSelector("bucketSelector", script, "field2Sum", "field3Sum")) + ) + .get(); assertSearchResponse(response); @@ -516,19 +564,18 @@ public void testPartiallyUnmapped() throws Exception { public void testEmptyBuckets() { SearchResponse response = client().prepareSearch("idx_with_gaps") - .addAggregation( - histogram("histo") - .field(FIELD_1_NAME) - .interval(1) - .subAggregation( - histogram("inner_histo") - .field(FIELD_1_NAME) - .interval(1) - .extendedBounds(1L, 4L) - .minDocCount(0) - .subAggregation(derivative("derivative", "_count") - .gapPolicy(GapPolicy.INSERT_ZEROS)))) - .get(); + .addAggregation( + histogram("histo").field(FIELD_1_NAME) + .interval(1) + .subAggregation( + histogram("inner_histo").field(FIELD_1_NAME) + .interval(1) + .extendedBounds(1L, 4L) + .minDocCount(0) + .subAggregation(derivative("derivative", "_count").gapPolicy(GapPolicy.INSERT_ZEROS)) + ) + ) + .get(); assertSearchResponse(response); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/pipeline/BucketSortIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/pipeline/BucketSortIT.java index b173c15805a9b..582bf13b0c7b4 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/pipeline/BucketSortIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/pipeline/BucketSortIT.java @@ -11,8 +11,8 @@ import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchResponse; -import org.elasticsearch.core.TimeValue; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; @@ -58,9 +58,11 @@ public class BucketSortIT extends ESIntegTestCase { @Override public void setupSuiteScopeCluster() throws Exception { createIndex(INDEX, INDEX_WITH_GAPS); - client().admin().indices().preparePutMapping(INDEX) - .setSource("time", "type=date", "foo", "type=keyword", "value_1", "type=float", "value_2", "type=float") - .get(); + client().admin() + .indices() + .preparePutMapping(INDEX) + .setSource("time", "type=date", "foo", "type=keyword", "value_1", "type=float", "value_2", "type=float") + .get(); int numTerms = 10; List terms = new ArrayList<>(numTerms); @@ -75,8 +77,9 @@ public void setupSuiteScopeCluster() throws Exception { for (String term : terms) { int termCount = randomIntBetween(3, 6); for (int i = 0; i < termCount; ++i) { - builders.add(client().prepareIndex(INDEX) - .setSource(newDocBuilder(time, term, randomIntBetween(1, 10) * randomDouble()))); + builders.add( + 
client().prepareIndex(INDEX).setSource(newDocBuilder(time, term, randomIntBetween(1, 10) * randomDouble())) + ); } } time += TimeValue.timeValueHours(1).millis(); @@ -111,9 +114,9 @@ private XContentBuilder newDocBuilder(long timeMillis, String fooValue, Double v public void testEmptyBucketSort() { SearchResponse response = client().prepareSearch(INDEX) - .setSize(0) - .addAggregation(dateHistogram("time_buckets").field(TIME_FIELD).fixedInterval(DateHistogramInterval.HOUR)) - .get(); + .setSize(0) + .addAggregation(dateHistogram("time_buckets").field(TIME_FIELD).fixedInterval(DateHistogramInterval.HOUR)) + .get(); assertSearchResponse(response); @@ -129,10 +132,13 @@ public void testEmptyBucketSort() { // Now let's test using size response = client().prepareSearch(INDEX) - .setSize(0) - .addAggregation(dateHistogram("time_buckets").field(TIME_FIELD).fixedInterval(DateHistogramInterval.HOUR) - .subAggregation(bucketSort("bucketSort", Collections.emptyList()).size(3))) - .get(); + .setSize(0) + .addAggregation( + dateHistogram("time_buckets").field(TIME_FIELD) + .fixedInterval(DateHistogramInterval.HOUR) + .subAggregation(bucketSort("bucketSort", Collections.emptyList()).size(3)) + ) + .get(); assertSearchResponse(response); @@ -146,10 +152,13 @@ public void testEmptyBucketSort() { // Finally, let's test using size + from response = client().prepareSearch(INDEX) - .setSize(0) - .addAggregation(dateHistogram("time_buckets").field(TIME_FIELD).fixedInterval(DateHistogramInterval.HOUR) - .subAggregation(bucketSort("bucketSort", Collections.emptyList()).size(3).from(2))) - .get(); + .setSize(0) + .addAggregation( + dateHistogram("time_buckets").field(TIME_FIELD) + .fixedInterval(DateHistogramInterval.HOUR) + .subAggregation(bucketSort("bucketSort", Collections.emptyList()).size(3).from(2)) + ) + .get(); assertSearchResponse(response); @@ -164,10 +173,11 @@ public void testEmptyBucketSort() { public void testSortTermsOnKey() { SearchResponse response = client().prepareSearch(INDEX) - .setSize(0) - .addAggregation(terms("foos").field(TERM_FIELD) - .subAggregation(bucketSort("bucketSort", Arrays.asList(new FieldSortBuilder("_key"))))) - .get(); + .setSize(0) + .addAggregation( + terms("foos").field(TERM_FIELD).subAggregation(bucketSort("bucketSort", Arrays.asList(new FieldSortBuilder("_key")))) + ) + .get(); assertSearchResponse(response); @@ -184,8 +194,10 @@ public void testSortTermsOnKey() { public void testSortTermsOnKeyWithSize() { SearchResponse response = client().prepareSearch(INDEX) .setSize(0) - .addAggregation(terms("foos").field(TERM_FIELD) - .subAggregation(bucketSort("bucketSort", Arrays.asList(new FieldSortBuilder("_key"))).size(3))) + .addAggregation( + terms("foos").field(TERM_FIELD) + .subAggregation(bucketSort("bucketSort", Arrays.asList(new FieldSortBuilder("_key"))).size(3)) + ) .get(); assertSearchResponse(response); @@ -203,12 +215,13 @@ public void testSortTermsOnKeyWithSize() { public void testSortTermsOnSubAggregation() { SearchResponse response = client().prepareSearch(INDEX) - .setSize(0) - .addAggregation(terms("foos").field(TERM_FIELD) - .subAggregation(avg("avg_value").field(VALUE_1_FIELD)) - .subAggregation(bucketSort("bucketSort", Arrays.asList( - new FieldSortBuilder("avg_value").order(SortOrder.DESC))))) - .get(); + .setSize(0) + .addAggregation( + terms("foos").field(TERM_FIELD) + .subAggregation(avg("avg_value").field(VALUE_1_FIELD)) + .subAggregation(bucketSort("bucketSort", Arrays.asList(new FieldSortBuilder("avg_value").order(SortOrder.DESC)))) + ) + .get(); 
assertSearchResponse(response); @@ -224,12 +237,15 @@ public void testSortTermsOnSubAggregation() { } response = client().prepareSearch(INDEX) - .setSize(0) - .addAggregation(terms("foos").field(TERM_FIELD) - .subAggregation(avg("avg_value").field(VALUE_1_FIELD)) - .subAggregation(bucketSort("bucketSort", Arrays.asList( - new FieldSortBuilder("avg_value").order(SortOrder.DESC))).size(2).from(3))) - .get(); + .setSize(0) + .addAggregation( + terms("foos").field(TERM_FIELD) + .subAggregation(avg("avg_value").field(VALUE_1_FIELD)) + .subAggregation( + bucketSort("bucketSort", Arrays.asList(new FieldSortBuilder("avg_value").order(SortOrder.DESC))).size(2).from(3) + ) + ) + .get(); assertSearchResponse(response); @@ -244,10 +260,12 @@ public void testSortTermsOnSubAggregation() { public void testSortTermsOnSubAggregationPreservesOrderOnEquals() { SearchResponse response = client().prepareSearch(INDEX) .setSize(0) - .addAggregation(terms("foos").field(TERM_FIELD) - .subAggregation(bucketSort("keyBucketSort", Arrays.asList(new FieldSortBuilder("_key")))) - .subAggregation(max("max").field("missingValue").missing(1)) - .subAggregation(bucketSort("maxBucketSort", Arrays.asList(new FieldSortBuilder("max"))))) + .addAggregation( + terms("foos").field(TERM_FIELD) + .subAggregation(bucketSort("keyBucketSort", Arrays.asList(new FieldSortBuilder("_key")))) + .subAggregation(max("max").field("missingValue").missing(1)) + .subAggregation(bucketSort("maxBucketSort", Arrays.asList(new FieldSortBuilder("max")))) + ) .get(); assertSearchResponse(response); @@ -266,13 +284,21 @@ public void testSortTermsOnSubAggregationPreservesOrderOnEquals() { public void testSortTermsOnCountWithSecondarySort() { SearchResponse response = client().prepareSearch(INDEX) - .setSize(0) - .addAggregation(terms("foos").field(TERM_FIELD) - .subAggregation(avg("avg_value").field(VALUE_1_FIELD)) - .subAggregation(bucketSort("bucketSort", Arrays.asList( + .setSize(0) + .addAggregation( + terms("foos").field(TERM_FIELD) + .subAggregation(avg("avg_value").field(VALUE_1_FIELD)) + .subAggregation( + bucketSort( + "bucketSort", + Arrays.asList( new FieldSortBuilder("_count").order(SortOrder.ASC), - new FieldSortBuilder("avg_value").order(SortOrder.DESC))))) - .get(); + new FieldSortBuilder("avg_value").order(SortOrder.DESC) + ) + ) + ) + ) + .get(); assertSearchResponse(response); @@ -295,8 +321,8 @@ public void testSortTermsOnCountWithSecondarySort() { public void testSortDateHistogramDescending() { SearchResponse response = client().prepareSearch(INDEX) - .addAggregation(dateHistogram("time_buckets").field(TIME_FIELD).fixedInterval(DateHistogramInterval.HOUR)) - .get(); + .addAggregation(dateHistogram("time_buckets").field(TIME_FIELD).fixedInterval(DateHistogramInterval.HOUR)) + .get(); assertSearchResponse(response); @@ -306,10 +332,12 @@ public void testSortDateHistogramDescending() { List ascendingTimeBuckets = histo.getBuckets(); response = client().prepareSearch(INDEX) - .addAggregation(dateHistogram("time_buckets").field(TIME_FIELD).fixedInterval(DateHistogramInterval.HOUR) - .subAggregation(bucketSort("bucketSort", Arrays.asList( - new FieldSortBuilder("_key").order(SortOrder.DESC))))) - .get(); + .addAggregation( + dateHistogram("time_buckets").field(TIME_FIELD) + .fixedInterval(DateHistogramInterval.HOUR) + .subAggregation(bucketSort("bucketSort", Arrays.asList(new FieldSortBuilder("_key").order(SortOrder.DESC)))) + ) + .get(); assertSearchResponse(response); @@ -327,12 +355,17 @@ public void testSortDateHistogramDescending() { 
public void testSortHistogram_GivenGapsAndGapPolicyIsSkip() { SearchResponse response = client().prepareSearch(INDEX_WITH_GAPS) - .addAggregation(histogram("time_buckets").field(TIME_FIELD).interval(1) - .subAggregation(avg("avg_value").field(VALUE_1_FIELD)) - .subAggregation(bucketSort("bucketSort", Arrays.asList( - new FieldSortBuilder("avg_value").order(SortOrder.DESC))).gapPolicy( - BucketHelpers.GapPolicy.SKIP))) - .get(); + .addAggregation( + histogram("time_buckets").field(TIME_FIELD) + .interval(1) + .subAggregation(avg("avg_value").field(VALUE_1_FIELD)) + .subAggregation( + bucketSort("bucketSort", Arrays.asList(new FieldSortBuilder("avg_value").order(SortOrder.DESC))).gapPolicy( + BucketHelpers.GapPolicy.SKIP + ) + ) + ) + .get(); assertSearchResponse(response); @@ -347,12 +380,17 @@ public void testSortHistogram_GivenGapsAndGapPolicyIsSkip() { public void testSortHistogram_GivenGapsAndGapPolicyIsSkipAndSizeIsLessThanAvailableBuckets() { SearchResponse response = client().prepareSearch(INDEX_WITH_GAPS) - .addAggregation(histogram("time_buckets").field(TIME_FIELD).interval(1) - .subAggregation(avg("avg_value").field(VALUE_1_FIELD)) - .subAggregation(bucketSort("bucketSort", Arrays.asList( - new FieldSortBuilder("avg_value").order(SortOrder.DESC))).gapPolicy( - BucketHelpers.GapPolicy.SKIP).size(2))) - .get(); + .addAggregation( + histogram("time_buckets").field(TIME_FIELD) + .interval(1) + .subAggregation(avg("avg_value").field(VALUE_1_FIELD)) + .subAggregation( + bucketSort("bucketSort", Arrays.asList(new FieldSortBuilder("avg_value").order(SortOrder.DESC))).gapPolicy( + BucketHelpers.GapPolicy.SKIP + ).size(2) + ) + ) + .get(); assertSearchResponse(response); @@ -367,14 +405,22 @@ public void testSortHistogram_GivenGapsAndGapPolicyIsSkipAndSizeIsLessThanAvaila public void testSortHistogram_GivenGapsAndGapPolicyIsSkipAndPrimarySortHasGaps() { SearchResponse response = client().prepareSearch(INDEX_WITH_GAPS) - .addAggregation(histogram("time_buckets").field(TIME_FIELD).interval(1) - .subAggregation(avg("avg_value_1").field(VALUE_1_FIELD)) - .subAggregation(avg("avg_value_2").field(VALUE_2_FIELD)) - .subAggregation(bucketSort("bucketSort", Arrays.asList( + .addAggregation( + histogram("time_buckets").field(TIME_FIELD) + .interval(1) + .subAggregation(avg("avg_value_1").field(VALUE_1_FIELD)) + .subAggregation(avg("avg_value_2").field(VALUE_2_FIELD)) + .subAggregation( + bucketSort( + "bucketSort", + Arrays.asList( new FieldSortBuilder("avg_value_1").order(SortOrder.DESC), - new FieldSortBuilder("avg_value_2").order(SortOrder.DESC))).gapPolicy( - BucketHelpers.GapPolicy.SKIP))) - .get(); + new FieldSortBuilder("avg_value_2").order(SortOrder.DESC) + ) + ).gapPolicy(BucketHelpers.GapPolicy.SKIP) + ) + ) + .get(); assertSearchResponse(response); @@ -390,14 +436,22 @@ public void testSortHistogram_GivenGapsAndGapPolicyIsSkipAndPrimarySortHasGaps() public void testSortHistogram_GivenGapsAndGapPolicyIsSkipAndSecondarySortHasGaps() { SearchResponse response = client().prepareSearch(INDEX_WITH_GAPS) - .addAggregation(histogram("time_buckets").field(TIME_FIELD).interval(1) - .subAggregation(avg("avg_value_1").field(VALUE_1_FIELD)) - .subAggregation(avg("avg_value_2").field(VALUE_2_FIELD)) - .subAggregation(bucketSort("bucketSort", Arrays.asList( + .addAggregation( + histogram("time_buckets").field(TIME_FIELD) + .interval(1) + .subAggregation(avg("avg_value_1").field(VALUE_1_FIELD)) + .subAggregation(avg("avg_value_2").field(VALUE_2_FIELD)) + .subAggregation( + bucketSort( + "bucketSort", + 
Arrays.asList( new FieldSortBuilder("avg_value_2").order(SortOrder.DESC), - new FieldSortBuilder("avg_value_1").order(SortOrder.ASC))).gapPolicy( - BucketHelpers.GapPolicy.SKIP))) - .get(); + new FieldSortBuilder("avg_value_1").order(SortOrder.ASC) + ) + ).gapPolicy(BucketHelpers.GapPolicy.SKIP) + ) + ) + .get(); assertSearchResponse(response); @@ -413,12 +467,17 @@ public void testSortHistogram_GivenGapsAndGapPolicyIsSkipAndSecondarySortHasGaps public void testSortHistogram_GivenGapsAndGapPolicyIsInsertZeros() { SearchResponse response = client().prepareSearch(INDEX_WITH_GAPS) - .addAggregation(histogram("time_buckets").field(TIME_FIELD).interval(1) - .subAggregation(avg("avg_value").field(VALUE_1_FIELD)) - .subAggregation(bucketSort("bucketSort", Arrays.asList( - new FieldSortBuilder("avg_value").order(SortOrder.DESC))).gapPolicy( - BucketHelpers.GapPolicy.INSERT_ZEROS))) - .get(); + .addAggregation( + histogram("time_buckets").field(TIME_FIELD) + .interval(1) + .subAggregation(avg("avg_value").field(VALUE_1_FIELD)) + .subAggregation( + bucketSort("bucketSort", Arrays.asList(new FieldSortBuilder("avg_value").order(SortOrder.DESC))).gapPolicy( + BucketHelpers.GapPolicy.INSERT_ZEROS + ) + ) + ) + .get(); assertSearchResponse(response); @@ -434,11 +493,12 @@ public void testSortHistogram_GivenGapsAndGapPolicyIsInsertZeros() { public void testEmptyBuckets() { SearchResponse response = client().prepareSearch(INDEX) - .setSize(0) - .setQuery(QueryBuilders.existsQuery("non-field")) - .addAggregation(terms("foos").field(TERM_FIELD) - .subAggregation(bucketSort("bucketSort", Arrays.asList(new FieldSortBuilder("_key"))))) - .get(); + .setSize(0) + .setQuery(QueryBuilders.existsQuery("non-field")) + .addAggregation( + terms("foos").field(TERM_FIELD).subAggregation(bucketSort("bucketSort", Arrays.asList(new FieldSortBuilder("_key")))) + ) + .get(); assertSearchResponse(response); @@ -449,21 +509,27 @@ public void testEmptyBuckets() { } public void testInvalidPath() { - Exception e = expectThrows(ActionRequestValidationException.class, - () -> client().prepareSearch(INDEX) - .addAggregation(terms("foos").field(TERM_FIELD) - .subAggregation(bucketSort("bucketSort", Arrays.asList(new FieldSortBuilder("invalid"))))) - .get()); + Exception e = expectThrows( + ActionRequestValidationException.class, + () -> client().prepareSearch(INDEX) + .addAggregation( + terms("foos").field(TERM_FIELD).subAggregation(bucketSort("bucketSort", Arrays.asList(new FieldSortBuilder("invalid")))) + ) + .get() + ); assertThat(e.getMessage(), containsString("No aggregation found for path [invalid]")); } public void testNeitherSortsNorSizeSpecifiedAndFromIsDefault_ShouldThrowValidation() { - Exception e = expectThrows(ActionRequestValidationException.class, - () -> client().prepareSearch(INDEX) - .addAggregation(terms("foos").field(TERM_FIELD) - .subAggregation(bucketSort("bucketSort", Collections.emptyList()))) - .get()); - assertThat(e.getMessage(), containsString("[bucketSort] is configured to perform nothing." + - " Please set either of [sort, size, from] to use bucket_sort")); + Exception e = expectThrows( + ActionRequestValidationException.class, + () -> client().prepareSearch(INDEX) + .addAggregation(terms("foos").field(TERM_FIELD).subAggregation(bucketSort("bucketSort", Collections.emptyList()))) + .get() + ); + assertThat( + e.getMessage(), + containsString("[bucketSort] is configured to perform nothing." 
+ " Please set either of [sort, size, from] to use bucket_sort") + ); } } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/pipeline/DateDerivativeIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/pipeline/DateDerivativeIT.java index 7d01a18424467..4744eafecef12 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/pipeline/DateDerivativeIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/pipeline/DateDerivativeIT.java @@ -57,14 +57,21 @@ private ZonedDateTime date(int month, int day) { } private static IndexRequestBuilder indexDoc(String idx, ZonedDateTime date, int value) throws Exception { - return client().prepareIndex(idx).setSource( - jsonBuilder().startObject().timeField("date", date).field("value", value).endObject()); + return client().prepareIndex(idx).setSource(jsonBuilder().startObject().timeField("date", date).field("value", value).endObject()); } private IndexRequestBuilder indexDoc(int month, int day, int value) throws Exception { - return client().prepareIndex("idx").setSource( - jsonBuilder().startObject().field("value", value).timeField("date", date(month, day)).startArray("dates") - .timeValue(date(month, day)).timeValue(date(month + 1, day + 1)).endArray().endObject()); + return client().prepareIndex("idx") + .setSource( + jsonBuilder().startObject() + .field("value", value) + .timeField("date", date(month, day)) + .startArray("dates") + .timeValue(date(month, day)) + .timeValue(date(month + 1, day + 1)) + .endArray() + .endObject() + ); } @Override @@ -75,15 +82,22 @@ public void setupSuiteScopeCluster() throws Exception { prepareCreate("empty_bucket_idx").setMapping("value", "type=integer").get(); List builders = new ArrayList<>(); for (int i = 0; i < 2; i++) { - builders.add(client().prepareIndex("empty_bucket_idx").setId("" + i).setSource( - jsonBuilder().startObject().field("value", i * 2).endObject())); + builders.add( + client().prepareIndex("empty_bucket_idx") + .setId("" + i) + .setSource(jsonBuilder().startObject().field("value", i * 2).endObject()) + ); } - builders.addAll(Arrays.asList(indexDoc(1, 2, 1), // date: Jan 2, dates: Jan 2, Feb 3 + builders.addAll( + Arrays.asList( + indexDoc(1, 2, 1), // date: Jan 2, dates: Jan 2, Feb 3 indexDoc(2, 2, 2), // date: Feb 2, dates: Feb 2, Mar 3 indexDoc(2, 15, 3), // date: Feb 15, dates: Feb 15, Mar 16 indexDoc(3, 2, 4), // date: Mar 2, dates: Mar 2, Apr 3 indexDoc(3, 15, 5), // date: Mar 15, dates: Mar 15, Apr 16 - indexDoc(3, 23, 6))); // date: Mar 23, dates: Mar 23, Apr 24 + indexDoc(3, 23, 6) + ) + ); // date: Mar 23, dates: Mar 23, Apr 24 indexRandom(true, builders); ensureSearchable(); } @@ -94,11 +108,14 @@ public void afterEachTest() throws IOException { } public void testSingleValuedField() throws Exception { - SearchResponse response = client() - .prepareSearch("idx") - .addAggregation( - dateHistogram("histo").field("date").calendarInterval(DateHistogramInterval.MONTH).minDocCount(0) - .subAggregation(derivative("deriv", "_count"))).get(); + SearchResponse response = client().prepareSearch("idx") + .addAggregation( + dateHistogram("histo").field("date") + .calendarInterval(DateHistogramInterval.MONTH) + .minDocCount(0) + .subAggregation(derivative("deriv", "_count")) + ) + .get(); assertSearchResponse(response); @@ -136,11 +153,14 @@ public void testSingleValuedField() throws Exception { } public void testSingleValuedFieldNormalised() throws Exception { - SearchResponse 
response = client() - .prepareSearch("idx") - .addAggregation( - dateHistogram("histo").field("date").calendarInterval(DateHistogramInterval.MONTH).minDocCount(0) - .subAggregation(derivative("deriv", "_count").unit(DateHistogramInterval.DAY))).get(); + SearchResponse response = client().prepareSearch("idx") + .addAggregation( + dateHistogram("histo").field("date") + .calendarInterval(DateHistogramInterval.MONTH) + .minDocCount(0) + .subAggregation(derivative("deriv", "_count").unit(DateHistogramInterval.DAY)) + ) + .get(); assertSearchResponse(response); @@ -197,12 +217,15 @@ public void testSingleValuedFieldNormalised_timeZone_CET_DstStart() throws Excep indexRandom(true, builders); ensureSearchable(); - SearchResponse response = client() - .prepareSearch(IDX_DST_START) - .addAggregation(dateHistogram("histo").field("date").calendarInterval(DateHistogramInterval.DAY) - .timeZone(timezone).minDocCount(0) - .subAggregation(derivative("deriv", "_count").unit(DateHistogramInterval.HOUR))) - .get(); + SearchResponse response = client().prepareSearch(IDX_DST_START) + .addAggregation( + dateHistogram("histo").field("date") + .calendarInterval(DateHistogramInterval.DAY) + .timeZone(timezone) + .minDocCount(0) + .subAggregation(derivative("deriv", "_count").unit(DateHistogramInterval.HOUR)) + ) + .get(); assertSearchResponse(response); @@ -213,21 +236,25 @@ public void testSingleValuedFieldNormalised_timeZone_CET_DstStart() throws Excep assertThat(buckets.size(), equalTo(4)); DateFormatter dateFormatter = DateFormatter.forPattern("uuuu-MM-dd"); - ZonedDateTime expectedKeyFirstBucket = - LocalDate.from(dateFormatter.parse("2012-03-24")).atStartOfDay(timezone).withZoneSameInstant(ZoneOffset.UTC); + ZonedDateTime expectedKeyFirstBucket = LocalDate.from(dateFormatter.parse("2012-03-24")) + .atStartOfDay(timezone) + .withZoneSameInstant(ZoneOffset.UTC); assertBucket(buckets.get(0), expectedKeyFirstBucket, 1L, nullValue(), null, null); - ZonedDateTime expectedKeySecondBucket = - LocalDate.from(dateFormatter.parse("2012-03-25")).atStartOfDay(timezone).withZoneSameInstant(ZoneOffset.UTC); - assertBucket(buckets.get(1), expectedKeySecondBucket,2L, notNullValue(), 1d, 1d / 24d); + ZonedDateTime expectedKeySecondBucket = LocalDate.from(dateFormatter.parse("2012-03-25")) + .atStartOfDay(timezone) + .withZoneSameInstant(ZoneOffset.UTC); + assertBucket(buckets.get(1), expectedKeySecondBucket, 2L, notNullValue(), 1d, 1d / 24d); // the following is normalized using a 23h bucket width - ZonedDateTime expectedKeyThirdBucket = - LocalDate.from(dateFormatter.parse("2012-03-26")).atStartOfDay(timezone).withZoneSameInstant(ZoneOffset.UTC); + ZonedDateTime expectedKeyThirdBucket = LocalDate.from(dateFormatter.parse("2012-03-26")) + .atStartOfDay(timezone) + .withZoneSameInstant(ZoneOffset.UTC); assertBucket(buckets.get(2), expectedKeyThirdBucket, 3L, notNullValue(), 1d, 1d / 23d); - ZonedDateTime expectedKeyFourthBucket = - LocalDate.from(dateFormatter.parse("2012-03-27")).atStartOfDay(timezone).withZoneSameInstant(ZoneOffset.UTC); + ZonedDateTime expectedKeyFourthBucket = LocalDate.from(dateFormatter.parse("2012-03-27")) + .atStartOfDay(timezone) + .withZoneSameInstant(ZoneOffset.UTC); assertBucket(buckets.get(3), expectedKeyFourthBucket, 4L, notNullValue(), 1d, 1d / 24d); } @@ -248,12 +275,15 @@ public void testSingleValuedFieldNormalised_timeZone_CET_DstEnd() throws Excepti indexRandom(true, builders); ensureSearchable(); - SearchResponse response = client() - .prepareSearch(IDX_DST_END) - 
.addAggregation(dateHistogram("histo").field("date").calendarInterval(DateHistogramInterval.DAY) - .timeZone(timezone).minDocCount(0) - .subAggregation(derivative("deriv", "_count").unit(DateHistogramInterval.HOUR))) - .get(); + SearchResponse response = client().prepareSearch(IDX_DST_END) + .addAggregation( + dateHistogram("histo").field("date") + .calendarInterval(DateHistogramInterval.DAY) + .timeZone(timezone) + .minDocCount(0) + .subAggregation(derivative("deriv", "_count").unit(DateHistogramInterval.HOUR)) + ) + .get(); assertSearchResponse(response); @@ -265,21 +295,25 @@ public void testSingleValuedFieldNormalised_timeZone_CET_DstEnd() throws Excepti DateFormatter dateFormatter = DateFormatter.forPattern("uuuu-MM-dd").withZone(ZoneOffset.UTC); - ZonedDateTime expectedKeyFirstBucket = - LocalDate.from(dateFormatter.parse("2012-10-27")).atStartOfDay(timezone).withZoneSameInstant(ZoneOffset.UTC); + ZonedDateTime expectedKeyFirstBucket = LocalDate.from(dateFormatter.parse("2012-10-27")) + .atStartOfDay(timezone) + .withZoneSameInstant(ZoneOffset.UTC); assertBucket(buckets.get(0), expectedKeyFirstBucket, 1L, nullValue(), null, null); - ZonedDateTime expectedKeySecondBucket = - LocalDate.from(dateFormatter.parse("2012-10-28")).atStartOfDay(timezone).withZoneSameInstant(ZoneOffset.UTC); + ZonedDateTime expectedKeySecondBucket = LocalDate.from(dateFormatter.parse("2012-10-28")) + .atStartOfDay(timezone) + .withZoneSameInstant(ZoneOffset.UTC); assertBucket(buckets.get(1), expectedKeySecondBucket, 2L, notNullValue(), 1d, 1d / 24d); // the following is normalized using a 25h bucket width - ZonedDateTime expectedKeyThirdBucket = - LocalDate.from(dateFormatter.parse("2012-10-29")).atStartOfDay(timezone).withZoneSameInstant(ZoneOffset.UTC); + ZonedDateTime expectedKeyThirdBucket = LocalDate.from(dateFormatter.parse("2012-10-29")) + .atStartOfDay(timezone) + .withZoneSameInstant(ZoneOffset.UTC); assertBucket(buckets.get(2), expectedKeyThirdBucket, 3L, notNullValue(), 1d, 1d / 25d); - ZonedDateTime expectedKeyFourthBucket = - LocalDate.from(dateFormatter.parse("2012-10-30")).atStartOfDay(timezone).withZoneSameInstant(ZoneOffset.UTC); + ZonedDateTime expectedKeyFourthBucket = LocalDate.from(dateFormatter.parse("2012-10-30")) + .atStartOfDay(timezone) + .withZoneSameInstant(ZoneOffset.UTC); assertBucket(buckets.get(3), expectedKeyFourthBucket, 4L, notNullValue(), 1d, 1d / 24d); } @@ -301,12 +335,15 @@ public void testSingleValuedFieldNormalised_timeZone_AsiaKathmandu() throws Exce indexRandom(true, builders); ensureSearchable(); - SearchResponse response = client() - .prepareSearch(IDX_DST_KATHMANDU) - .addAggregation(dateHistogram("histo").field("date").calendarInterval(DateHistogramInterval.HOUR) - .timeZone(timezone).minDocCount(0) - .subAggregation(derivative("deriv", "_count").unit(DateHistogramInterval.MINUTE))) - .get(); + SearchResponse response = client().prepareSearch(IDX_DST_KATHMANDU) + .addAggregation( + dateHistogram("histo").field("date") + .calendarInterval(DateHistogramInterval.HOUR) + .timeZone(timezone) + .minDocCount(0) + .subAggregation(derivative("deriv", "_count").unit(DateHistogramInterval.MINUTE)) + ) + .get(); assertSearchResponse(response); @@ -318,22 +355,26 @@ public void testSingleValuedFieldNormalised_timeZone_AsiaKathmandu() throws Exce DateFormatter dateFormatter = DateFormatter.forPattern("uuuu-MM-dd'T'HH:mm:ss").withZone(ZoneOffset.UTC); - ZonedDateTime expectedKeyFirstBucket = - 
LocalDateTime.from(dateFormatter.parse("1985-12-31T22:00:00")).atZone(timezone).withZoneSameInstant(ZoneOffset.UTC); - assertBucket(buckets.get(0), expectedKeyFirstBucket, 1L, nullValue(), null,null); + ZonedDateTime expectedKeyFirstBucket = LocalDateTime.from(dateFormatter.parse("1985-12-31T22:00:00")) + .atZone(timezone) + .withZoneSameInstant(ZoneOffset.UTC); + assertBucket(buckets.get(0), expectedKeyFirstBucket, 1L, nullValue(), null, null); - ZonedDateTime expectedKeySecondBucket = - LocalDateTime.from(dateFormatter.parse("1985-12-31T23:00:00")).atZone(timezone).withZoneSameInstant(ZoneOffset.UTC); - assertBucket(buckets.get(1), expectedKeySecondBucket, 2L, notNullValue(), 1d,1d / 60d); + ZonedDateTime expectedKeySecondBucket = LocalDateTime.from(dateFormatter.parse("1985-12-31T23:00:00")) + .atZone(timezone) + .withZoneSameInstant(ZoneOffset.UTC); + assertBucket(buckets.get(1), expectedKeySecondBucket, 2L, notNullValue(), 1d, 1d / 60d); // the following is normalized using a 105min bucket width - ZonedDateTime expectedKeyThirdBucket = - LocalDateTime.from(dateFormatter.parse("1986-01-01T01:00:00")).atZone(timezone).withZoneSameInstant(ZoneOffset.UTC); - assertBucket(buckets.get(2), expectedKeyThirdBucket, 3L, notNullValue(), 1d,1d / 105d); - - ZonedDateTime expectedKeyFourthBucket = - LocalDateTime.from(dateFormatter.parse("1986-01-01T02:00:00")).atZone(timezone).withZoneSameInstant(ZoneOffset.UTC); - assertBucket(buckets.get(3), expectedKeyFourthBucket, 4L, notNullValue(), 1d,1d / 60d); + ZonedDateTime expectedKeyThirdBucket = LocalDateTime.from(dateFormatter.parse("1986-01-01T01:00:00")) + .atZone(timezone) + .withZoneSameInstant(ZoneOffset.UTC); + assertBucket(buckets.get(2), expectedKeyThirdBucket, 3L, notNullValue(), 1d, 1d / 105d); + + ZonedDateTime expectedKeyFourthBucket = LocalDateTime.from(dateFormatter.parse("1986-01-01T02:00:00")) + .atZone(timezone) + .withZoneSameInstant(ZoneOffset.UTC); + assertBucket(buckets.get(3), expectedKeyFourthBucket, 4L, notNullValue(), 1d, 1d / 60d); } private static void addNTimes(int amount, String index, ZonedDateTime dateTime, List builders) throws Exception { @@ -342,8 +383,14 @@ private static void addNTimes(int amount, String index, ZonedDateTime dateTime, } } - private static void assertBucket(Histogram.Bucket bucket, ZonedDateTime expectedKey, long expectedDocCount, - Matcher derivativeMatcher, Double derivative, Double normalizedDerivative) { + private static void assertBucket( + Histogram.Bucket bucket, + ZonedDateTime expectedKey, + long expectedDocCount, + Matcher derivativeMatcher, + Double derivative, + Double normalizedDerivative + ) { assertThat(bucket, notNullValue()); assertThat((ZonedDateTime) bucket.getKey(), equalTo(expectedKey)); assertThat(bucket.getDocCount(), equalTo(expectedDocCount)); @@ -356,12 +403,15 @@ private static void assertBucket(Histogram.Bucket bucket, ZonedDateTime expected } public void testSingleValuedFieldWithSubAggregation() throws Exception { - SearchResponse response = client() - .prepareSearch("idx") - .addAggregation( - dateHistogram("histo").field("date").calendarInterval(DateHistogramInterval.MONTH).minDocCount(0) - .subAggregation(sum("sum").field("value")).subAggregation(derivative("deriv", "sum"))) - .get(); + SearchResponse response = client().prepareSearch("idx") + .addAggregation( + dateHistogram("histo").field("date") + .calendarInterval(DateHistogramInterval.MONTH) + .minDocCount(0) + .subAggregation(sum("sum").field("value")) + .subAggregation(derivative("deriv", "sum")) + ) + .get(); 
assertSearchResponse(response); @@ -370,9 +420,9 @@ public void testSingleValuedFieldWithSubAggregation() throws Exception { assertThat(histo.getName(), equalTo("histo")); List buckets = histo.getBuckets(); assertThat(buckets.size(), equalTo(3)); - Object[] propertiesKeys = (Object[]) ((InternalAggregation)histo).getProperty("_key"); - Object[] propertiesDocCounts = (Object[]) ((InternalAggregation)histo).getProperty("_count"); - Object[] propertiesCounts = (Object[]) ((InternalAggregation)histo).getProperty("sum.value"); + Object[] propertiesKeys = (Object[]) ((InternalAggregation) histo).getProperty("_key"); + Object[] propertiesDocCounts = (Object[]) ((InternalAggregation) histo).getProperty("_count"); + Object[] propertiesCounts = (Object[]) ((InternalAggregation) histo).getProperty("sum.value"); ZonedDateTime key = ZonedDateTime.of(2012, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC); Histogram.Bucket bucket = buckets.get(0); @@ -401,8 +451,13 @@ public void testSingleValuedFieldWithSubAggregation() throws Exception { deriv = bucket.getAggregations().get("deriv"); assertThat(deriv, notNullValue()); assertThat(deriv.value(), equalTo(4.0)); - assertThat(((InternalMultiBucketAggregation.InternalBucket)bucket).getProperty( - "histo", AggregationPath.parse("deriv.value").getPathElementsAsStringList()), equalTo(4.0)); + assertThat( + ((InternalMultiBucketAggregation.InternalBucket) bucket).getProperty( + "histo", + AggregationPath.parse("deriv.value").getPathElementsAsStringList() + ), + equalTo(4.0) + ); assertThat((ZonedDateTime) propertiesKeys[1], equalTo(key)); assertThat((long) propertiesDocCounts[1], equalTo(2L)); assertThat((double) propertiesCounts[1], equalTo(5.0)); @@ -419,19 +474,27 @@ public void testSingleValuedFieldWithSubAggregation() throws Exception { deriv = bucket.getAggregations().get("deriv"); assertThat(deriv, notNullValue()); assertThat(deriv.value(), equalTo(10.0)); - assertThat(((InternalMultiBucketAggregation.InternalBucket)bucket).getProperty( - "histo", AggregationPath.parse("deriv.value").getPathElementsAsStringList()), equalTo(10.0)); + assertThat( + ((InternalMultiBucketAggregation.InternalBucket) bucket).getProperty( + "histo", + AggregationPath.parse("deriv.value").getPathElementsAsStringList() + ), + equalTo(10.0) + ); assertThat((ZonedDateTime) propertiesKeys[2], equalTo(key)); assertThat((long) propertiesDocCounts[2], equalTo(3L)); assertThat((double) propertiesCounts[2], equalTo(15.0)); } public void testMultiValuedField() throws Exception { - SearchResponse response = client() - .prepareSearch("idx") - .addAggregation( - dateHistogram("histo").field("dates").calendarInterval(DateHistogramInterval.MONTH).minDocCount(0) - .subAggregation(derivative("deriv", "_count"))).get(); + SearchResponse response = client().prepareSearch("idx") + .addAggregation( + dateHistogram("histo").field("dates") + .calendarInterval(DateHistogramInterval.MONTH) + .minDocCount(0) + .subAggregation(derivative("deriv", "_count")) + ) + .get(); assertSearchResponse(response); @@ -460,7 +523,7 @@ public void testMultiValuedField() throws Exception { assertThat(docCountDeriv, notNullValue()); assertThat(docCountDeriv.value(), equalTo(2.0)); - key = ZonedDateTime.of(2012, 3, 1, 0, 0, 0, 0,ZoneOffset.UTC); + key = ZonedDateTime.of(2012, 3, 1, 0, 0, 0, 0, ZoneOffset.UTC); bucket = buckets.get(2); assertThat(bucket, notNullValue()); assertThat((ZonedDateTime) bucket.getKey(), equalTo(key)); @@ -482,11 +545,14 @@ public void testMultiValuedField() throws Exception { } public void testUnmapped() throws 
Exception { - SearchResponse response = client() - .prepareSearch("idx_unmapped") - .addAggregation( - dateHistogram("histo").field("date").calendarInterval(DateHistogramInterval.MONTH).minDocCount(0) - .subAggregation(derivative("deriv", "_count"))).get(); + SearchResponse response = client().prepareSearch("idx_unmapped") + .addAggregation( + dateHistogram("histo").field("date") + .calendarInterval(DateHistogramInterval.MONTH) + .minDocCount(0) + .subAggregation(derivative("deriv", "_count")) + ) + .get(); assertSearchResponse(response); @@ -497,11 +563,14 @@ public void testUnmapped() throws Exception { } public void testPartiallyUnmapped() throws Exception { - SearchResponse response = client() - .prepareSearch("idx", "idx_unmapped") - .addAggregation( - dateHistogram("histo").field("date").calendarInterval(DateHistogramInterval.MONTH).minDocCount(0) - .subAggregation(derivative("deriv", "_count"))).get(); + SearchResponse response = client().prepareSearch("idx", "idx_unmapped") + .addAggregation( + dateHistogram("histo").field("date") + .calendarInterval(DateHistogramInterval.MONTH) + .minDocCount(0) + .subAggregation(derivative("deriv", "_count")) + ) + .get(); assertSearchResponse(response); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/pipeline/DerivativeIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/pipeline/DerivativeIT.java index 1087bbac9bbb7..52db45710607b 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/pipeline/DerivativeIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/pipeline/DerivativeIT.java @@ -133,8 +133,7 @@ public void setupSuiteScopeCluster() throws Exception { for (int i = 0; i < numBuckets_empty_rnd; i++) { valueCounts_empty_rnd[i] = (long) randomIntBetween(1, 10); // make approximately half of the buckets empty - if (randomBoolean()) - valueCounts_empty_rnd[i] = 0L; + if (randomBoolean()) valueCounts_empty_rnd[i] = 0L; for (int docs = 0; docs < valueCounts_empty_rnd[i]; docs++) { builders.add(client().prepareIndex("empty_bucket_idx_rnd").setSource(newDocBuilder(i))); numDocsEmptyIdx_rnd++; @@ -157,12 +156,14 @@ private XContentBuilder newDocBuilder(int singleValueFieldValue) throws IOExcept */ public void testDocCountDerivative() { - SearchResponse response = client() - .prepareSearch("idx") - .addAggregation( - histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval) - .subAggregation(derivative("deriv", "_count")) - .subAggregation(derivative("2nd_deriv", "deriv"))).get(); + SearchResponse response = client().prepareSearch("idx") + .addAggregation( + histogram("histo").field(SINGLE_VALUED_FIELD_NAME) + .interval(interval) + .subAggregation(derivative("deriv", "_count")) + .subAggregation(derivative("2nd_deriv", "deriv")) + ) + .get(); assertSearchResponse(response); @@ -196,12 +197,15 @@ public void testDocCountDerivative() { * test first and second derivative on the sing */ public void testSingleValuedField_normalised() { - SearchResponse response = client() - .prepareSearch("idx") - .addAggregation( - histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval).minDocCount(0) - .subAggregation(derivative("deriv", "_count").unit("1ms")) - .subAggregation(derivative("2nd_deriv", "deriv").unit("10ms"))).get(); + SearchResponse response = client().prepareSearch("idx") + .addAggregation( + histogram("histo").field(SINGLE_VALUED_FIELD_NAME) + .interval(interval) + .minDocCount(0) + 
.subAggregation(derivative("deriv", "_count").unit("1ms")) + .subAggregation(derivative("2nd_deriv", "deriv").unit("10ms")) + ) + .get(); assertSearchResponse(response); @@ -234,12 +238,14 @@ public void testSingleValuedField_normalised() { } public void testSingleValueAggDerivative() throws Exception { - SearchResponse response = client() - .prepareSearch("idx") - .addAggregation( - histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval) - .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME)) - .subAggregation(derivative("deriv", "sum"))).get(); + SearchResponse response = client().prepareSearch("idx") + .addAggregation( + histogram("histo").field(SINGLE_VALUED_FIELD_NAME) + .interval(interval) + .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME)) + .subAggregation(derivative("deriv", "sum")) + ) + .get(); assertSearchResponse(response); @@ -247,9 +253,9 @@ public void testSingleValueAggDerivative() throws Exception { assertThat(deriv, notNullValue()); assertThat(deriv.getName(), equalTo("histo")); assertThat(deriv.getBuckets().size(), equalTo(numValueBuckets)); - Object[] propertiesKeys = (Object[]) ((InternalAggregation)deriv).getProperty("_key"); - Object[] propertiesDocCounts = (Object[]) ((InternalAggregation)deriv).getProperty("_count"); - Object[] propertiesSumCounts = (Object[]) ((InternalAggregation)deriv).getProperty("sum.value"); + Object[] propertiesKeys = (Object[]) ((InternalAggregation) deriv).getProperty("_key"); + Object[] propertiesDocCounts = (Object[]) ((InternalAggregation) deriv).getProperty("_count"); + Object[] propertiesSumCounts = (Object[]) ((InternalAggregation) deriv).getProperty("sum.value"); List buckets = new ArrayList<>(deriv.getBuckets()); Long expectedSumPreviousBucket = Long.MIN_VALUE; // start value, gets @@ -266,9 +272,13 @@ public void testSingleValueAggDerivative() throws Exception { assertThat(sumDeriv, notNullValue()); long sumDerivValue = expectedSum - expectedSumPreviousBucket; assertThat(sumDeriv.value(), equalTo((double) sumDerivValue)); - assertThat(((InternalMultiBucketAggregation.InternalBucket)bucket).getProperty("histo", - AggregationPath.parse("deriv.value").getPathElementsAsStringList()), - equalTo((double) sumDerivValue)); + assertThat( + ((InternalMultiBucketAggregation.InternalBucket) bucket).getProperty( + "histo", + AggregationPath.parse("deriv.value").getPathElementsAsStringList() + ), + equalTo((double) sumDerivValue) + ); } else { assertThat(sumDeriv, nullValue()); } @@ -280,12 +290,14 @@ public void testSingleValueAggDerivative() throws Exception { } public void testMultiValueAggDerivative() throws Exception { - SearchResponse response = client() - .prepareSearch("idx") - .addAggregation( - histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval) - .subAggregation(stats("stats").field(SINGLE_VALUED_FIELD_NAME)) - .subAggregation(derivative("deriv", "stats.sum"))).get(); + SearchResponse response = client().prepareSearch("idx") + .addAggregation( + histogram("histo").field(SINGLE_VALUED_FIELD_NAME) + .interval(interval) + .subAggregation(stats("stats").field(SINGLE_VALUED_FIELD_NAME)) + .subAggregation(derivative("deriv", "stats.sum")) + ) + .get(); assertSearchResponse(response); @@ -293,9 +305,9 @@ public void testMultiValueAggDerivative() throws Exception { assertThat(deriv, notNullValue()); assertThat(deriv.getName(), equalTo("histo")); assertThat(deriv.getBuckets().size(), equalTo(numValueBuckets)); - Object[] propertiesKeys = (Object[]) 
((InternalAggregation)deriv).getProperty("_key"); - Object[] propertiesDocCounts = (Object[]) ((InternalAggregation)deriv).getProperty("_count"); - Object[] propertiesSumCounts = (Object[]) ((InternalAggregation)deriv).getProperty("stats.sum"); + Object[] propertiesKeys = (Object[]) ((InternalAggregation) deriv).getProperty("_key"); + Object[] propertiesDocCounts = (Object[]) ((InternalAggregation) deriv).getProperty("_count"); + Object[] propertiesSumCounts = (Object[]) ((InternalAggregation) deriv).getProperty("stats.sum"); List buckets = new ArrayList<>(deriv.getBuckets()); Long expectedSumPreviousBucket = Long.MIN_VALUE; // start value, gets @@ -312,9 +324,13 @@ public void testMultiValueAggDerivative() throws Exception { assertThat(sumDeriv, notNullValue()); long sumDerivValue = expectedSum - expectedSumPreviousBucket; assertThat(sumDeriv.value(), equalTo((double) sumDerivValue)); - assertThat(((InternalMultiBucketAggregation.InternalBucket)bucket).getProperty("histo", - AggregationPath.parse("deriv.value").getPathElementsAsStringList()), - equalTo((double) sumDerivValue)); + assertThat( + ((InternalMultiBucketAggregation.InternalBucket) bucket).getProperty( + "histo", + AggregationPath.parse("deriv.value").getPathElementsAsStringList() + ), + equalTo((double) sumDerivValue) + ); } else { assertThat(sumDeriv, nullValue()); } @@ -326,11 +342,11 @@ public void testMultiValueAggDerivative() throws Exception { } public void testUnmapped() throws Exception { - SearchResponse response = client() - .prepareSearch("idx_unmapped") - .addAggregation( - histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval) - .subAggregation(derivative("deriv", "_count"))).get(); + SearchResponse response = client().prepareSearch("idx_unmapped") + .addAggregation( + histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval).subAggregation(derivative("deriv", "_count")) + ) + .get(); assertSearchResponse(response); @@ -341,11 +357,11 @@ public void testUnmapped() throws Exception { } public void testPartiallyUnmapped() throws Exception { - SearchResponse response = client() - .prepareSearch("idx", "idx_unmapped") - .addAggregation( - histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval) - .subAggregation(derivative("deriv", "_count"))).get(); + SearchResponse response = client().prepareSearch("idx", "idx_unmapped") + .addAggregation( + histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval).subAggregation(derivative("deriv", "_count")) + ) + .get(); assertSearchResponse(response); @@ -369,12 +385,10 @@ public void testPartiallyUnmapped() throws Exception { } public void testDocCountDerivativeWithGaps() throws Exception { - SearchResponse searchResponse = client() - .prepareSearch("empty_bucket_idx") - .setQuery(matchAllQuery()) - .addAggregation( - histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(1) - .subAggregation(derivative("deriv", "_count"))).get(); + SearchResponse searchResponse = client().prepareSearch("empty_bucket_idx") + .setQuery(matchAllQuery()) + .addAggregation(histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(1).subAggregation(derivative("deriv", "_count"))) + .get(); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(numDocsEmptyIdx)); @@ -397,14 +411,15 @@ public void testDocCountDerivativeWithGaps() throws Exception { } public void testDocCountDerivativeWithGaps_random() throws Exception { - SearchResponse searchResponse = client() - .prepareSearch("empty_bucket_idx_rnd") - .setQuery(matchAllQuery()) - 
.addAggregation( - histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(1) - .extendedBounds(0L, numBuckets_empty_rnd - 1) - .subAggregation(derivative("deriv", "_count").gapPolicy(randomFrom(GapPolicy.values())))) - .get(); + SearchResponse searchResponse = client().prepareSearch("empty_bucket_idx_rnd") + .setQuery(matchAllQuery()) + .addAggregation( + histogram("histo").field(SINGLE_VALUED_FIELD_NAME) + .interval(1) + .extendedBounds(0L, numBuckets_empty_rnd - 1) + .subAggregation(derivative("deriv", "_count").gapPolicy(randomFrom(GapPolicy.values()))) + ) + .get(); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(numDocsEmptyIdx_rnd)); @@ -427,12 +442,14 @@ public void testDocCountDerivativeWithGaps_random() throws Exception { } public void testDocCountDerivativeWithGaps_insertZeros() throws Exception { - SearchResponse searchResponse = client() - .prepareSearch("empty_bucket_idx") - .setQuery(matchAllQuery()) - .addAggregation( - histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(1) - .subAggregation(derivative("deriv", "_count").gapPolicy(GapPolicy.INSERT_ZEROS))).get(); + SearchResponse searchResponse = client().prepareSearch("empty_bucket_idx") + .setQuery(matchAllQuery()) + .addAggregation( + histogram("histo").field(SINGLE_VALUED_FIELD_NAME) + .interval(1) + .subAggregation(derivative("deriv", "_count").gapPolicy(GapPolicy.INSERT_ZEROS)) + ) + .get(); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(numDocsEmptyIdx)); @@ -455,13 +472,15 @@ public void testDocCountDerivativeWithGaps_insertZeros() throws Exception { } public void testSingleValueAggDerivativeWithGaps() throws Exception { - SearchResponse searchResponse = client() - .prepareSearch("empty_bucket_idx") - .setQuery(matchAllQuery()) - .addAggregation( - histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(1) - .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME)) - .subAggregation(derivative("deriv", "sum"))).get(); + SearchResponse searchResponse = client().prepareSearch("empty_bucket_idx") + .setQuery(matchAllQuery()) + .addAggregation( + histogram("histo").field(SINGLE_VALUED_FIELD_NAME) + .interval(1) + .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME)) + .subAggregation(derivative("deriv", "sum")) + ) + .get(); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(numDocsEmptyIdx)); @@ -496,13 +515,15 @@ public void testSingleValueAggDerivativeWithGaps() throws Exception { } public void testSingleValueAggDerivativeWithGaps_insertZeros() throws Exception { - SearchResponse searchResponse = client() - .prepareSearch("empty_bucket_idx") - .setQuery(matchAllQuery()) - .addAggregation( - histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(1) - .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME)) - .subAggregation(derivative("deriv", "sum").gapPolicy(GapPolicy.INSERT_ZEROS))).get(); + SearchResponse searchResponse = client().prepareSearch("empty_bucket_idx") + .setQuery(matchAllQuery()) + .addAggregation( + histogram("histo").field(SINGLE_VALUED_FIELD_NAME) + .interval(1) + .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME)) + .subAggregation(derivative("deriv", "sum").gapPolicy(GapPolicy.INSERT_ZEROS)) + ) + .get(); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(numDocsEmptyIdx)); @@ -534,14 +555,16 @@ public void testSingleValueAggDerivativeWithGaps_insertZeros() throws Exception public void testSingleValueAggDerivativeWithGaps_random() throws Exception { GapPolicy gapPolicy = 
randomFrom(GapPolicy.values()); - SearchResponse searchResponse = client() - .prepareSearch("empty_bucket_idx_rnd") - .setQuery(matchAllQuery()) - .addAggregation( - histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(1) - .extendedBounds(0L, (long) numBuckets_empty_rnd - 1) - .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME)) - .subAggregation(derivative("deriv", "sum").gapPolicy(gapPolicy))).get(); + SearchResponse searchResponse = client().prepareSearch("empty_bucket_idx_rnd") + .setQuery(matchAllQuery()) + .addAggregation( + histogram("histo").field(SINGLE_VALUED_FIELD_NAME) + .interval(1) + .extendedBounds(0L, (long) numBuckets_empty_rnd - 1) + .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME)) + .subAggregation(derivative("deriv", "sum").gapPolicy(gapPolicy)) + ) + .get(); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(numDocsEmptyIdx_rnd)); @@ -578,14 +601,17 @@ public void testSingleValueAggDerivativeWithGaps_random() throws Exception { public void testSingleValueAggDerivative_invalidPath() throws Exception { try { client().prepareSearch("idx") - .addAggregation( - histogram("histo") - .field(SINGLE_VALUED_FIELD_NAME) - .interval(interval) - .subAggregation( - filters("filters", QueryBuilders.termQuery("tag", "foo")).subAggregation( - sum("sum").field(SINGLE_VALUED_FIELD_NAME))) - .subAggregation(derivative("deriv", "filters>get>sum"))).get(); + .addAggregation( + histogram("histo").field(SINGLE_VALUED_FIELD_NAME) + .interval(interval) + .subAggregation( + filters("filters", QueryBuilders.termQuery("tag", "foo")).subAggregation( + sum("sum").field(SINGLE_VALUED_FIELD_NAME) + ) + ) + .subAggregation(derivative("deriv", "filters>get>sum")) + ) + .get(); fail("Expected an Exception but didn't get one"); } catch (Exception e) { Throwable cause = ExceptionsHelper.unwrapCause(e); @@ -612,29 +638,31 @@ public void testDerivDerivNPE() throws Exception { value = null; } - XContentBuilder doc = jsonBuilder() - .startObject() - .field("tick", i) - .field("value", value) - .endObject(); + XContentBuilder doc = jsonBuilder().startObject().field("tick", i).field("value", value).endObject(); client().prepareIndex("deriv_npe").setSource(doc).get(); } refresh(); - SearchResponse response = client() - .prepareSearch("deriv_npe") - .addAggregation( - histogram("histo").field("tick").interval(1) - .subAggregation(avg("avg").field("value")) - .subAggregation(derivative("deriv1", "avg")) - .subAggregation(derivative("deriv2", "deriv1"))).get(); + SearchResponse response = client().prepareSearch("deriv_npe") + .addAggregation( + histogram("histo").field("tick") + .interval(1) + .subAggregation(avg("avg").field("value")) + .subAggregation(derivative("deriv1", "avg")) + .subAggregation(derivative("deriv2", "deriv1")) + ) + .get(); assertSearchResponse(response); } - private void checkBucketKeyAndDocCount(final String msg, final Histogram.Bucket bucket, final long expectedKey, - final long expectedDocCount) { + private void checkBucketKeyAndDocCount( + final String msg, + final Histogram.Bucket bucket, + final long expectedKey, + final long expectedDocCount + ) { assertThat(msg, bucket, notNullValue()); assertThat(msg + " key", ((Number) bucket.getKey()).longValue(), equalTo(expectedKey)); assertThat(msg + " docCount", bucket.getDocCount(), equalTo(expectedDocCount)); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/pipeline/ExtendedStatsBucketIT.java 
b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/pipeline/ExtendedStatsBucketIT.java index 6f673cdb4aecc..ecef1b8aa0f86 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/pipeline/ExtendedStatsBucketIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/pipeline/ExtendedStatsBucketIT.java @@ -50,8 +50,7 @@ public class ExtendedStatsBucketIT extends ESIntegTestCase { @Override public void setupSuiteScopeCluster() throws Exception { - assertAcked(client().admin().indices().prepareCreate("idx") - .setMapping("tag", "type=keyword").get()); + assertAcked(client().admin().indices().prepareCreate("idx").setMapping("tag", "type=keyword").get()); createIndex("idx_unmapped", "idx_gappy"); numDocs = randomIntBetween(6, 20); @@ -67,9 +66,15 @@ public void setupSuiteScopeCluster() throws Exception { for (int i = 0; i < numDocs; i++) { int fieldValue = randomIntBetween(minRandomValue, maxRandomValue); - builders.add(client().prepareIndex("idx").setSource( - jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, fieldValue).field("tag", "tag" + (i % interval)) - .endObject())); + builders.add( + client().prepareIndex("idx") + .setSource( + jsonBuilder().startObject() + .field(SINGLE_VALUED_FIELD_NAME, fieldValue) + .field("tag", "tag" + (i % interval)) + .endObject() + ) + ); final int bucket = (fieldValue / interval); // + (fieldValue < 0 ? -1 : 0) - (minRandomValue / interval - 1); valueCounts[bucket]++; } @@ -77,14 +82,20 @@ public void setupSuiteScopeCluster() throws Exception { for (int i = 0; i < 6; i++) { // creates 6 documents where the value of the field is 0, 1, 2, 3, // 3, 5 - builders.add(client().prepareIndex("idx_gappy").setId("" + i).setSource( - jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, i == 4 ? 3 : i).endObject())); + builders.add( + client().prepareIndex("idx_gappy") + .setId("" + i) + .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, i == 4 ? 3 : i).endObject()) + ); } assertAcked(prepareCreate("empty_bucket_idx").setMapping(SINGLE_VALUED_FIELD_NAME, "type=integer")); for (int i = 0; i < 2; i++) { - builders.add(client().prepareIndex("empty_bucket_idx").setId("" + i).setSource( - jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, i * 2).endObject())); + builders.add( + client().prepareIndex("empty_bucket_idx") + .setId("" + i) + .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, i * 2).endObject()) + ); } indexRandom(true, builders); ensureSearchable(); @@ -96,8 +107,9 @@ public void setupSuiteScopeCluster() throws Exception { public void testGappyIndexWithSigma() { double sigma = randomDoubleBetween(1.0, 6.0, true); SearchResponse response = client().prepareSearch("idx_gappy") - .addAggregation(histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(1L)) - .addAggregation(extendedStatsBucket("extended_stats_bucket", "histo>_count").sigma(sigma)).get(); + .addAggregation(histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(1L)) + .addAggregation(extendedStatsBucket("extended_stats_bucket", "histo>_count").sigma(sigma)) + .get(); assertSearchResponse(response); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); @@ -126,7 +138,7 @@ public void testGappyIndexWithSigma() { double sumOfSqrs = 1.0 + 1.0 + 1.0 + 4.0 + 0.0 + 1.0; double avg = sum / count; double var = (sumOfSqrs - ((sum * sum) / count)) / count; - var = var < 0 ? 0 : var; + var = var < 0 ? 
0 : var; double stdDev = Math.sqrt(var); assertThat(extendedStatsBucketValue, notNullValue()); assertThat(extendedStatsBucketValue.getName(), equalTo("extended_stats_bucket")); @@ -144,9 +156,11 @@ public void testGappyIndexWithSigma() { public void testDocCountTopLevel() throws Exception { SearchResponse response = client().prepareSearch("idx") - .addAggregation(histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval) - .extendedBounds(minRandomValue, maxRandomValue)) - .addAggregation(extendedStatsBucket("extended_stats_bucket", "histo>_count")).get(); + .addAggregation( + histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval).extendedBounds(minRandomValue, maxRandomValue) + ) + .addAggregation(extendedStatsBucket("extended_stats_bucket", "histo>_count")) + .get(); assertSearchResponse(response); @@ -184,16 +198,16 @@ public void testDocCountTopLevel() throws Exception { } public void testDocCountAsSubAgg() throws Exception { - SearchResponse response = client() - .prepareSearch("idx") - .addAggregation( - terms("terms") - .field("tag") - .order(BucketOrder.key(true)) - .subAggregation( - histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval) - .extendedBounds(minRandomValue, maxRandomValue)) - .subAggregation(extendedStatsBucket("extended_stats_bucket", "histo>_count"))).get(); + SearchResponse response = client().prepareSearch("idx") + .addAggregation( + terms("terms").field("tag") + .order(BucketOrder.key(true)) + .subAggregation( + histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval).extendedBounds(minRandomValue, maxRandomValue) + ) + .subAggregation(extendedStatsBucket("extended_stats_bucket", "histo>_count")) + ) + .get(); assertSearchResponse(response); @@ -241,10 +255,10 @@ public void testDocCountAsSubAgg() throws Exception { } public void testMetricTopLevel() throws Exception { - SearchResponse response = client() - .prepareSearch("idx") - .addAggregation(terms("terms").field("tag").subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME))) - .addAggregation(extendedStatsBucket("extended_stats_bucket", "terms>sum")).get(); + SearchResponse response = client().prepareSearch("idx") + .addAggregation(terms("terms").field("tag").subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME))) + .addAggregation(extendedStatsBucket("extended_stats_bucket", "terms>sum")) + .get(); assertSearchResponse(response); @@ -284,17 +298,19 @@ public void testMetricTopLevel() throws Exception { } public void testMetricAsSubAgg() throws Exception { - SearchResponse response = client() - .prepareSearch("idx") - .addAggregation( - terms("terms") - .field("tag") - .order(BucketOrder.key(true)) - .subAggregation( - histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval) - .extendedBounds(minRandomValue, maxRandomValue) - .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME))) - .subAggregation(extendedStatsBucket("extended_stats_bucket", "histo>sum"))).get(); + SearchResponse response = client().prepareSearch("idx") + .addAggregation( + terms("terms").field("tag") + .order(BucketOrder.key(true)) + .subAggregation( + histogram("histo").field(SINGLE_VALUED_FIELD_NAME) + .interval(interval) + .extendedBounds(minRandomValue, maxRandomValue) + .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME)) + ) + .subAggregation(extendedStatsBucket("extended_stats_bucket", "histo>sum")) + ) + .get(); assertSearchResponse(response); @@ -346,19 +362,19 @@ public void testMetricAsSubAgg() throws Exception { } public void 
testMetricAsSubAggWithInsertZeros() throws Exception { - SearchResponse response = client() - .prepareSearch("idx") - .addAggregation( - terms("terms") - .field("tag") - .order(BucketOrder.key(true)) - .subAggregation( - histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval) - .extendedBounds(minRandomValue, maxRandomValue) - .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME))) - .subAggregation(extendedStatsBucket("extended_stats_bucket", "histo>sum") - .gapPolicy(GapPolicy.INSERT_ZEROS))) - .get(); + SearchResponse response = client().prepareSearch("idx") + .addAggregation( + terms("terms").field("tag") + .order(BucketOrder.key(true)) + .subAggregation( + histogram("histo").field(SINGLE_VALUED_FIELD_NAME) + .interval(interval) + .extendedBounds(minRandomValue, maxRandomValue) + .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME)) + ) + .subAggregation(extendedStatsBucket("extended_stats_bucket", "histo>sum").gapPolicy(GapPolicy.INSERT_ZEROS)) + ) + .get(); assertSearchResponse(response); @@ -410,9 +426,13 @@ public void testMetricAsSubAggWithInsertZeros() throws Exception { public void testNoBuckets() throws Exception { SearchResponse response = client().prepareSearch("idx") - .addAggregation(terms("terms").field("tag").includeExclude(new IncludeExclude(null, "tag.*")) - .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME))) - .addAggregation(extendedStatsBucket("extended_stats_bucket", "terms>sum")).get(); + .addAggregation( + terms("terms").field("tag") + .includeExclude(new IncludeExclude(null, "tag.*")) + .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME)) + ) + .addAggregation(extendedStatsBucket("extended_stats_bucket", "terms>sum")) + .get(); assertSearchResponse(response); @@ -429,18 +449,22 @@ public void testNoBuckets() throws Exception { } public void testBadSigmaAsSubAgg() throws Exception { - Exception ex = expectThrows(Exception.class, () -> client() - .prepareSearch("idx") - .addAggregation( - terms("terms") - .field("tag") - .order(BucketOrder.key(true)) - .subAggregation( - histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval) - .extendedBounds(minRandomValue, maxRandomValue) - .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME))) - .subAggregation(extendedStatsBucket("extended_stats_bucket", "histo>sum") - .sigma(-1.0))).get()); + Exception ex = expectThrows( + Exception.class, + () -> client().prepareSearch("idx") + .addAggregation( + terms("terms").field("tag") + .order(BucketOrder.key(true)) + .subAggregation( + histogram("histo").field(SINGLE_VALUED_FIELD_NAME) + .interval(interval) + .extendedBounds(minRandomValue, maxRandomValue) + .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME)) + ) + .subAggregation(extendedStatsBucket("extended_stats_bucket", "histo>sum").sigma(-1.0)) + ) + .get() + ); Throwable cause = ExceptionsHelper.unwrapCause(ex); if (cause == null) { throw ex; @@ -456,17 +480,17 @@ public void testBadSigmaAsSubAgg() throws Exception { } public void testNested() throws Exception { - SearchResponse response = client() - .prepareSearch("idx") - .addAggregation( - terms("terms") - .field("tag") - .order(BucketOrder.key(true)) - .subAggregation( - histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval) - .extendedBounds(minRandomValue, maxRandomValue)) - .subAggregation(extendedStatsBucket("avg_histo_bucket", "histo>_count"))) - .addAggregation(extendedStatsBucket("avg_terms_bucket", "terms>avg_histo_bucket.avg")).get(); + SearchResponse response = 
client().prepareSearch("idx") + .addAggregation( + terms("terms").field("tag") + .order(BucketOrder.key(true)) + .subAggregation( + histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval).extendedBounds(minRandomValue, maxRandomValue) + ) + .subAggregation(extendedStatsBucket("avg_histo_bucket", "histo>_count")) + ) + .addAggregation(extendedStatsBucket("avg_terms_bucket", "terms>avg_histo_bucket.avg")) + .get(); assertSearchResponse(response); @@ -508,7 +532,6 @@ public void testNested() throws Exception { assertThat(extendedStatsBucketValue.getName(), equalTo("avg_histo_bucket")); assertThat(extendedStatsBucketValue.getAvg(), equalTo(avgHistoValue)); - aggTermsCount++; aggTermsSum += avgHistoValue; min = Math.min(min, avgHistoValue); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/pipeline/MaxBucketIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/pipeline/MaxBucketIT.java index f73b76054fc4b..6a183f15d1c04 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/pipeline/MaxBucketIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/pipeline/MaxBucketIT.java @@ -61,8 +61,7 @@ public class MaxBucketIT extends ESIntegTestCase { @Override public void setupSuiteScopeCluster() throws Exception { - assertAcked(client().admin().indices().prepareCreate("idx") - .setMapping("tag", "type=keyword").get()); + assertAcked(client().admin().indices().prepareCreate("idx").setMapping("tag", "type=keyword").get()); createIndex("idx_unmapped"); numDocs = randomIntBetween(6, 20); @@ -78,17 +77,26 @@ public void setupSuiteScopeCluster() throws Exception { for (int i = 0; i < numDocs; i++) { int fieldValue = randomIntBetween(minRandomValue, maxRandomValue); - builders.add(client().prepareIndex("idx").setSource( - jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, fieldValue).field("tag", "tag" + (i % interval)) - .endObject())); + builders.add( + client().prepareIndex("idx") + .setSource( + jsonBuilder().startObject() + .field(SINGLE_VALUED_FIELD_NAME, fieldValue) + .field("tag", "tag" + (i % interval)) + .endObject() + ) + ); final int bucket = (fieldValue / interval); // + (fieldValue < 0 ? 
-1 : 0) - (minRandomValue / interval - 1); valueCounts[bucket]++; } assertAcked(prepareCreate("empty_bucket_idx").setMapping(SINGLE_VALUED_FIELD_NAME, "type=integer")); for (int i = 0; i < 2; i++) { - builders.add(client().prepareIndex("empty_bucket_idx").setId("" + i).setSource( - jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, i * 2).endObject())); + builders.add( + client().prepareIndex("empty_bucket_idx") + .setId("" + i) + .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, i * 2).endObject()) + ); } indexRandom(true, builders); ensureSearchable(); @@ -96,9 +104,11 @@ public void setupSuiteScopeCluster() throws Exception { public void testDocCountTopLevel() throws Exception { SearchResponse response = client().prepareSearch("idx") - .addAggregation(histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval) - .extendedBounds(minRandomValue, maxRandomValue)) - .addAggregation(maxBucket("max_bucket", "histo>_count")).get(); + .addAggregation( + histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval).extendedBounds(minRandomValue, maxRandomValue) + ) + .addAggregation(maxBucket("max_bucket", "histo>_count")) + .get(); assertSearchResponse(response); @@ -132,16 +142,16 @@ public void testDocCountTopLevel() throws Exception { } public void testDocCountAsSubAgg() throws Exception { - SearchResponse response = client() - .prepareSearch("idx") - .addAggregation( - terms("terms") - .field("tag") - .order(BucketOrder.key(true)) - .subAggregation( - histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval) - .extendedBounds(minRandomValue, maxRandomValue)) - .subAggregation(maxBucket("max_bucket", "histo>_count"))).get(); + SearchResponse response = client().prepareSearch("idx") + .addAggregation( + terms("terms").field("tag") + .order(BucketOrder.key(true)) + .subAggregation( + histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval).extendedBounds(minRandomValue, maxRandomValue) + ) + .subAggregation(maxBucket("max_bucket", "histo>_count")) + ) + .get(); assertSearchResponse(response); @@ -185,10 +195,10 @@ public void testDocCountAsSubAgg() throws Exception { } public void testMetricTopLevel() throws Exception { - SearchResponse response = client() - .prepareSearch("idx") - .addAggregation(terms("terms").field("tag").subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME))) - .addAggregation(maxBucket("max_bucket", "terms>sum")).get(); + SearchResponse response = client().prepareSearch("idx") + .addAggregation(terms("terms").field("tag").subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME))) + .addAggregation(maxBucket("max_bucket", "terms>sum")) + .get(); assertSearchResponse(response); @@ -224,17 +234,19 @@ public void testMetricTopLevel() throws Exception { } public void testMetricAsSubAgg() throws Exception { - SearchResponse response = client() - .prepareSearch("idx") - .addAggregation( - terms("terms") - .field("tag") - .order(BucketOrder.key(true)) - .subAggregation( - histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval) - .extendedBounds(minRandomValue, maxRandomValue) - .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME))) - .subAggregation(maxBucket("max_bucket", "histo>sum"))).get(); + SearchResponse response = client().prepareSearch("idx") + .addAggregation( + terms("terms").field("tag") + .order(BucketOrder.key(true)) + .subAggregation( + histogram("histo").field(SINGLE_VALUED_FIELD_NAME) + .interval(interval) + .extendedBounds(minRandomValue, maxRandomValue) + 
.subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME)) + ) + .subAggregation(maxBucket("max_bucket", "histo>sum")) + ) + .get(); assertSearchResponse(response); @@ -282,15 +294,16 @@ public void testMetricAsSubAgg() throws Exception { } public void testMetricAsSubAggOfSingleBucketAgg() throws Exception { - SearchResponse response = client() - .prepareSearch("idx") - .addAggregation( - filter("filter", termQuery("tag", "tag0")) - .subAggregation( - histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval) - .extendedBounds(minRandomValue, maxRandomValue) - .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME))) - .subAggregation(maxBucket("max_bucket", "histo>sum"))).get(); + SearchResponse response = client().prepareSearch("idx") + .addAggregation( + filter("filter", termQuery("tag", "tag0")).subAggregation( + histogram("histo").field(SINGLE_VALUED_FIELD_NAME) + .interval(interval) + .extendedBounds(minRandomValue, maxRandomValue) + .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME)) + ).subAggregation(maxBucket("max_bucket", "histo>sum")) + ) + .get(); assertSearchResponse(response); @@ -329,18 +342,19 @@ public void testMetricAsSubAggOfSingleBucketAgg() throws Exception { } public void testMetricAsSubAggWithInsertZeros() throws Exception { - SearchResponse response = client() - .prepareSearch("idx") - .addAggregation( - terms("terms") - .field("tag") - .order(BucketOrder.key(true)) - .subAggregation( - histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval) - .extendedBounds(minRandomValue, maxRandomValue) - .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME))) - .subAggregation(maxBucket("max_bucket", "histo>sum").gapPolicy(GapPolicy.INSERT_ZEROS))) - .get(); + SearchResponse response = client().prepareSearch("idx") + .addAggregation( + terms("terms").field("tag") + .order(BucketOrder.key(true)) + .subAggregation( + histogram("histo").field(SINGLE_VALUED_FIELD_NAME) + .interval(interval) + .extendedBounds(minRandomValue, maxRandomValue) + .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME)) + ) + .subAggregation(maxBucket("max_bucket", "histo>sum").gapPolicy(GapPolicy.INSERT_ZEROS)) + ) + .get(); assertSearchResponse(response); @@ -387,9 +401,13 @@ public void testMetricAsSubAggWithInsertZeros() throws Exception { public void testNoBuckets() throws Exception { SearchResponse response = client().prepareSearch("idx") - .addAggregation(terms("terms").field("tag").includeExclude(new IncludeExclude(null, "tag.*")) - .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME))) - .addAggregation(maxBucket("max_bucket", "terms>sum")).get(); + .addAggregation( + terms("terms").field("tag") + .includeExclude(new IncludeExclude(null, "tag.*")) + .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME)) + ) + .addAggregation(maxBucket("max_bucket", "terms>sum")) + .get(); assertSearchResponse(response); @@ -407,17 +425,17 @@ public void testNoBuckets() throws Exception { } public void testNested() throws Exception { - SearchResponse response = client() - .prepareSearch("idx") - .addAggregation( - terms("terms") - .field("tag") - .order(BucketOrder.key(true)) - .subAggregation( - histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval) - .extendedBounds(minRandomValue, maxRandomValue)) - .subAggregation(maxBucket("max_histo_bucket", "histo>_count"))) - .addAggregation(maxBucket("max_terms_bucket", "terms>max_histo_bucket")).get(); + SearchResponse response = client().prepareSearch("idx") + .addAggregation( + 
terms("terms").field("tag") + .order(BucketOrder.key(true)) + .subAggregation( + histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval).extendedBounds(minRandomValue, maxRandomValue) + ) + .subAggregation(maxBucket("max_histo_bucket", "histo>_count")) + ) + .addAggregation(maxBucket("max_terms_bucket", "terms>max_histo_bucket")) + .get(); assertSearchResponse(response); @@ -491,19 +509,36 @@ public void testFieldIsntWrittenOutTwice() throws Exception { // so that there is an UnmappedTerms in the list to reduce. createIndex("foo_1"); - XContentBuilder builder = jsonBuilder().startObject().startObject("properties") - .startObject("@timestamp").field("type", "date").endObject() - .startObject("license").startObject("properties") - .startObject("count").field("type", "long").endObject() - .startObject("partnumber").field("type", "text").startObject("fields").startObject("keyword") - .field("type", "keyword").field("ignore_above", 256) - .endObject().endObject().endObject() - .endObject().endObject().endObject().endObject(); - assertAcked(client().admin().indices().prepareCreate("foo_2") - .setMapping(builder).get()); + XContentBuilder builder = jsonBuilder().startObject() + .startObject("properties") + .startObject("@timestamp") + .field("type", "date") + .endObject() + .startObject("license") + .startObject("properties") + .startObject("count") + .field("type", "long") + .endObject() + .startObject("partnumber") + .field("type", "text") + .startObject("fields") + .startObject("keyword") + .field("type", "keyword") + .field("ignore_above", 256) + .endObject() + .endObject() + .endObject() + .endObject() + .endObject() + .endObject() + .endObject(); + assertAcked(client().admin().indices().prepareCreate("foo_2").setMapping(builder).get()); XContentBuilder docBuilder = jsonBuilder().startObject() - .startObject("license").field("partnumber", "foobar").field("count", 2).endObject() + .startObject("license") + .field("partnumber", "foobar") + .field("count", 2) + .endObject() .field("@timestamp", "2018-07-08T08:07:00.599Z") .endObject(); @@ -513,11 +548,14 @@ public void testFieldIsntWrittenOutTwice() throws Exception { TermsAggregationBuilder groupByLicenseAgg = AggregationBuilders.terms("group_by_license_partnumber") .field("license.partnumber.keyword"); - MaxBucketPipelineAggregationBuilder peakPipelineAggBuilder = - PipelineAggregatorBuilders.maxBucket("peak", "licenses_per_day>total_licenses"); + MaxBucketPipelineAggregationBuilder peakPipelineAggBuilder = PipelineAggregatorBuilders.maxBucket( + "peak", + "licenses_per_day>total_licenses" + ); SumAggregationBuilder sumAggBuilder = AggregationBuilders.sum("total_licenses").field("license.count"); - DateHistogramAggregationBuilder licensePerDayBuilder = - AggregationBuilders.dateHistogram("licenses_per_day").field("@timestamp").fixedInterval(DateHistogramInterval.DAY); + DateHistogramAggregationBuilder licensePerDayBuilder = AggregationBuilders.dateHistogram("licenses_per_day") + .field("@timestamp") + .fixedInterval(DateHistogramInterval.DAY); licensePerDayBuilder.subAggregation(sumAggBuilder); groupByLicenseAgg.subAggregation(licensePerDayBuilder); groupByLicenseAgg.subAggregation(peakPipelineAggBuilder); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/pipeline/MinBucketIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/pipeline/MinBucketIT.java index 91bec4a323465..b327164ff5868 100644 --- 
a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/pipeline/MinBucketIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/pipeline/MinBucketIT.java @@ -10,13 +10,13 @@ import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.search.aggregations.BucketOrder; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram.Bucket; -import org.elasticsearch.search.aggregations.bucket.terms.Terms; import org.elasticsearch.search.aggregations.bucket.terms.IncludeExclude; +import org.elasticsearch.search.aggregations.bucket.terms.Terms; import org.elasticsearch.search.aggregations.metrics.Sum; import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy; -import org.elasticsearch.search.aggregations.BucketOrder; import org.elasticsearch.test.ESIntegTestCase; import java.util.ArrayList; @@ -47,8 +47,7 @@ public class MinBucketIT extends ESIntegTestCase { @Override public void setupSuiteScopeCluster() throws Exception { - assertAcked(client().admin().indices().prepareCreate("idx") - .setMapping("tag", "type=keyword").get()); + assertAcked(client().admin().indices().prepareCreate("idx").setMapping("tag", "type=keyword").get()); createIndex("idx_unmapped"); numDocs = randomIntBetween(6, 20); @@ -64,17 +63,26 @@ public void setupSuiteScopeCluster() throws Exception { for (int i = 0; i < numDocs; i++) { int fieldValue = randomIntBetween(minRandomValue, maxRandomValue); - builders.add(client().prepareIndex("idx").setSource( - jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, fieldValue).field("tag", "tag" + (i % interval)) - .endObject())); + builders.add( + client().prepareIndex("idx") + .setSource( + jsonBuilder().startObject() + .field(SINGLE_VALUED_FIELD_NAME, fieldValue) + .field("tag", "tag" + (i % interval)) + .endObject() + ) + ); final int bucket = (fieldValue / interval); // + (fieldValue < 0 ? 
-1 : 0) - (minRandomValue / interval - 1); valueCounts[bucket]++; } assertAcked(prepareCreate("empty_bucket_idx").setMapping(SINGLE_VALUED_FIELD_NAME, "type=integer")); for (int i = 0; i < 2; i++) { - builders.add(client().prepareIndex("empty_bucket_idx").setId("" + i).setSource( - jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, i * 2).endObject())); + builders.add( + client().prepareIndex("empty_bucket_idx") + .setId("" + i) + .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, i * 2).endObject()) + ); } indexRandom(true, builders); ensureSearchable(); @@ -82,9 +90,11 @@ public void setupSuiteScopeCluster() throws Exception { public void testDocCountTopLevel() throws Exception { SearchResponse response = client().prepareSearch("idx") - .addAggregation(histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval) - .extendedBounds(minRandomValue, maxRandomValue)) - .addAggregation(minBucket("min_bucket", "histo>_count")).get(); + .addAggregation( + histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval).extendedBounds(minRandomValue, maxRandomValue) + ) + .addAggregation(minBucket("min_bucket", "histo>_count")) + .get(); assertSearchResponse(response); @@ -118,16 +128,16 @@ public void testDocCountTopLevel() throws Exception { } public void testDocCountAsSubAgg() throws Exception { - SearchResponse response = client() - .prepareSearch("idx") - .addAggregation( - terms("terms") - .field("tag") - .order(BucketOrder.key(true)) - .subAggregation( - histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval) - .extendedBounds(minRandomValue, maxRandomValue)) - .subAggregation(minBucket("min_bucket", "histo>_count"))).get(); + SearchResponse response = client().prepareSearch("idx") + .addAggregation( + terms("terms").field("tag") + .order(BucketOrder.key(true)) + .subAggregation( + histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval).extendedBounds(minRandomValue, maxRandomValue) + ) + .subAggregation(minBucket("min_bucket", "histo>_count")) + ) + .get(); assertSearchResponse(response); @@ -171,10 +181,10 @@ public void testDocCountAsSubAgg() throws Exception { } public void testMetricTopLevel() throws Exception { - SearchResponse response = client() - .prepareSearch("idx") - .addAggregation(terms("terms").field("tag").subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME))) - .addAggregation(minBucket("min_bucket", "terms>sum")).get(); + SearchResponse response = client().prepareSearch("idx") + .addAggregation(terms("terms").field("tag").subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME))) + .addAggregation(minBucket("min_bucket", "terms>sum")) + .get(); assertSearchResponse(response); @@ -210,17 +220,19 @@ public void testMetricTopLevel() throws Exception { } public void testMetricAsSubAgg() throws Exception { - SearchResponse response = client() - .prepareSearch("idx") - .addAggregation( - terms("terms") - .field("tag") - .order(BucketOrder.key(true)) - .subAggregation( - histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval) - .extendedBounds(minRandomValue, maxRandomValue) - .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME))) - .subAggregation(minBucket("min_bucket", "histo>sum"))).get(); + SearchResponse response = client().prepareSearch("idx") + .addAggregation( + terms("terms").field("tag") + .order(BucketOrder.key(true)) + .subAggregation( + histogram("histo").field(SINGLE_VALUED_FIELD_NAME) + .interval(interval) + .extendedBounds(minRandomValue, maxRandomValue) + 
.subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME)) + ) + .subAggregation(minBucket("min_bucket", "histo>sum")) + ) + .get(); assertSearchResponse(response); @@ -268,18 +280,19 @@ public void testMetricAsSubAgg() throws Exception { } public void testMetricAsSubAggWithInsertZeros() throws Exception { - SearchResponse response = client() - .prepareSearch("idx") - .addAggregation( - terms("terms") - .field("tag") - .order(BucketOrder.key(true)) - .subAggregation( - histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval) - .extendedBounds(minRandomValue, maxRandomValue) - .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME))) - .subAggregation(minBucket("min_bucket", "histo>sum").gapPolicy(GapPolicy.INSERT_ZEROS))) - .get(); + SearchResponse response = client().prepareSearch("idx") + .addAggregation( + terms("terms").field("tag") + .order(BucketOrder.key(true)) + .subAggregation( + histogram("histo").field(SINGLE_VALUED_FIELD_NAME) + .interval(interval) + .extendedBounds(minRandomValue, maxRandomValue) + .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME)) + ) + .subAggregation(minBucket("min_bucket", "histo>sum").gapPolicy(GapPolicy.INSERT_ZEROS)) + ) + .get(); assertSearchResponse(response); @@ -326,9 +339,13 @@ public void testMetricAsSubAggWithInsertZeros() throws Exception { public void testNoBuckets() throws Exception { SearchResponse response = client().prepareSearch("idx") - .addAggregation(terms("terms").field("tag").includeExclude(new IncludeExclude(null, "tag.*")) - .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME))) - .addAggregation(minBucket("min_bucket", "terms>sum")).get(); + .addAggregation( + terms("terms").field("tag") + .includeExclude(new IncludeExclude(null, "tag.*")) + .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME)) + ) + .addAggregation(minBucket("min_bucket", "terms>sum")) + .get(); assertSearchResponse(response); @@ -346,17 +363,17 @@ public void testNoBuckets() throws Exception { } public void testNested() throws Exception { - SearchResponse response = client() - .prepareSearch("idx") - .addAggregation( - terms("terms") - .field("tag") - .order(BucketOrder.key(true)) - .subAggregation( - histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval) - .extendedBounds(minRandomValue, maxRandomValue)) - .subAggregation(minBucket("min_histo_bucket", "histo>_count"))) - .addAggregation(minBucket("min_terms_bucket", "terms>min_histo_bucket")).get(); + SearchResponse response = client().prepareSearch("idx") + .addAggregation( + terms("terms").field("tag") + .order(BucketOrder.key(true)) + .subAggregation( + histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval).extendedBounds(minRandomValue, maxRandomValue) + ) + .subAggregation(minBucket("min_histo_bucket", "histo>_count")) + ) + .addAggregation(minBucket("min_terms_bucket", "terms>min_histo_bucket")) + .get(); assertSearchResponse(response); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/pipeline/PercentilesBucketIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/pipeline/PercentilesBucketIT.java index 8c243a1740a4a..69e0fa84f9086 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/pipeline/PercentilesBucketIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/pipeline/PercentilesBucketIT.java @@ -41,7 +41,7 @@ public class PercentilesBucketIT extends ESIntegTestCase { private static final String 
SINGLE_VALUED_FIELD_NAME = "l_value"; - private static final double[] PERCENTS = {0.0, 1.0, 25.0, 50.0, 75.0, 99.0, 100.0}; + private static final double[] PERCENTS = { 0.0, 1.0, 25.0, 50.0, 75.0, 99.0, 100.0 }; static int numDocs; static int interval; static int minRandomValue; @@ -51,8 +51,7 @@ public class PercentilesBucketIT extends ESIntegTestCase { @Override public void setupSuiteScopeCluster() throws Exception { - assertAcked(client().admin().indices().prepareCreate("idx") - .setMapping("tag", "type=keyword").get()); + assertAcked(client().admin().indices().prepareCreate("idx").setMapping("tag", "type=keyword").get()); createIndex("idx_unmapped"); numDocs = randomIntBetween(6, 20); @@ -68,17 +67,26 @@ public void setupSuiteScopeCluster() throws Exception { for (int i = 0; i < numDocs; i++) { int fieldValue = randomIntBetween(minRandomValue, maxRandomValue); - builders.add(client().prepareIndex("idx").setSource( - jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, fieldValue).field("tag", "tag" + (i % interval)) - .endObject())); + builders.add( + client().prepareIndex("idx") + .setSource( + jsonBuilder().startObject() + .field(SINGLE_VALUED_FIELD_NAME, fieldValue) + .field("tag", "tag" + (i % interval)) + .endObject() + ) + ); final int bucket = (fieldValue / interval); // + (fieldValue < 0 ? -1 : 0) - (minRandomValue / interval - 1); valueCounts[bucket]++; } assertAcked(prepareCreate("empty_bucket_idx").setMapping(SINGLE_VALUED_FIELD_NAME, "type=integer")); for (int i = 0; i < 2; i++) { - builders.add(client().prepareIndex("empty_bucket_idx").setId("" + i).setSource( - jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, i * 2).endObject())); + builders.add( + client().prepareIndex("empty_bucket_idx") + .setId("" + i) + .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, i * 2).endObject()) + ); } indexRandom(true, builders); ensureSearchable(); @@ -86,10 +94,11 @@ public void setupSuiteScopeCluster() throws Exception { public void testDocCountopLevel() throws Exception { SearchResponse response = client().prepareSearch("idx") - .addAggregation(histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval) - .extendedBounds(minRandomValue, maxRandomValue)) - .addAggregation(percentilesBucket("percentiles_bucket", "histo>_count") - .setPercents(PERCENTS)).get(); + .addAggregation( + histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval).extendedBounds(minRandomValue, maxRandomValue) + ) + .addAggregation(percentilesBucket("percentiles_bucket", "histo>_count").setPercents(PERCENTS)) + .get(); assertSearchResponse(response); @@ -117,17 +126,16 @@ public void testDocCountopLevel() throws Exception { } public void testDocCountAsSubAgg() throws Exception { - SearchResponse response = client() - .prepareSearch("idx") - .addAggregation( - terms("terms") - .field("tag") - .order(BucketOrder.key(true)) - .subAggregation( - histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval) - .extendedBounds(minRandomValue, maxRandomValue)) - .subAggregation(percentilesBucket("percentiles_bucket", "histo>_count") - .setPercents(PERCENTS))).get(); + SearchResponse response = client().prepareSearch("idx") + .addAggregation( + terms("terms").field("tag") + .order(BucketOrder.key(true)) + .subAggregation( + histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval).extendedBounds(minRandomValue, maxRandomValue) + ) + .subAggregation(percentilesBucket("percentiles_bucket", "histo>_count").setPercents(PERCENTS)) + ) + .get(); 
assertSearchResponse(response); @@ -165,11 +173,10 @@ public void testDocCountAsSubAgg() throws Exception { } public void testMetricTopLevel() throws Exception { - SearchResponse response = client() - .prepareSearch("idx") - .addAggregation(terms("terms").field("tag").subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME))) - .addAggregation(percentilesBucket("percentiles_bucket", "terms>sum") - .setPercents(PERCENTS)).get(); + SearchResponse response = client().prepareSearch("idx") + .addAggregation(terms("terms").field("tag").subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME))) + .addAggregation(percentilesBucket("percentiles_bucket", "terms>sum").setPercents(PERCENTS)) + .get(); assertSearchResponse(response); @@ -199,10 +206,10 @@ public void testMetricTopLevel() throws Exception { } public void testMetricTopLevelDefaultPercents() throws Exception { - SearchResponse response = client() - .prepareSearch("idx") - .addAggregation(terms("terms").field("tag").subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME))) - .addAggregation(percentilesBucket("percentiles_bucket", "terms>sum")).get(); + SearchResponse response = client().prepareSearch("idx") + .addAggregation(terms("terms").field("tag").subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME))) + .addAggregation(percentilesBucket("percentiles_bucket", "terms>sum")) + .get(); assertSearchResponse(response); @@ -232,18 +239,19 @@ public void testMetricTopLevelDefaultPercents() throws Exception { } public void testMetricAsSubAgg() throws Exception { - SearchResponse response = client() - .prepareSearch("idx") - .addAggregation( - terms("terms") - .field("tag") - .order(BucketOrder.key(true)) - .subAggregation( - histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval) - .extendedBounds(minRandomValue, maxRandomValue) - .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME))) - .subAggregation(percentilesBucket("percentiles_bucket", "histo>sum") - .setPercents(PERCENTS))).get(); + SearchResponse response = client().prepareSearch("idx") + .addAggregation( + terms("terms").field("tag") + .order(BucketOrder.key(true)) + .subAggregation( + histogram("histo").field(SINGLE_VALUED_FIELD_NAME) + .interval(interval) + .extendedBounds(minRandomValue, maxRandomValue) + .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME)) + ) + .subAggregation(percentilesBucket("percentiles_bucket", "histo>sum").setPercents(PERCENTS)) + ) + .get(); assertSearchResponse(response); @@ -285,20 +293,22 @@ public void testMetricAsSubAgg() throws Exception { } public void testMetricAsSubAggWithInsertZeros() throws Exception { - SearchResponse response = client() - .prepareSearch("idx") - .addAggregation( - terms("terms") - .field("tag") - .order(BucketOrder.key(true)) - .subAggregation( - histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval) - .extendedBounds(minRandomValue, maxRandomValue) - .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME))) - .subAggregation(percentilesBucket("percentiles_bucket", "histo>sum") - .gapPolicy(BucketHelpers.GapPolicy.INSERT_ZEROS) - .setPercents(PERCENTS))) - .get(); + SearchResponse response = client().prepareSearch("idx") + .addAggregation( + terms("terms").field("tag") + .order(BucketOrder.key(true)) + .subAggregation( + histogram("histo").field(SINGLE_VALUED_FIELD_NAME) + .interval(interval) + .extendedBounds(minRandomValue, maxRandomValue) + .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME)) + ) + .subAggregation( + percentilesBucket("percentiles_bucket", 
"histo>sum").gapPolicy(BucketHelpers.GapPolicy.INSERT_ZEROS) + .setPercents(PERCENTS) + ) + ) + .get(); assertSearchResponse(response); @@ -340,10 +350,13 @@ public void testMetricAsSubAggWithInsertZeros() throws Exception { public void testNoBuckets() throws Exception { SearchResponse response = client().prepareSearch("idx") - .addAggregation(terms("terms").field("tag").includeExclude(new IncludeExclude(null, "tag.*")) - .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME))) - .addAggregation(percentilesBucket("percentiles_bucket", "terms>sum") - .setPercents(PERCENTS)).get(); + .addAggregation( + terms("terms").field("tag") + .includeExclude(new IncludeExclude(null, "tag.*")) + .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME)) + ) + .addAggregation(percentilesBucket("percentiles_bucket", "terms>sum").setPercents(PERCENTS)) + .get(); assertSearchResponse(response); @@ -363,10 +376,13 @@ public void testNoBuckets() throws Exception { public void testWrongPercents() throws Exception { SearchResponse response = client().prepareSearch("idx") - .addAggregation(terms("terms").field("tag").includeExclude(new IncludeExclude(null, "tag.*")) - .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME))) - .addAggregation(percentilesBucket("percentiles_bucket", "terms>sum") - .setPercents(PERCENTS)).get(); + .addAggregation( + terms("terms").field("tag") + .includeExclude(new IncludeExclude(null, "tag.*")) + .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME)) + ) + .addAggregation(percentilesBucket("percentiles_bucket", "terms>sum").setPercents(PERCENTS)) + .get(); assertSearchResponse(response); @@ -389,13 +405,13 @@ public void testWrongPercents() throws Exception { } public void testBadPercents() throws Exception { - double[] badPercents = {-1.0, 110.0}; + double[] badPercents = { -1.0, 110.0 }; try { client().prepareSearch("idx") - .addAggregation(terms("terms").field("tag").subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME))) - .addAggregation(percentilesBucket("percentiles_bucket", "terms>sum") - .setPercents(badPercents)).get(); + .addAggregation(terms("terms").field("tag").subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME))) + .addAggregation(percentilesBucket("percentiles_bucket", "terms>sum").setPercents(badPercents)) + .get(); fail("Illegal percent's were provided but no exception was thrown."); } catch (Exception e) { @@ -416,20 +432,21 @@ public void testBadPercents() throws Exception { } public void testBadPercents_asSubAgg() throws Exception { - double[] badPercents = {-1.0, 110.0}; + double[] badPercents = { -1.0, 110.0 }; try { - client() - .prepareSearch("idx") + client().prepareSearch("idx") .addAggregation( - terms("terms") - .field("tag") - .order(BucketOrder.key(true)) - .subAggregation( - histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval) - .extendedBounds(minRandomValue, maxRandomValue)) - .subAggregation(percentilesBucket("percentiles_bucket", "histo>_count") - .setPercents(badPercents))).get(); + terms("terms").field("tag") + .order(BucketOrder.key(true)) + .subAggregation( + histogram("histo").field(SINGLE_VALUED_FIELD_NAME) + .interval(interval) + .extendedBounds(minRandomValue, maxRandomValue) + ) + .subAggregation(percentilesBucket("percentiles_bucket", "histo>_count").setPercents(badPercents)) + ) + .get(); fail("Illegal percent's were provided but no exception was thrown."); } catch (Exception e) { @@ -450,18 +467,17 @@ public void testBadPercents_asSubAgg() throws Exception { } public void testNested() throws 
Exception { - SearchResponse response = client() - .prepareSearch("idx") - .addAggregation( - terms("terms") - .field("tag") - .order(BucketOrder.key(true)) - .subAggregation( - histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval) - .extendedBounds(minRandomValue, maxRandomValue)) - .subAggregation(percentilesBucket("percentile_histo_bucket", "histo>_count").setPercents(PERCENTS))) - .addAggregation(percentilesBucket("percentile_terms_bucket", "terms>percentile_histo_bucket.50") - .setPercents(PERCENTS)).get(); + SearchResponse response = client().prepareSearch("idx") + .addAggregation( + terms("terms").field("tag") + .order(BucketOrder.key(true)) + .subAggregation( + histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval).extendedBounds(minRandomValue, maxRandomValue) + ) + .subAggregation(percentilesBucket("percentile_histo_bucket", "histo>_count").setPercents(PERCENTS)) + ) + .addAggregation(percentilesBucket("percentile_terms_bucket", "terms>percentile_histo_bucket.50").setPercents(PERCENTS)) + .get(); assertSearchResponse(response); @@ -508,20 +524,18 @@ public void testNested() throws Exception { } public void testNestedWithDecimal() throws Exception { - double[] percent = {99.9}; - SearchResponse response = client() - .prepareSearch("idx") - .addAggregation( - terms("terms") - .field("tag") - .order(BucketOrder.key(true)) - .subAggregation( - histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval) - .extendedBounds(minRandomValue, maxRandomValue)) - .subAggregation(percentilesBucket("percentile_histo_bucket", "histo>_count") - .setPercents(percent))) - .addAggregation(percentilesBucket("percentile_terms_bucket", "terms>percentile_histo_bucket[99.9]") - .setPercents(percent)).get(); + double[] percent = { 99.9 }; + SearchResponse response = client().prepareSearch("idx") + .addAggregation( + terms("terms").field("tag") + .order(BucketOrder.key(true)) + .subAggregation( + histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval).extendedBounds(minRandomValue, maxRandomValue) + ) + .subAggregation(percentilesBucket("percentile_histo_bucket", "histo>_count").setPercents(percent)) + ) + .addAggregation(percentilesBucket("percentile_terms_bucket", "terms>percentile_histo_bucket[99.9]").setPercents(percent)) + .get(); assertSearchResponse(response); @@ -565,7 +579,7 @@ public void testNestedWithDecimal() throws Exception { assertThat(percentilesBucketValue, notNullValue()); assertThat(percentilesBucketValue.getName(), equalTo("percentile_terms_bucket")); for (Double p : percent) { - double expected = values[(int)((p / 100) * values.length)]; + double expected = values[(int) ((p / 100) * values.length)]; assertThat(percentilesBucketValue.percentile(p), equalTo(expected)); } } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/pipeline/SerialDiffIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/pipeline/SerialDiffIT.java index 2fd8be334ff3e..c9e7ad2e41ec9 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/pipeline/SerialDiffIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/pipeline/SerialDiffIT.java @@ -51,7 +51,8 @@ public class SerialDiffIT extends ESIntegTestCase { static Map> testValues; enum MetricTarget { - VALUE ("value"), COUNT("count"); + VALUE("value"), + COUNT("count"); private final String name; @@ -60,14 +61,13 @@ enum MetricTarget { } @Override - public String toString(){ + public 
String toString() { return name; } } - private ValuesSourceAggregationBuilder< - ? extends ValuesSourceAggregationBuilder> randomMetric(String name, String field) { - int rand = randomIntBetween(0,3); + private ValuesSourceAggregationBuilder> randomMetric(String name, String field) { + int rand = randomIntBetween(0, 3); switch (rand) { case 0: @@ -100,8 +100,11 @@ private void assertBucketContents(Histogram.Bucket actual, Double expectedCount, assertThat("[_count] diff is not null", countDiff, nullValue()); } else { assertThat("[_count] diff is null", countDiff, notNullValue()); - assertThat("[_count] diff does not match expected [" + countDiff.value() + " vs " + expectedCount + "]", - countDiff.value(), closeTo(expectedCount, 0.1)); + assertThat( + "[_count] diff does not match expected [" + countDiff.value() + " vs " + expectedCount + "]", + countDiff.value(), + closeTo(expectedCount, 0.1) + ); } // This is a gap bucket @@ -110,19 +113,20 @@ private void assertBucketContents(Histogram.Bucket actual, Double expectedCount, assertThat("[value] diff is not null", valuesDiff, Matchers.nullValue()); } else { assertThat("[value] diff is null", valuesDiff, notNullValue()); - assertThat("[value] diff does not match expected [" + valuesDiff.value() + " vs " + expectedValue + "]", - valuesDiff.value(), closeTo(expectedValue, 0.1)); + assertThat( + "[value] diff does not match expected [" + valuesDiff.value() + " vs " + expectedValue + "]", + valuesDiff.value(), + closeTo(expectedValue, 0.1) + ); } } - @Override public void setupSuiteScopeCluster() throws Exception { createIndex("idx"); createIndex("idx_unmapped"); List builders = new ArrayList<>(); - interval = 5; numBuckets = randomIntBetween(10, 80); lag = randomIntBetween(1, numBuckets / 2); @@ -139,9 +143,10 @@ public void setupSuiteScopeCluster() throws Exception { for (PipelineAggregationHelperTests.MockBucket mockBucket : mockHisto) { for (double value : mockBucket.docValues) { - builders.add(client().prepareIndex("idx").setSource(jsonBuilder().startObject() - .field(INTERVAL_FIELD, mockBucket.key) - .field(VALUE_FIELD, value).endObject())); + builders.add( + client().prepareIndex("idx") + .setSource(jsonBuilder().startObject().field(INTERVAL_FIELD, mockBucket.key).field(VALUE_FIELD, value).endObject()) + ); } } @@ -216,19 +221,16 @@ private void setupExpected(MetricTarget target) { } public void testBasicDiff() { - SearchResponse response = client() - .prepareSearch("idx") - .addAggregation( - histogram("histo").field(INTERVAL_FIELD).interval(interval) - .extendedBounds(0L, (long) (interval * (numBuckets - 1))) - .subAggregation(metric) - .subAggregation(diff("diff_counts", "_count") - .lag(lag) - .gapPolicy(gapPolicy)) - .subAggregation(diff("diff_values", "the_metric") - .lag(lag) - .gapPolicy(gapPolicy)) - ).get(); + SearchResponse response = client().prepareSearch("idx") + .addAggregation( + histogram("histo").field(INTERVAL_FIELD) + .interval(interval) + .extendedBounds(0L, (long) (interval * (numBuckets - 1))) + .subAggregation(metric) + .subAggregation(diff("diff_counts", "_count").lag(lag).gapPolicy(gapPolicy)) + .subAggregation(diff("diff_values", "the_metric").lag(lag).gapPolicy(gapPolicy)) + ) + .get(); assertSearchResponse(response); @@ -255,7 +257,7 @@ public void testBasicDiff() { Double expectedValue = expectedValuesIter.next(); assertThat("keys do not match", ((Number) actual.getKey()).longValue(), equalTo(expected.key)); - assertThat("doc counts do not match", actual.getDocCount(), equalTo((long)expected.count)); + 
assertThat("doc counts do not match", actual.getDocCount(), equalTo((long) expected.count)); assertBucketContents(actual, expectedCount, expectedValue); } @@ -263,16 +265,15 @@ public void testBasicDiff() { public void testInvalidLagSize() { try { - client() - .prepareSearch("idx") + client().prepareSearch("idx") .addAggregation( - histogram("histo").field(INTERVAL_FIELD).interval(interval) - .extendedBounds(0L, (long) (interval * (numBuckets - 1))) - .subAggregation(metric) - .subAggregation(diff("diff_counts", "_count") - .lag(-1) - .gapPolicy(gapPolicy)) - ).get(); + histogram("histo").field(INTERVAL_FIELD) + .interval(interval) + .extendedBounds(0L, (long) (interval * (numBuckets - 1))) + .subAggregation(metric) + .subAggregation(diff("diff_counts", "_count").lag(-1).gapPolicy(gapPolicy)) + ) + .get(); } catch (IllegalArgumentException e) { assertThat(e.getMessage(), is("[lag] must be a positive integer: [diff_counts]")); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/pipeline/StatsBucketIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/pipeline/StatsBucketIT.java index b95895c21779e..ab2700005e785 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/pipeline/StatsBucketIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/pipeline/StatsBucketIT.java @@ -47,8 +47,7 @@ public class StatsBucketIT extends ESIntegTestCase { @Override public void setupSuiteScopeCluster() throws Exception { - assertAcked(client().admin().indices().prepareCreate("idx") - .setMapping("tag", "type=keyword").get()); + assertAcked(client().admin().indices().prepareCreate("idx").setMapping("tag", "type=keyword").get()); createIndex("idx_unmapped"); numDocs = randomIntBetween(6, 20); @@ -64,17 +63,26 @@ public void setupSuiteScopeCluster() throws Exception { for (int i = 0; i < numDocs; i++) { int fieldValue = randomIntBetween(minRandomValue, maxRandomValue); - builders.add(client().prepareIndex("idx").setSource( - jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, fieldValue).field("tag", "tag" + (i % interval)) - .endObject())); + builders.add( + client().prepareIndex("idx") + .setSource( + jsonBuilder().startObject() + .field(SINGLE_VALUED_FIELD_NAME, fieldValue) + .field("tag", "tag" + (i % interval)) + .endObject() + ) + ); final int bucket = (fieldValue / interval); // + (fieldValue < 0 ? 
-1 : 0) - (minRandomValue / interval - 1); valueCounts[bucket]++; } assertAcked(prepareCreate("empty_bucket_idx").setMapping(SINGLE_VALUED_FIELD_NAME, "type=integer")); for (int i = 0; i < 2; i++) { - builders.add(client().prepareIndex("empty_bucket_idx").setId("" + i).setSource( - jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, i * 2).endObject())); + builders.add( + client().prepareIndex("empty_bucket_idx") + .setId("" + i) + .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, i * 2).endObject()) + ); } indexRandom(true, builders); ensureSearchable(); @@ -82,9 +90,11 @@ public void setupSuiteScopeCluster() throws Exception { public void testDocCountTopLevel() throws Exception { SearchResponse response = client().prepareSearch("idx") - .addAggregation(histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval) - .extendedBounds(minRandomValue, maxRandomValue)) - .addAggregation(statsBucket("stats_bucket", "histo>_count")).get(); + .addAggregation( + histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval).extendedBounds(minRandomValue, maxRandomValue) + ) + .addAggregation(statsBucket("stats_bucket", "histo>_count")) + .get(); assertSearchResponse(response); @@ -119,16 +129,16 @@ public void testDocCountTopLevel() throws Exception { } public void testDocCountAsSubAgg() throws Exception { - SearchResponse response = client() - .prepareSearch("idx") - .addAggregation( - terms("terms") - .field("tag") - .order(BucketOrder.key(true)) - .subAggregation( - histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval) - .extendedBounds(minRandomValue, maxRandomValue)) - .subAggregation(statsBucket("stats_bucket", "histo>_count"))).get(); + SearchResponse response = client().prepareSearch("idx") + .addAggregation( + terms("terms").field("tag") + .order(BucketOrder.key(true)) + .subAggregation( + histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval).extendedBounds(minRandomValue, maxRandomValue) + ) + .subAggregation(statsBucket("stats_bucket", "histo>_count")) + ) + .get(); assertSearchResponse(response); @@ -173,10 +183,10 @@ public void testDocCountAsSubAgg() throws Exception { } public void testMetricTopLevel() throws Exception { - SearchResponse response = client() - .prepareSearch("idx") - .addAggregation(terms("terms").field("tag").subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME))) - .addAggregation(statsBucket("stats_bucket", "terms>sum")).get(); + SearchResponse response = client().prepareSearch("idx") + .addAggregation(terms("terms").field("tag").subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME))) + .addAggregation(statsBucket("stats_bucket", "terms>sum")) + .get(); assertSearchResponse(response); @@ -213,17 +223,19 @@ public void testMetricTopLevel() throws Exception { } public void testMetricAsSubAgg() throws Exception { - SearchResponse response = client() - .prepareSearch("idx") - .addAggregation( - terms("terms") - .field("tag") - .order(BucketOrder.key(true)) - .subAggregation( - histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval) - .extendedBounds(minRandomValue, maxRandomValue) - .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME))) - .subAggregation(statsBucket("stats_bucket", "histo>sum"))).get(); + SearchResponse response = client().prepareSearch("idx") + .addAggregation( + terms("terms").field("tag") + .order(BucketOrder.key(true)) + .subAggregation( + histogram("histo").field(SINGLE_VALUED_FIELD_NAME) + .interval(interval) + .extendedBounds(minRandomValue, 
maxRandomValue) + .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME)) + ) + .subAggregation(statsBucket("stats_bucket", "histo>sum")) + ) + .get(); assertSearchResponse(response); @@ -272,18 +284,19 @@ public void testMetricAsSubAgg() throws Exception { } public void testMetricAsSubAggWithInsertZeros() throws Exception { - SearchResponse response = client() - .prepareSearch("idx") - .addAggregation( - terms("terms") - .field("tag") - .order(BucketOrder.key(true)) - .subAggregation( - histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval) - .extendedBounds(minRandomValue, maxRandomValue) - .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME))) - .subAggregation(statsBucket("stats_bucket", "histo>sum").gapPolicy(GapPolicy.INSERT_ZEROS))) - .get(); + SearchResponse response = client().prepareSearch("idx") + .addAggregation( + terms("terms").field("tag") + .order(BucketOrder.key(true)) + .subAggregation( + histogram("histo").field(SINGLE_VALUED_FIELD_NAME) + .interval(interval) + .extendedBounds(minRandomValue, maxRandomValue) + .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME)) + ) + .subAggregation(statsBucket("stats_bucket", "histo>sum").gapPolicy(GapPolicy.INSERT_ZEROS)) + ) + .get(); assertSearchResponse(response); @@ -332,9 +345,13 @@ public void testMetricAsSubAggWithInsertZeros() throws Exception { public void testNoBuckets() throws Exception { SearchResponse response = client().prepareSearch("idx") - .addAggregation(terms("terms").field("tag").includeExclude(new IncludeExclude(null, "tag.*")) - .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME))) - .addAggregation(statsBucket("stats_bucket", "terms>sum")).get(); + .addAggregation( + terms("terms").field("tag") + .includeExclude(new IncludeExclude(null, "tag.*")) + .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME)) + ) + .addAggregation(statsBucket("stats_bucket", "terms>sum")) + .get(); assertSearchResponse(response); @@ -351,17 +368,17 @@ public void testNoBuckets() throws Exception { } public void testNested() throws Exception { - SearchResponse response = client() - .prepareSearch("idx") - .addAggregation( - terms("terms") - .field("tag") - .order(BucketOrder.key(true)) - .subAggregation( - histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval) - .extendedBounds(minRandomValue, maxRandomValue)) - .subAggregation(statsBucket("avg_histo_bucket", "histo>_count"))) - .addAggregation(statsBucket("avg_terms_bucket", "terms>avg_histo_bucket.avg")).get(); + SearchResponse response = client().prepareSearch("idx") + .addAggregation( + terms("terms").field("tag") + .order(BucketOrder.key(true)) + .subAggregation( + histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval).extendedBounds(minRandomValue, maxRandomValue) + ) + .subAggregation(statsBucket("avg_histo_bucket", "histo>_count")) + ) + .addAggregation(statsBucket("avg_terms_bucket", "terms>avg_histo_bucket.avg")) + .get(); assertSearchResponse(response); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/pipeline/SumBucketIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/pipeline/SumBucketIT.java index 355c8702922ad..6cf7799ca2508 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/pipeline/SumBucketIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/pipeline/SumBucketIT.java @@ -47,8 +47,7 @@ public class SumBucketIT extends ESIntegTestCase { @Override public void 
setupSuiteScopeCluster() throws Exception { - assertAcked(client().admin().indices().prepareCreate("idx") - .setMapping("tag", "type=keyword").get()); + assertAcked(client().admin().indices().prepareCreate("idx").setMapping("tag", "type=keyword").get()); createIndex("idx_unmapped"); numDocs = randomIntBetween(6, 20); @@ -64,17 +63,26 @@ public void setupSuiteScopeCluster() throws Exception { for (int i = 0; i < numDocs; i++) { int fieldValue = randomIntBetween(minRandomValue, maxRandomValue); - builders.add(client().prepareIndex("idx").setSource( - jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, fieldValue).field("tag", "tag" + (i % interval)) - .endObject())); + builders.add( + client().prepareIndex("idx") + .setSource( + jsonBuilder().startObject() + .field(SINGLE_VALUED_FIELD_NAME, fieldValue) + .field("tag", "tag" + (i % interval)) + .endObject() + ) + ); final int bucket = (fieldValue / interval); // + (fieldValue < 0 ? -1 : 0) - (minRandomValue / interval - 1); valueCounts[bucket]++; } assertAcked(prepareCreate("empty_bucket_idx").setMapping(SINGLE_VALUED_FIELD_NAME, "type=integer")); for (int i = 0; i < 2; i++) { - builders.add(client().prepareIndex("empty_bucket_idx").setId("" + i).setSource( - jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, i * 2).endObject())); + builders.add( + client().prepareIndex("empty_bucket_idx") + .setId("" + i) + .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, i * 2).endObject()) + ); } indexRandom(true, builders); ensureSearchable(); @@ -82,9 +90,11 @@ public void setupSuiteScopeCluster() throws Exception { public void testDocCountTopLevel() throws Exception { SearchResponse response = client().prepareSearch("idx") - .addAggregation(histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval) - .extendedBounds(minRandomValue, maxRandomValue)) - .addAggregation(sumBucket("sum_bucket", "histo>_count")).get(); + .addAggregation( + histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval).extendedBounds(minRandomValue, maxRandomValue) + ) + .addAggregation(sumBucket("sum_bucket", "histo>_count")) + .get(); assertSearchResponse(response); @@ -110,16 +120,16 @@ public void testDocCountTopLevel() throws Exception { } public void testDocCountAsSubAgg() throws Exception { - SearchResponse response = client() - .prepareSearch("idx") - .addAggregation( - terms("terms") - .field("tag") - .order(BucketOrder.key(true)) - .subAggregation( - histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval) - .extendedBounds(minRandomValue, maxRandomValue)) - .subAggregation(sumBucket("sum_bucket", "histo>_count"))).get(); + SearchResponse response = client().prepareSearch("idx") + .addAggregation( + terms("terms").field("tag") + .order(BucketOrder.key(true)) + .subAggregation( + histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval).extendedBounds(minRandomValue, maxRandomValue) + ) + .subAggregation(sumBucket("sum_bucket", "histo>_count")) + ) + .get(); assertSearchResponse(response); @@ -155,10 +165,10 @@ public void testDocCountAsSubAgg() throws Exception { } public void testMetricTopLevel() throws Exception { - SearchResponse response = client() - .prepareSearch("idx") - .addAggregation(terms("terms").field("tag").subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME))) - .addAggregation(sumBucket("sum_bucket", "terms>sum")).get(); + SearchResponse response = client().prepareSearch("idx") + 
.addAggregation(terms("terms").field("tag").subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME))) + .addAggregation(sumBucket("sum_bucket", "terms>sum")) + .get(); assertSearchResponse(response); @@ -186,17 +196,19 @@ public void testMetricTopLevel() throws Exception { } public void testMetricAsSubAgg() throws Exception { - SearchResponse response = client() - .prepareSearch("idx") - .addAggregation( - terms("terms") - .field("tag") - .order(BucketOrder.key(true)) - .subAggregation( - histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval) - .extendedBounds(minRandomValue, maxRandomValue) - .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME))) - .subAggregation(sumBucket("sum_bucket", "histo>sum"))).get(); + SearchResponse response = client().prepareSearch("idx") + .addAggregation( + terms("terms").field("tag") + .order(BucketOrder.key(true)) + .subAggregation( + histogram("histo").field(SINGLE_VALUED_FIELD_NAME) + .interval(interval) + .extendedBounds(minRandomValue, maxRandomValue) + .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME)) + ) + .subAggregation(sumBucket("sum_bucket", "histo>sum")) + ) + .get(); assertSearchResponse(response); @@ -236,18 +248,19 @@ public void testMetricAsSubAgg() throws Exception { } public void testMetricAsSubAggWithInsertZeros() throws Exception { - SearchResponse response = client() - .prepareSearch("idx") - .addAggregation( - terms("terms") - .field("tag") - .order(BucketOrder.key(true)) - .subAggregation( - histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval) - .extendedBounds(minRandomValue, maxRandomValue) - .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME))) - .subAggregation(sumBucket("sum_bucket", "histo>sum").gapPolicy(GapPolicy.INSERT_ZEROS))) - .get(); + SearchResponse response = client().prepareSearch("idx") + .addAggregation( + terms("terms").field("tag") + .order(BucketOrder.key(true)) + .subAggregation( + histogram("histo").field(SINGLE_VALUED_FIELD_NAME) + .interval(interval) + .extendedBounds(minRandomValue, maxRandomValue) + .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME)) + ) + .subAggregation(sumBucket("sum_bucket", "histo>sum").gapPolicy(GapPolicy.INSERT_ZEROS)) + ) + .get(); assertSearchResponse(response); @@ -287,9 +300,13 @@ public void testMetricAsSubAggWithInsertZeros() throws Exception { public void testNoBuckets() throws Exception { SearchResponse response = client().prepareSearch("idx") - .addAggregation(terms("terms").field("tag").includeExclude(new IncludeExclude(null, "tag.*")) - .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME))) - .addAggregation(sumBucket("sum_bucket", "terms>sum")).get(); + .addAggregation( + terms("terms").field("tag") + .includeExclude(new IncludeExclude(null, "tag.*")) + .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME)) + ) + .addAggregation(sumBucket("sum_bucket", "terms>sum")) + .get(); assertSearchResponse(response); @@ -306,17 +323,17 @@ public void testNoBuckets() throws Exception { } public void testNested() throws Exception { - SearchResponse response = client() - .prepareSearch("idx") - .addAggregation( - terms("terms") - .field("tag") - .order(BucketOrder.key(true)) - .subAggregation( - histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval) - .extendedBounds(minRandomValue, maxRandomValue)) - .subAggregation(sumBucket("sum_histo_bucket", "histo>_count"))) - .addAggregation(sumBucket("sum_terms_bucket", "terms>sum_histo_bucket")).get(); + SearchResponse response = 
client().prepareSearch("idx") + .addAggregation( + terms("terms").field("tag") + .order(BucketOrder.key(true)) + .subAggregation( + histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval).extendedBounds(minRandomValue, maxRandomValue) + ) + .subAggregation(sumBucket("sum_histo_bucket", "histo>_count")) + ) + .addAggregation(sumBucket("sum_terms_bucket", "terms>sum_histo_bucket")) + .get(); assertSearchResponse(response); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/AbstractAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/AbstractAggregationBuilder.java index 6504b3539987b..b28d45a851268 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/AbstractAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/AbstractAggregationBuilder.java @@ -20,8 +20,7 @@ /** * Base implementation of a {@link AggregationBuilder}. */ -public abstract class AbstractAggregationBuilder> - extends AggregationBuilder { +public abstract class AbstractAggregationBuilder> extends AggregationBuilder { protected Map metadata; @@ -34,9 +33,11 @@ public AbstractAggregationBuilder(String name) { super(name); } - protected AbstractAggregationBuilder(AbstractAggregationBuilder clone, - AggregatorFactories.Builder factoriesBuilder, - Map metadata) { + protected AbstractAggregationBuilder( + AbstractAggregationBuilder clone, + AggregatorFactories.Builder factoriesBuilder, + Map metadata + ) { super(clone, factoriesBuilder); this.metadata = metadata; } @@ -130,8 +131,11 @@ public final AggregatorFactory build(AggregationContext context, AggregatorFacto return factory; } - protected abstract AggregatorFactory doBuild(AggregationContext context, AggregatorFactory parent, - AggregatorFactories.Builder subfactoriesBuilder) throws IOException; + protected abstract AggregatorFactory doBuild( + AggregationContext context, + AggregatorFactory parent, + AggregatorFactories.Builder subfactoriesBuilder + ) throws IOException; @Override public final XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/AggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/AggregationBuilder.java index 0c593c423f7a6..3794b7526f4e5 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/AggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/AggregationBuilder.java @@ -7,10 +7,9 @@ */ package org.elasticsearch.search.aggregations; - -import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.NamedWriteable; +import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.xcontent.ToXContentFragment; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.query.QueryRewriteContext; @@ -29,7 +28,11 @@ * A factory that knows how to create an {@link Aggregator} of a specific type. */ public abstract class AggregationBuilder - implements NamedWriteable, ToXContentFragment, BaseAggregationBuilder, Rewriteable { + implements + NamedWriteable, + ToXContentFragment, + BaseAggregationBuilder, + Rewriteable { public static final long DEFAULT_PREALLOCATION = 1024 * 6; protected final String name; @@ -154,8 +157,11 @@ public PipelineTree buildPipelineTree() { * instead of per parent bucket. 
*/ public enum BucketCardinality { - NONE, ONE, MANY; + NONE, + ONE, + MANY; } + /** * A rough count of the number of buckets that {@link Aggregator}s built * by this builder will contain per owning parent bucket. diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/AggregationBuilders.java b/server/src/main/java/org/elasticsearch/search/aggregations/AggregationBuilders.java index 2343b12fbfb55..73201770a10e8 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/AggregationBuilders.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/AggregationBuilders.java @@ -19,10 +19,10 @@ import org.elasticsearch.search.aggregations.bucket.filter.Filters; import org.elasticsearch.search.aggregations.bucket.filter.FiltersAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.filter.FiltersAggregator.KeyedFilter; -import org.elasticsearch.search.aggregations.bucket.geogrid.InternalGeoHashGrid; import org.elasticsearch.search.aggregations.bucket.geogrid.GeoHashGridAggregationBuilder; -import org.elasticsearch.search.aggregations.bucket.geogrid.InternalGeoTileGrid; import org.elasticsearch.search.aggregations.bucket.geogrid.GeoTileGridAggregationBuilder; +import org.elasticsearch.search.aggregations.bucket.geogrid.InternalGeoHashGrid; +import org.elasticsearch.search.aggregations.bucket.geogrid.InternalGeoTileGrid; import org.elasticsearch.search.aggregations.bucket.global.Global; import org.elasticsearch.search.aggregations.bucket.global.GlobalAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder; @@ -51,12 +51,16 @@ import org.elasticsearch.search.aggregations.metrics.AvgAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.Cardinality; import org.elasticsearch.search.aggregations.metrics.CardinalityAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.ExtendedStats; +import org.elasticsearch.search.aggregations.metrics.ExtendedStatsAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.GeoBounds; import org.elasticsearch.search.aggregations.metrics.GeoBoundsAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.GeoCentroid; import org.elasticsearch.search.aggregations.metrics.GeoCentroidAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.Max; import org.elasticsearch.search.aggregations.metrics.MaxAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.MedianAbsoluteDeviation; +import org.elasticsearch.search.aggregations.metrics.MedianAbsoluteDeviationAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.Min; import org.elasticsearch.search.aggregations.metrics.MinAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.PercentileRanks; @@ -67,8 +71,6 @@ import org.elasticsearch.search.aggregations.metrics.ScriptedMetricAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.Stats; import org.elasticsearch.search.aggregations.metrics.StatsAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.ExtendedStats; -import org.elasticsearch.search.aggregations.metrics.ExtendedStatsAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.Sum; import org.elasticsearch.search.aggregations.metrics.SumAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.TopHits; @@ -76,8 +78,6 @@ import org.elasticsearch.search.aggregations.metrics.ValueCount; import 
org.elasticsearch.search.aggregations.metrics.ValueCountAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.WeightedAvgAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.MedianAbsoluteDeviationAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.MedianAbsoluteDeviation; import java.util.List; import java.util.Map; @@ -87,8 +87,7 @@ */ public class AggregationBuilders { - private AggregationBuilders() { - } + private AggregationBuilders() {} /** * Create a new {@link ValueCount} aggregation with the given name. @@ -177,7 +176,7 @@ public static AdjacencyMatrixAggregationBuilder adjacencyMatrix(String name, Map /** * Create a new {@link AdjacencyMatrix} aggregation with the given name and separator */ - public static AdjacencyMatrixAggregationBuilder adjacencyMatrix(String name, String separator, Map filters) { + public static AdjacencyMatrixAggregationBuilder adjacencyMatrix(String name, String separator, Map filters) { return new AdjacencyMatrixAggregationBuilder(name, separator, filters); } @@ -258,7 +257,6 @@ public static SignificantTermsAggregationBuilder significantTerms(String name) { return new SignificantTermsAggregationBuilder(name); } - /** * Create a new {@link SignificantTextAggregationBuilder} aggregation with the given name and text field name */ @@ -266,7 +264,6 @@ public static SignificantTextAggregationBuilder significantText(String name, Str return new SignificantTextAggregationBuilder(name, fieldName); } - /** * Create a new {@link DateHistogramAggregationBuilder} aggregation with the given * name. diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/AggregationExecutionException.java b/server/src/main/java/org/elasticsearch/search/aggregations/AggregationExecutionException.java index 74e42eaf17d1e..2b83979673e3b 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/AggregationExecutionException.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/AggregationExecutionException.java @@ -25,7 +25,7 @@ public AggregationExecutionException(String msg, Throwable cause) { super(msg, cause); } - public AggregationExecutionException(StreamInput in) throws IOException{ + public AggregationExecutionException(StreamInput in) throws IOException { super(in); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/AggregationInitializationException.java b/server/src/main/java/org/elasticsearch/search/aggregations/AggregationInitializationException.java index acd208946fbab..f477cd884f79a 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/AggregationInitializationException.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/AggregationInitializationException.java @@ -25,7 +25,7 @@ public AggregationInitializationException(String msg, Throwable cause) { super(msg, cause); } - public AggregationInitializationException(StreamInput in) throws IOException{ + public AggregationInitializationException(StreamInput in) throws IOException { super(in); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/AggregationPhase.java b/server/src/main/java/org/elasticsearch/search/aggregations/AggregationPhase.java index 25a1d62da2cbe..9116cdc9d1634 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/AggregationPhase.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/AggregationPhase.java @@ -23,8 +23,7 @@ public class AggregationPhase { @Inject - public 
AggregationPhase() { - } + public AggregationPhase() {} public void preProcess(SearchContext context) { if (context.aggregations() == null) { @@ -38,8 +37,9 @@ public void preProcess(SearchContext context) { } catch (IOException e) { throw new AggregationInitializationException("Could not initialize aggregators", e); } - Collector collector = context.getProfilers() == null ? - bucketCollector : new InternalProfileCollector(bucketCollector, CollectorResult.REASON_AGGREGATION, List.of()); + Collector collector = context.getProfilers() == null + ? bucketCollector + : new InternalProfileCollector(bucketCollector, CollectorResult.REASON_AGGREGATION, List.of()); context.queryCollectors().put(AggregationPhase.class, collector); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/Aggregations.java b/server/src/main/java/org/elasticsearch/search/aggregations/Aggregations.java index ee6f9d610c994..ad110e118b2a2 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/Aggregations.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/Aggregations.java @@ -132,8 +132,10 @@ public static Aggregations fromXContent(XContentParser parser) throws IOExceptio if (typedAgg.get() != null) { aggregations.add(typedAgg.get()); } else { - throw new ParsingException(parser.getTokenLocation(), - String.format(Locale.ROOT, "Could not parse aggregation keyed as [%s]", currentField)); + throw new ParsingException( + parser.getTokenLocation(), + String.format(Locale.ROOT, "Could not parse aggregation keyed as [%s]", currentField) + ); } } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/Aggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/Aggregator.java index 1162d6f1f0df9..57108011ee856 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/Aggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/Aggregator.java @@ -9,14 +9,14 @@ package org.elasticsearch.search.aggregations; import org.elasticsearch.ElasticsearchParseException; -import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.core.Releasable; import org.elasticsearch.common.xcontent.DeprecationHandler; +import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.core.Releasable; import org.elasticsearch.search.aggregations.support.AggregationPath; import org.elasticsearch.search.sort.SortOrder; @@ -79,8 +79,12 @@ public interface Parser { public final Aggregator resolveSortPathOnValidAgg(AggregationPath.PathElement next, Iterator path) { Aggregator n = subAggregator(next.name); if (n == null) { - throw new IllegalArgumentException("The provided aggregation [" + next + "] either does not exist, or is " - + "a pipeline aggregation and cannot be used to sort the buckets."); + throw new IllegalArgumentException( + "The provided aggregation [" + + next + + "] either does not exist, or is " + + "a pipeline aggregation and cannot be used to sort the buckets." + ); } if (false == path.hasNext()) { return n; @@ -97,9 +101,13 @@ public final Aggregator resolveSortPathOnValidAgg(AggregationPath.PathElement ne * The default implementation throws an exception but we override it on aggregations that support sorting. 
*/ public Aggregator resolveSortPath(AggregationPath.PathElement next, Iterator path) { - throw new IllegalArgumentException("Buckets can only be sorted on a sub-aggregator path " + - "that is built out of zero or more single-bucket aggregations within the path and a final " + - "single-bucket or a metrics aggregation at the path end. [" + name() + "] is not single-bucket."); + throw new IllegalArgumentException( + "Buckets can only be sorted on a sub-aggregator path " + + "that is built out of zero or more single-bucket aggregations within the path and a final " + + "single-bucket or a metrics aggregation at the path end. [" + + name() + + "] is not single-bucket." + ); } /** @@ -108,10 +116,13 @@ public Aggregator resolveSortPath(AggregationPath.PathElement next, Iterator metadata) throws IOException { + protected AggregatorBase( + String name, + AggregatorFactories factories, + AggregationContext context, + Aggregator parent, + CardinalityUpperBound subAggregatorCardinality, + Map metadata + ) throws IOException { this.name = name; this.metadata = metadata; this.parent = parent; @@ -68,9 +74,10 @@ protected AggregatorBase(String name, AggregatorFactories factories, Aggregation context.addReleasable(this); // Register a safeguard to highlight any invalid construction logic (call to this constructor without subsequent preCollection call) collectableSubAggregators = new BucketCollector() { - void badState(){ + void badState() { throw new IllegalStateException("preCollection not called on new Aggregator before use"); } + @Override public LeafBucketCollector getLeafCollector(LeafReaderContext reader) { badState(); @@ -87,6 +94,7 @@ public void preCollection() throws IOException { public void postCollection() throws IOException { badState(); } + @Override public ScoreMode scoreMode() { badState(); @@ -138,6 +146,7 @@ protected long addRequestCircuitBreakerBytes(long bytes) { this.requestBytesUsed += bytes; return requestBytesUsed; } + /** * Most aggregators don't need scores, make sure to extend this method if * your aggregator needs them. @@ -211,14 +220,12 @@ public final LeafBucketCollector getLeafCollector(LeafReaderContext ctx) throws * Can be overridden by aggregator implementations that like the perform an operation before the leaf collectors * of children aggregators are instantiated for the next segment. */ - protected void preGetSubLeafCollectors(LeafReaderContext ctx) throws IOException { - } + protected void preGetSubLeafCollectors(LeafReaderContext ctx) throws IOException {} /** * Can be overridden by aggregator implementation to be called back when the collection phase starts. */ - protected void doPreCollection() throws IOException { - } + protected void doPreCollection() throws IOException {} @Override public final void preCollection() throws IOException { @@ -291,8 +298,7 @@ protected void doClose() {} /** * Can be overridden by aggregator implementation to be called back when the collection phase ends. 
*/ - protected void doPostCollection() throws IOException { - } + protected void doPostCollection() throws IOException {} protected final InternalAggregations buildEmptySubAggregations() { List aggs = new ArrayList<>(); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/AggregatorFactories.java b/server/src/main/java/org/elasticsearch/search/aggregations/AggregatorFactories.java index 8b4f97495dbc9..3246151b9e3e9 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/AggregatorFactories.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/AggregatorFactories.java @@ -69,19 +69,33 @@ private static AggregatorFactories.Builder parseAggregators(XContentParser parse XContentParser.Token token = null; while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { if (token != XContentParser.Token.FIELD_NAME) { - throw new ParsingException(parser.getTokenLocation(), - "Unexpected token " + token + " in [aggs]: aggregations definitions must start with the name of the aggregation."); + throw new ParsingException( + parser.getTokenLocation(), + "Unexpected token " + token + " in [aggs]: aggregations definitions must start with the name of the aggregation." + ); } final String aggregationName = parser.currentName(); if (validAggMatcher.reset(aggregationName).matches() == false) { - throw new ParsingException(parser.getTokenLocation(), "Invalid aggregation name [" + aggregationName - + "]. Aggregation names can contain any character except '[', ']', and '>'"); + throw new ParsingException( + parser.getTokenLocation(), + "Invalid aggregation name [" + + aggregationName + + "]. Aggregation names can contain any character except '[', ']', and '>'" + ); } token = parser.nextToken(); if (token != XContentParser.Token.START_OBJECT) { - throw new ParsingException(parser.getTokenLocation(), "Aggregation definition for [" + aggregationName + " starts with a [" - + token + "], expected a [" + XContentParser.Token.START_OBJECT + "]."); + throw new ParsingException( + parser.getTokenLocation(), + "Aggregation definition for [" + + aggregationName + + " starts with a [" + + token + + "], expected a [" + + XContentParser.Token.START_OBJECT + + "]." 
+ ); } BaseAggregationBuilder aggBuilder = null; @@ -92,49 +106,85 @@ private static AggregatorFactories.Builder parseAggregators(XContentParser parse while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { if (token != XContentParser.Token.FIELD_NAME) { throw new ParsingException( - parser.getTokenLocation(), "Expected [" + XContentParser.Token.FIELD_NAME + "] under a [" - + XContentParser.Token.START_OBJECT + "], but got a [" + token + "] in [" + aggregationName + "]", - parser.getTokenLocation()); + parser.getTokenLocation(), + "Expected [" + + XContentParser.Token.FIELD_NAME + + "] under a [" + + XContentParser.Token.START_OBJECT + + "], but got a [" + + token + + "] in [" + + aggregationName + + "]", + parser.getTokenLocation() + ); } final String fieldName = parser.currentName(); token = parser.nextToken(); if (token == XContentParser.Token.START_OBJECT) { switch (fieldName) { - case "meta": - metadata = parser.map(); - break; - case "aggregations": - case "aggs": - if (subFactories != null) { - throw new ParsingException(parser.getTokenLocation(), - "Found two sub aggregation definitions under [" + aggregationName + "]"); - } - subFactories = parseAggregators(parser, level + 1); - break; - default: - if (aggBuilder != null) { - throw new ParsingException(parser.getTokenLocation(), "Found two aggregation type definitions in [" - + aggregationName + "]: [" + aggBuilder.getType() + "] and [" + fieldName + "]"); - } + case "meta": + metadata = parser.map(); + break; + case "aggregations": + case "aggs": + if (subFactories != null) { + throw new ParsingException( + parser.getTokenLocation(), + "Found two sub aggregation definitions under [" + aggregationName + "]" + ); + } + subFactories = parseAggregators(parser, level + 1); + break; + default: + if (aggBuilder != null) { + throw new ParsingException( + parser.getTokenLocation(), + "Found two aggregation type definitions in [" + + aggregationName + + "]: [" + + aggBuilder.getType() + + "] and [" + + fieldName + + "]" + ); + } - try { - aggBuilder = parser.namedObject(BaseAggregationBuilder.class, fieldName, aggregationName); - } catch (NamedObjectNotFoundException ex) { - String message = String.format(Locale.ROOT, "Unknown aggregation type [%s]%s", fieldName, - SuggestingErrorOnUnknown.suggest(fieldName, ex.getCandidates())); - throw new ParsingException(new XContentLocation(ex.getLineNumber(), ex.getColumnNumber()), message, ex); - } + try { + aggBuilder = parser.namedObject(BaseAggregationBuilder.class, fieldName, aggregationName); + } catch (NamedObjectNotFoundException ex) { + String message = String.format( + Locale.ROOT, + "Unknown aggregation type [%s]%s", + fieldName, + SuggestingErrorOnUnknown.suggest(fieldName, ex.getCandidates()) + ); + throw new ParsingException(new XContentLocation(ex.getLineNumber(), ex.getColumnNumber()), message, ex); + } } } else { - throw new ParsingException(parser.getTokenLocation(), "Expected [" + XContentParser.Token.START_OBJECT + "] under [" - + fieldName + "], but got a [" + token + "] in [" + aggregationName + "]"); + throw new ParsingException( + parser.getTokenLocation(), + "Expected [" + + XContentParser.Token.START_OBJECT + + "] under [" + + fieldName + + "], but got a [" + + token + + "] in [" + + aggregationName + + "]" + ); } } if (aggBuilder == null) { - throw new ParsingException(parser.getTokenLocation(), "Missing definition for aggregation [" + aggregationName + "]", - parser.getTokenLocation()); + throw new ParsingException( + parser.getTokenLocation(), + "Missing 
definition for aggregation [" + aggregationName + "]", + parser.getTokenLocation() + ); } else { if (metadata != null) { aggBuilder.setMetadata(metadata); @@ -180,8 +230,7 @@ public AggregationContext context() { * that {@link Aggregator}s created by this method will * be asked to collect. */ - public Aggregator[] createSubAggregators(Aggregator parent, CardinalityUpperBound cardinality) - throws IOException { + public Aggregator[] createSubAggregators(Aggregator parent, CardinalityUpperBound cardinality) throws IOException { Aggregator[] aggregators = new Aggregator[countAggregators()]; for (int i = 0; i < factories.length; ++i) { aggregators[i] = context.profileIfEnabled(factories[i].create(parent, cardinality)); @@ -217,8 +266,7 @@ public AggregatorFactories fixParent(Aggregator fixedParent) { AggregatorFactories previous = this; return new AggregatorFactories(context, factories) { @Override - public Aggregator[] createSubAggregators(Aggregator parent, CardinalityUpperBound cardinality) - throws IOException { + public Aggregator[] createSubAggregators(Aggregator parent, CardinalityUpperBound cardinality) throws IOException { // Note that we're throwing out the "parent" passed in to this method and using the parent passed to fixParent return previous.createSubAggregators(fixedParent, cardinality); } @@ -240,8 +288,7 @@ public static class Builder implements Writeable, ToXContentObject { /** * Create an empty builder. */ - public Builder() { - } + public Builder() {} /** * Read from a stream. @@ -300,8 +347,11 @@ public Builder addPipelineAggregator(PipelineAggregationBuilder pipelineAggregat * Validate the root of the aggregation tree. */ public ActionRequestValidationException validate(ActionRequestValidationException e) { - PipelineAggregationBuilder.ValidationContext context = - PipelineAggregationBuilder.ValidationContext.forTreeRoot(aggregationBuilders, pipelineAggregatorBuilders, e); + PipelineAggregationBuilder.ValidationContext context = PipelineAggregationBuilder.ValidationContext.forTreeRoot( + aggregationBuilders, + pipelineAggregatorBuilders, + e + ); validatePipelines(context); return validateChildren(context.getValidationException()); } @@ -327,8 +377,7 @@ private void validatePipelines(PipelineAggregationBuilder.ValidationContext cont */ private ActionRequestValidationException validateChildren(ActionRequestValidationException e) { for (AggregationBuilder agg : aggregationBuilders) { - PipelineAggregationBuilder.ValidationContext context = - PipelineAggregationBuilder.ValidationContext.forInsideTree(agg, e); + PipelineAggregationBuilder.ValidationContext context = PipelineAggregationBuilder.ValidationContext.forInsideTree(agg, e); agg.factoriesBuilder.validatePipelines(context); e = agg.factoriesBuilder.validateChildren(context.getValidationException()); } @@ -349,7 +398,9 @@ public AggregatorFactories build(AggregationContext context, AggregatorFactory p } private List resolvePipelineAggregatorOrder( - Collection pipelineAggregatorBuilders, Collection aggregationBuilders) { + Collection pipelineAggregatorBuilders, + Collection aggregationBuilders + ) { Map pipelineAggregatorBuildersMap = new HashMap<>(); for (PipelineAggregationBuilder builder : pipelineAggregatorBuilders) { pipelineAggregatorBuildersMap.put(builder.getName(), builder); @@ -363,16 +414,26 @@ private List resolvePipelineAggregatorOrder( Collection temporarilyMarked = new HashSet<>(); while (unmarkedBuilders.isEmpty() == false) { PipelineAggregationBuilder builder = unmarkedBuilders.get(0); - 
resolvePipelineAggregatorOrder(aggBuildersMap, pipelineAggregatorBuildersMap, orderedPipelineAggregatorrs, unmarkedBuilders, - temporarilyMarked, builder); + resolvePipelineAggregatorOrder( + aggBuildersMap, + pipelineAggregatorBuildersMap, + orderedPipelineAggregatorrs, + unmarkedBuilders, + temporarilyMarked, + builder + ); } return orderedPipelineAggregatorrs; } - private void resolvePipelineAggregatorOrder(Map aggBuildersMap, - Map pipelineAggregatorBuildersMap, - List orderedPipelineAggregators, List unmarkedBuilders, - Collection temporarilyMarked, PipelineAggregationBuilder builder) { + private void resolvePipelineAggregatorOrder( + Map aggBuildersMap, + Map pipelineAggregatorBuildersMap, + List orderedPipelineAggregators, + List unmarkedBuilders, + Collection temporarilyMarked, + PipelineAggregationBuilder builder + ) { if (temporarilyMarked.contains(builder)) { throw new IllegalArgumentException("Cyclical dependency found with pipeline aggregator [" + builder.getName() + "]"); } else if (unmarkedBuilders.contains(builder)) { @@ -404,8 +465,8 @@ private void resolvePipelineAggregatorOrder(Map aggB } // Check the pipeline sub-aggregator factories if (foundSubBuilder == false && (i == bucketsPathElements.size() - 1)) { - Collection subPipelineBuilders = aggBuilder.factoriesBuilder - .pipelineAggregatorBuilders; + Collection subPipelineBuilders = + aggBuilder.factoriesBuilder.pipelineAggregatorBuilders; for (PipelineAggregationBuilder subFactory : subPipelineBuilders) { if (aggName.equals(subFactory.getName())) { foundSubBuilder = true; @@ -414,8 +475,9 @@ private void resolvePipelineAggregatorOrder(Map aggB } } if (foundSubBuilder == false) { - throw new IllegalArgumentException("No aggregation [" + aggName + "] found for path [" + bucketsPath - + "]"); + throw new IllegalArgumentException( + "No aggregation [" + aggName + "] found for path [" + bucketsPath + "]" + ); } } } @@ -423,8 +485,14 @@ private void resolvePipelineAggregatorOrder(Map aggB } else { PipelineAggregationBuilder matchingBuilder = pipelineAggregatorBuildersMap.get(firstAggName); if (matchingBuilder != null) { - resolvePipelineAggregatorOrder(aggBuildersMap, pipelineAggregatorBuildersMap, orderedPipelineAggregators, - unmarkedBuilders, temporarilyMarked, matchingBuilder); + resolvePipelineAggregatorOrder( + aggBuildersMap, + pipelineAggregatorBuildersMap, + orderedPipelineAggregators, + unmarkedBuilders, + temporarilyMarked, + matchingBuilder + ); } else { throw new IllegalArgumentException("No aggregation found for path [" + bucketsPath + "]"); } @@ -489,16 +557,12 @@ public int hashCode() { @Override public boolean equals(Object obj) { - if (obj == null) - return false; - if (getClass() != obj.getClass()) - return false; + if (obj == null) return false; + if (getClass() != obj.getClass()) return false; Builder other = (Builder) obj; - if (Objects.equals(aggregationBuilders, other.aggregationBuilders) == false) - return false; - if (Objects.equals(pipelineAggregatorBuilders, other.pipelineAggregatorBuilders) == false) - return false; + if (Objects.equals(aggregationBuilders, other.aggregationBuilders) == false) return false; + if (Objects.equals(pipelineAggregatorBuilders, other.pipelineAggregatorBuilders) == false) return false; return true; } @@ -534,11 +598,10 @@ public PipelineTree buildPipelineTree() { return PipelineTree.EMPTY; } Map subTrees = aggregationBuilders.stream() - .collect(toMap(AggregationBuilder::getName, AggregationBuilder::buildPipelineTree)); - List aggregators = 
resolvePipelineAggregatorOrder(pipelineAggregatorBuilders, aggregationBuilders) - .stream() - .map(PipelineAggregationBuilder::create) - .collect(toList()); + .collect(toMap(AggregationBuilder::getName, AggregationBuilder::buildPipelineTree)); + List aggregators = resolvePipelineAggregatorOrder(pipelineAggregatorBuilders, aggregationBuilders).stream() + .map(PipelineAggregationBuilder::create) + .collect(toList()); return new PipelineTree(subTrees, aggregators); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/AggregatorFactory.java b/server/src/main/java/org/elasticsearch/search/aggregations/AggregatorFactory.java index 4c7b3f777b5cf..0bfa507afa782 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/AggregatorFactory.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/AggregatorFactory.java @@ -31,8 +31,13 @@ public abstract class AggregatorFactory { * @throws IOException * if an error occurs creating the factory */ - public AggregatorFactory(String name, AggregationContext context, AggregatorFactory parent, - AggregatorFactories.Builder subFactoriesBuilder, Map metadata) throws IOException { + public AggregatorFactory( + String name, + AggregationContext context, + AggregatorFactory parent, + AggregatorFactories.Builder subFactoriesBuilder, + Map metadata + ) throws IOException { this.name = name; this.context = context; this.parent = parent; @@ -44,8 +49,7 @@ public String name() { return name; } - public void doValidate() { - } + public void doValidate() {} protected abstract Aggregator createInternal(Aggregator parent, CardinalityUpperBound cardinality, Map metadata) throws IOException; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/BucketCollector.java b/server/src/main/java/org/elasticsearch/search/aggregations/BucketCollector.java index ac42deb67f3df..8a5eee01799df 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/BucketCollector.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/BucketCollector.java @@ -8,7 +8,6 @@ package org.elasticsearch.search.aggregations; - import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.Collector; import org.apache.lucene.search.ScoreMode; @@ -26,14 +25,17 @@ public abstract class BucketCollector implements Collector { public LeafBucketCollector getLeafCollector(LeafReaderContext reader) { return LeafBucketCollector.NO_OP_COLLECTOR; } + @Override public void preCollection() throws IOException { // no-op } + @Override public void postCollection() throws IOException { // no-op } + @Override public ScoreMode scoreMode() { return ScoreMode.COMPLETE_NO_SCORES; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/BucketOrder.java b/server/src/main/java/org/elasticsearch/search/aggregations/BucketOrder.java index 4eb472c305014..6be4699c62c29 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/BucketOrder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/BucketOrder.java @@ -94,7 +94,7 @@ public static BucketOrder compound(BucketOrder... orders) { * @throws AggregationExecutionException when the ordering is invalid * for this {@linkplain Aggregator}. 
      */
-    public final void validate(Aggregator aggregator) throws AggregationExecutionException{
+    public final void validate(Aggregator aggregator) throws AggregationExecutionException {
         /*
          * Building partiallyBuiltBucketComparator and throwing it away is enough
          * to validate this order because doing so checks all of the appropriate
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/InternalAggregation.java b/server/src/main/java/org/elasticsearch/search/aggregations/InternalAggregation.java
index 0d2eb9629801b..5724fae9dbf70 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/InternalAggregation.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/InternalAggregation.java
@@ -43,11 +43,13 @@ public interface ReduceContextBuilder {
          * Build a {@linkplain ReduceContext} to perform a partial reduction.
          */
         ReduceContext forPartialReduction();
+
         /**
          * Build a {@linkplain ReduceContext} to perform the final reduction.
          */
         ReduceContext forFinalReduction();
     }
+
     public static class ReduceContext {
         private final BigArrays bigArrays;
         private final ScriptService scriptService;
@@ -63,8 +65,11 @@ public static class ReduceContext {
         /**
          * Build a {@linkplain ReduceContext} to perform a partial reduction.
          */
-        public static ReduceContext forPartialReduction(BigArrays bigArrays, ScriptService scriptService,
-            Supplier pipelineTreeForBwcSerialization) {
+        public static ReduceContext forPartialReduction(
+            BigArrays bigArrays,
+            ScriptService scriptService,
+            Supplier pipelineTreeForBwcSerialization
+        ) {
             return new ReduceContext(bigArrays, scriptService, (s) -> {}, null, pipelineTreeForBwcSerialization);
         }
 
@@ -72,14 +77,28 @@ public static ReduceContext forPartialReduction(BigArrays bigArrays, ScriptServi
          * Build a {@linkplain ReduceContext} to perform the final reduction.
* @param pipelineTreeRoot The root of tree of pipeline aggregations for this request */ - public static ReduceContext forFinalReduction(BigArrays bigArrays, ScriptService scriptService, - IntConsumer multiBucketConsumer, PipelineTree pipelineTreeRoot) { - return new ReduceContext(bigArrays, scriptService, multiBucketConsumer, - requireNonNull(pipelineTreeRoot, "prefer EMPTY to null"), () -> pipelineTreeRoot); + public static ReduceContext forFinalReduction( + BigArrays bigArrays, + ScriptService scriptService, + IntConsumer multiBucketConsumer, + PipelineTree pipelineTreeRoot + ) { + return new ReduceContext( + bigArrays, + scriptService, + multiBucketConsumer, + requireNonNull(pipelineTreeRoot, "prefer EMPTY to null"), + () -> pipelineTreeRoot + ); } - private ReduceContext(BigArrays bigArrays, ScriptService scriptService, IntConsumer multiBucketConsumer, - PipelineTree pipelineTreeRoot, Supplier pipelineTreeForBwcSerialization) { + private ReduceContext( + BigArrays bigArrays, + ScriptService scriptService, + IntConsumer multiBucketConsumer, + PipelineTree pipelineTreeRoot, + Supplier pipelineTreeForBwcSerialization + ) { this.bigArrays = bigArrays; this.scriptService = scriptService; this.multiBucketConsumer = multiBucketConsumer; @@ -186,7 +205,8 @@ public String getName() { */ public InternalAggregation copyWithRewritenBuckets(Function rewriter) { throw new IllegalStateException( - "Aggregation [" + getName() + "] must be a bucket aggregation but was [" + getWriteableName() + "]"); + "Aggregation [" + getName() + "] must be a bucket aggregation but was [" + getWriteableName() + "]" + ); } /** @@ -199,7 +219,10 @@ public void forEachBucket(Consumer consumer) {} * be called after all aggregations have been fully reduced */ public InternalAggregation reducePipelines( - InternalAggregation reducedAggs, ReduceContext reduceContext, PipelineTree pipelinesForThisAgg) { + InternalAggregation reducedAggs, + ReduceContext reduceContext, + PipelineTree pipelinesForThisAgg + ) { assert reduceContext.isFinalReduce(); for (PipelineAggregator pipelineAggregator : pipelinesForThisAgg.aggregators()) { reducedAggs = pipelineAggregator.reduce(reducedAggs, reduceContext); @@ -302,11 +325,12 @@ public boolean equals(Object obj) { if (obj == null || getClass() != obj.getClass()) { return false; } - if (obj == this) { return true; } + if (obj == this) { + return true; + } InternalAggregation other = (InternalAggregation) obj; - return Objects.equals(name, other.name) && - Objects.equals(metadata, other.metadata); + return Objects.equals(name, other.name) && Objects.equals(metadata, other.metadata); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/InternalAggregations.java b/server/src/main/java/org/elasticsearch/search/aggregations/InternalAggregations.java index 1303b4e32bb8a..61e8f03ee026f 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/InternalAggregations.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/InternalAggregations.java @@ -137,8 +137,10 @@ public static InternalAggregations reduce(List aggregation for (InternalAggregations aggregations : aggregationsList) { for (Aggregation aggregation : aggregations.aggregations) { List aggs = aggByName.computeIfAbsent( - aggregation.getName(), k -> new ArrayList<>(aggregationsList.size())); - aggs.add((InternalAggregation)aggregation); + aggregation.getName(), + k -> new ArrayList<>(aggregationsList.size()) + ); + aggs.add((InternalAggregation) aggregation); } } diff --git 
a/server/src/main/java/org/elasticsearch/search/aggregations/InternalMultiBucketAggregation.java b/server/src/main/java/org/elasticsearch/search/aggregations/InternalMultiBucketAggregation.java index 2a14fc12c28f1..b44d8db247bdd 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/InternalMultiBucketAggregation.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/InternalMultiBucketAggregation.java @@ -22,9 +22,9 @@ import java.util.function.Function; @SuppressWarnings("rawtypes") -public abstract class InternalMultiBucketAggregation - extends InternalAggregation implements MultiBucketsAggregation { +public abstract class InternalMultiBucketAggregation< + A extends InternalMultiBucketAggregation, + B extends InternalMultiBucketAggregation.InternalBucket> extends InternalAggregation implements MultiBucketsAggregation { public InternalMultiBucketAggregation(String name, Map metadata) { super(name, metadata); @@ -120,7 +120,7 @@ public static int countInnerBucket(Aggregation agg) { if (agg instanceof MultiBucketsAggregation) { MultiBucketsAggregation multi = (MultiBucketsAggregation) agg; for (MultiBucketsAggregation.Bucket bucket : multi.getBuckets()) { - ++ size; + ++size; for (Aggregation bucketAgg : bucket.getAggregations().asList()) { size += countInnerBucket(bucketAgg); } @@ -140,7 +140,10 @@ public static int countInnerBucket(Aggregation agg) { */ @Override public final InternalAggregation reducePipelines( - InternalAggregation reducedAggs, ReduceContext reduceContext, PipelineTree pipelineTree) { + InternalAggregation reducedAggs, + ReduceContext reduceContext, + PipelineTree pipelineTree + ) { assert reduceContext.isFinalReduce(); InternalAggregation reduced = this; if (pipelineTree.hasSubTrees()) { @@ -185,7 +188,7 @@ private List reducePipelineBuckets(ReduceContext reduceContext, PipelineTree List aggs = new ArrayList<>(); for (Aggregation agg : bucket.getAggregations()) { PipelineTree subTree = pipelineTree.subTree(agg.getName()); - aggs.add(((InternalAggregation)agg).reducePipelines((InternalAggregation)agg, reduceContext, subTree)); + aggs.add(((InternalAggregation) agg).reducePipelines((InternalAggregation) agg, reduceContext, subTree)); } reducedBuckets.add(createBucket(InternalAggregations.from(aggs), bucket)); } @@ -213,8 +216,9 @@ public Object getProperty(String containingAggName, List path) { } InternalAggregation aggregation = aggregations.get(aggName); if (aggregation == null) { - throw new InvalidAggregationPathException("Cannot find an aggregation named [" + aggName + "] in [" + containingAggName - + "]"); + throw new InvalidAggregationPathException( + "Cannot find an aggregation named [" + aggName + "] in [" + containingAggName + "]" + ); } return aggregation.getProperty(path.subList(1, path.size())); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/InternalOrder.java b/server/src/main/java/org/elasticsearch/search/aggregations/InternalOrder.java index 5578a02dbe3bf..57f92d785dbed 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/InternalOrder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/InternalOrder.java @@ -112,8 +112,7 @@ public boolean equals(Object obj) { return false; } Aggregation other = (Aggregation) obj; - return Objects.equals(path, other.path) - && Objects.equals(order, other.order); + return Objects.equals(path, other.path) && Objects.equals(order, other.order); } } @@ -186,8 +185,8 @@ public XContentBuilder toXContent(XContentBuilder builder, 
Params params) throws @Override public Comparator partiallyBuiltBucketComparator(ToLongFunction ordinalReader, Aggregator aggregator) { List> comparators = orderElements.stream() - .map(oe -> oe.partiallyBuiltBucketComparator(ordinalReader, aggregator)) - .collect(toList()); + .map(oe -> oe.partiallyBuiltBucketComparator(ordinalReader, aggregator)) + .collect(toList()); return (lhs, rhs) -> { for (Comparator c : comparators) { int result = c.compare(lhs, rhs); @@ -313,9 +312,7 @@ public boolean equals(Object obj) { return false; } SimpleOrder other = (SimpleOrder) obj; - return Objects.equals(id, other.id) - && Objects.equals(key, other.key) - && Objects.equals(order, other.order); + return Objects.equals(id, other.id) && Objects.equals(key, other.key) && Objects.equals(order, other.order); } } @@ -386,7 +383,8 @@ private static Comparator comparingKeys() { return ((KeyComparable) b1).compareKey(b2); } throw new IllegalStateException("Unexpected order bucket class [" + b1.getClass() + "]"); - }; } + }; + } /** * @return compare by {@link Bucket#getKey()} that will be in the bucket once it is reduced @@ -472,10 +470,14 @@ public static class Streams { public static BucketOrder readOrder(StreamInput in) throws IOException { byte id = in.readByte(); switch (id) { - case COUNT_DESC_ID: return COUNT_DESC; - case COUNT_ASC_ID: return COUNT_ASC; - case KEY_DESC_ID: return KEY_DESC; - case KEY_ASC_ID: return KEY_ASC; + case COUNT_DESC_ID: + return COUNT_DESC; + case COUNT_ASC_ID: + return COUNT_ASC; + case KEY_DESC_ID: + return KEY_DESC; + case KEY_ASC_ID: + return KEY_ASC; case Aggregation.ID: boolean asc = in.readBoolean(); String key = in.readString(); @@ -542,6 +544,7 @@ public static void writeHistogramOrder(BucketOrder order, StreamOutput out) thro */ public static class Parser { private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(Parser.class); + /** * Parse a {@link BucketOrder} from {@link XContent}. * @@ -563,23 +566,22 @@ public static BucketOrder parseOrderParam(XContentParser parser) throws IOExcept } else if ("desc".equalsIgnoreCase(dir)) { orderAsc = false; } else { - throw new ParsingException(parser.getTokenLocation(), - "Unknown order direction [" + dir + "]"); + throw new ParsingException(parser.getTokenLocation(), "Unknown order direction [" + dir + "]"); } } else { - throw new ParsingException(parser.getTokenLocation(), - "Unexpected token [" + token + "] for [order]"); + throw new ParsingException(parser.getTokenLocation(), "Unexpected token [" + token + "] for [order]"); } } if (orderKey == null) { - throw new ParsingException(parser.getTokenLocation(), - "Must specify at least one field for [order]"); + throw new ParsingException(parser.getTokenLocation(), "Must specify at least one field for [order]"); } // _term and _time order deprecated in 6.0; replaced by _key - if (parser.getRestApiVersion() == RestApiVersion.V_7 && - ("_term".equals(orderKey) || "_time".equals(orderKey))) { - deprecationLogger.compatibleApiWarning("_term_and_time_key_removal" , - "Deprecated aggregation order key [{}] used, replaced by [_key]", orderKey); + if (parser.getRestApiVersion() == RestApiVersion.V_7 && ("_term".equals(orderKey) || "_time".equals(orderKey))) { + deprecationLogger.compatibleApiWarning( + "_term_and_time_key_removal", + "Deprecated aggregation order key [{}] used, replaced by [_key]", + orderKey + ); return orderAsc ? 
KEY_ASC : KEY_DESC; } switch (orderKey) { diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/LeafBucketCollector.java b/server/src/main/java/org/elasticsearch/search/aggregations/LeafBucketCollector.java index 10b4762c600d2..431fe2b851831 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/LeafBucketCollector.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/LeafBucketCollector.java @@ -32,10 +32,12 @@ public abstract class LeafBucketCollector implements LeafCollector { public void setScorer(Scorable arg0) throws IOException { // no-op } + @Override public void collect(int doc, long bucket) { // no-op } + @Override public boolean isNoop() { return true; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/MultiBucketCollector.java b/server/src/main/java/org/elasticsearch/search/aggregations/MultiBucketCollector.java index e5a357938046d..28094c513acf2 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/MultiBucketCollector.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/MultiBucketCollector.java @@ -98,7 +98,8 @@ public LeafBucketCollector getLeafCollector(LeafReaderContext ctx) throws IOExce } catch (CollectionTerminatedException e) { throw new IllegalStateException( "getLeafCollector should return a noop collector instead of throw " - + CollectionTerminatedException.class.getSimpleName(), e + + CollectionTerminatedException.class.getSimpleName(), + e ); } if (terminateIfNoop) { @@ -230,7 +231,7 @@ private void removeCollector(int i) { public void collect(int doc, long bucket) throws IOException { final LeafBucketCollector[] collectors = this.collectors; int numCollectors = this.numCollectors; - for (int i = 0; i < numCollectors; ) { + for (int i = 0; i < numCollectors;) { final LeafBucketCollector collector = collectors[i]; try { collector.collect(doc, bucket); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/MultiBucketConsumerService.java b/server/src/main/java/org/elasticsearch/search/aggregations/MultiBucketConsumerService.java index 3e2bd91298fef..91e09e5013910 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/MultiBucketConsumerService.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/MultiBucketConsumerService.java @@ -28,8 +28,13 @@ */ public class MultiBucketConsumerService { public static final int DEFAULT_MAX_BUCKETS = 65536; - public static final Setting MAX_BUCKET_SETTING = - Setting.intSetting("search.max_buckets", DEFAULT_MAX_BUCKETS, 0, Setting.Property.NodeScope, Setting.Property.Dynamic); + public static final Setting MAX_BUCKET_SETTING = Setting.intSetting( + "search.max_buckets", + DEFAULT_MAX_BUCKETS, + 0, + Setting.Property.NodeScope, + Setting.Property.Dynamic + ); private final CircuitBreaker breaker; @@ -103,9 +108,16 @@ public void accept(int value) { if (value != 0) { count += value; if (count > limit) { - throw new TooManyBucketsException("Trying to create too many buckets. Must be less than or equal to: [" + limit - + "] but was [" + count + "]. This limit can be set by changing the [" + - MAX_BUCKET_SETTING.getKey() + "] cluster level setting.", limit); + throw new TooManyBucketsException( + "Trying to create too many buckets. Must be less than or equal to: [" + + limit + + "] but was [" + + count + + "]. 
This limit can be set by changing the [" + + MAX_BUCKET_SETTING.getKey() + + "] cluster level setting.", + limit + ); } } // check parent circuit breaker every 1024 calls diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/ParsedAggregation.java b/server/src/main/java/org/elasticsearch/search/aggregations/ParsedAggregation.java index 2d2f6f68af938..e12e0567c71e2 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/ParsedAggregation.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/ParsedAggregation.java @@ -26,8 +26,11 @@ public abstract class ParsedAggregation implements Aggregation, ToXContentFragment { protected static void declareAggregationFields(AbstractObjectParser objectParser) { - objectParser.declareObject((parsedAgg, metadata) -> parsedAgg.metadata = Collections.unmodifiableMap(metadata), - (parser, context) -> parser.map(), InternalAggregation.CommonFields.META); + objectParser.declareObject( + (parsedAgg, metadata) -> parsedAgg.metadata = Collections.unmodifiableMap(metadata), + (parser, context) -> parser.map(), + InternalAggregation.CommonFields.META + ); } private String name; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/ParsedMultiBucketAggregation.java b/server/src/main/java/org/elasticsearch/search/aggregations/ParsedMultiBucketAggregation.java index 3b3f42cc3255e..76ca0a917fb5d 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/ParsedMultiBucketAggregation.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/ParsedMultiBucketAggregation.java @@ -9,11 +9,11 @@ package org.elasticsearch.search.aggregations; import org.elasticsearch.common.CheckedBiConsumer; -import org.elasticsearch.core.CheckedFunction; import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParserUtils; +import org.elasticsearch.core.CheckedFunction; import org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation; import java.io.IOException; @@ -23,8 +23,9 @@ import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; -public abstract class ParsedMultiBucketAggregation - extends ParsedAggregation implements MultiBucketsAggregation { +public abstract class ParsedMultiBucketAggregation extends ParsedAggregation + implements + MultiBucketsAggregation { protected final List buckets = new ArrayList<>(); protected boolean keyed = false; @@ -135,11 +136,12 @@ protected XContentBuilder keyToXContent(XContentBuilder builder) throws IOExcept return builder.field(CommonFields.KEY.getPreferredName(), getKey()); } - protected static B parseXContent(final XContentParser parser, - final boolean keyed, - final Supplier bucketSupplier, - final CheckedBiConsumer keyConsumer) - throws IOException { + protected static B parseXContent( + final XContentParser parser, + final boolean keyed, + final Supplier bucketSupplier, + final CheckedBiConsumer keyConsumer + ) throws IOException { final B bucket = bucketSupplier.get(); bucket.setKeyed(keyed); XContentParser.Token token = parser.currentToken(); @@ -165,8 +167,12 @@ protected static B parseXContent(final XContentParser p if (CommonFields.KEY.getPreferredName().equals(currentFieldName)) { keyConsumer.accept(parser, bucket); } else { - XContentParserUtils.parseTypedKeysObject(parser, Aggregation.TYPED_KEYS_DELIMITER, Aggregation.class, - 
aggregations::add); + XContentParserUtils.parseTypedKeysObject( + parser, + Aggregation.TYPED_KEYS_DELIMITER, + Aggregation.class, + aggregations::add + ); } } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/PipelineAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/PipelineAggregationBuilder.java index bfddfea954130..3f8fda7810349 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/PipelineAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/PipelineAggregationBuilder.java @@ -70,25 +70,26 @@ public final String[] getBucketsPaths() { * Makes sure this builder is properly configured. */ protected abstract void validate(ValidationContext context); + public abstract static class ValidationContext { /** * Build the context for the root of the aggregation tree. */ - public static ValidationContext forTreeRoot(Collection siblingAggregations, - Collection siblingPipelineAggregations, - ActionRequestValidationException validationFailuresSoFar) { + public static ValidationContext forTreeRoot( + Collection siblingAggregations, + Collection siblingPipelineAggregations, + ActionRequestValidationException validationFailuresSoFar + ) { return new ForTreeRoot(siblingAggregations, siblingPipelineAggregations, validationFailuresSoFar); } /** * Build the context for a node inside the aggregation tree. */ - public static ValidationContext forInsideTree(AggregationBuilder parent, - ActionRequestValidationException validationFailuresSoFar) { + public static ValidationContext forInsideTree(AggregationBuilder parent, ActionRequestValidationException validationFailuresSoFar) { return new ForInsideTree(parent, validationFailuresSoFar); } - private ActionRequestValidationException e; private ValidationContext(ActionRequestValidationException validationFailuresSoFar) { @@ -99,9 +100,11 @@ private static class ForTreeRoot extends ValidationContext { private final Collection siblingAggregations; private final Collection siblingPipelineAggregations; - ForTreeRoot(Collection siblingAggregations, - Collection siblingPipelineAggregations, - ActionRequestValidationException validationFailuresSoFar) { + ForTreeRoot( + Collection siblingAggregations, + Collection siblingPipelineAggregations, + ActionRequestValidationException validationFailuresSoFar + ) { super(validationFailuresSoFar); this.siblingAggregations = Objects.requireNonNull(siblingAggregations); this.siblingPipelineAggregations = Objects.requireNonNull(siblingPipelineAggregations); @@ -124,8 +127,12 @@ public void validateHasParent(String type, String name) { @Override public void validateParentAggSequentiallyOrdered(String type, String name) { - addValidationError(type + " aggregation [" + name - + "] must have a histogram, date_histogram or auto_date_histogram as parent but doesn't have a parent"); + addValidationError( + type + + " aggregation [" + + name + + "] must have a histogram, date_histogram or auto_date_histogram as parent but doesn't have a parent" + ); } } @@ -157,20 +164,19 @@ public void validateParentAggSequentiallyOrdered(String type, String name) { if (parent instanceof HistogramAggregationBuilder) { HistogramAggregationBuilder histoParent = (HistogramAggregationBuilder) parent; if (histoParent.minDocCount() != 0) { - addValidationError( - "parent histogram of " + type + " aggregation [" + name + "] must have min_doc_count of 0"); + addValidationError("parent histogram of " + type + " aggregation [" + name + "] must have 
min_doc_count of 0"); } } else if (parent instanceof DateHistogramAggregationBuilder) { DateHistogramAggregationBuilder histoParent = (DateHistogramAggregationBuilder) parent; if (histoParent.minDocCount() != 0) { - addValidationError( - "parent histogram of " + type + " aggregation [" + name + "] must have min_doc_count of 0"); + addValidationError("parent histogram of " + type + " aggregation [" + name + "] must have min_doc_count of 0"); } } else if (parent instanceof AutoDateHistogramAggregationBuilder) { // Nothing to check } else { addValidationError( - type + " aggregation [" + name + "] must have a histogram, date_histogram or auto_date_histogram as parent"); + type + " aggregation [" + name + "] must have a histogram, date_histogram or auto_date_histogram as parent" + ); } } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/PipelineAggregatorBuilders.java b/server/src/main/java/org/elasticsearch/search/aggregations/PipelineAggregatorBuilders.java index f663c5654b6d2..1409ec94009a5 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/PipelineAggregatorBuilders.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/PipelineAggregatorBuilders.java @@ -30,8 +30,7 @@ public final class PipelineAggregatorBuilders { - private PipelineAggregatorBuilders() { - } + private PipelineAggregatorBuilders() {} public static DerivativePipelineAggregationBuilder derivative(String name, String bucketsPath) { return new DerivativePipelineAggregationBuilder(name, bucketsPath); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/BucketsAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/BucketsAggregator.java index 2d805e35dbcd0..8be166c0fe4a5 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/BucketsAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/BucketsAggregator.java @@ -8,8 +8,8 @@ package org.elasticsearch.search.aggregations.bucket; import org.apache.lucene.index.LeafReaderContext; -import org.elasticsearch.core.Releasable; import org.elasticsearch.common.util.LongArray; +import org.elasticsearch.core.Releasable; import org.elasticsearch.search.aggregations.AggregationExecutionException; import org.elasticsearch.search.aggregations.Aggregator; import org.elasticsearch.search.aggregations.AggregatorBase; @@ -42,8 +42,14 @@ public abstract class BucketsAggregator extends AggregatorBase { private LongArray docCounts; protected final DocCountProvider docCountProvider; - public BucketsAggregator(String name, AggregatorFactories factories, AggregationContext context, Aggregator parent, - CardinalityUpperBound bucketCardinality, Map metadata) throws IOException { + public BucketsAggregator( + String name, + AggregatorFactories factories, + AggregationContext context, + Aggregator parent, + CardinalityUpperBound bucketCardinality, + Map metadata + ) throws IOException { super(name, factories, context, parent, bucketCardinality, metadata); multiBucketConsumer = context.multiBucketConsumer(); docCounts = bigArrays().newLongArray(1, true); @@ -92,14 +98,14 @@ public final void collectExistingBucket(LeafBucketCollector subCollector, int do * @param mergeMap a unary operator which maps a bucket's ordinal to the ordinal it should be merged with. * If a bucket's ordinal is mapped to -1 then the bucket is removed entirely. 
*/ - public final void rewriteBuckets(long newNumBuckets, LongUnaryOperator mergeMap){ + public final void rewriteBuckets(long newNumBuckets, LongUnaryOperator mergeMap) { try (LongArray oldDocCounts = docCounts) { docCounts = bigArrays().newLongArray(newNumBuckets, true); docCounts.fill(0, newNumBuckets, 0); for (long i = 0; i < oldDocCounts.size(); i++) { long docCount = oldDocCounts.get(i); - if(docCount == 0) continue; + if (docCount == 0) continue; // Skip any in the map which have been "removed", signified with -1 long destinationOrdinal = mergeMap.applyAsLong(i); @@ -188,8 +194,8 @@ public int size() { * @param bucketToOrd how to convert a bucket into an ordinal * @param setAggs how to set the sub-aggregation results on a bucket */ - protected final void buildSubAggsForBuckets(B[] buckets, - ToLongFunction bucketToOrd, BiConsumer setAggs) throws IOException { + protected final void buildSubAggsForBuckets(B[] buckets, ToLongFunction bucketToOrd, BiConsumer setAggs) + throws IOException { InternalAggregations[] results = buildSubAggsForBuckets(Arrays.stream(buckets).mapToLong(bucketToOrd).toArray()); for (int i = 0; i < buckets.length; i++) { setAggs.accept(buckets[i], results[i]); @@ -205,8 +211,11 @@ protected final void buildSubAggsForBuckets(B[] buckets, * @param bucketToOrd how to convert a bucket into an ordinal * @param setAggs how to set the sub-aggregation results on a bucket */ - protected final void buildSubAggsForAllBuckets(B[][] buckets, - ToLongFunction bucketToOrd, BiConsumer setAggs) throws IOException { + protected final void buildSubAggsForAllBuckets( + B[][] buckets, + ToLongFunction bucketToOrd, + BiConsumer setAggs + ) throws IOException { int totalBucketOrdsToCollect = 0; for (B[] bucketsForOneResult : buckets) { totalBucketOrdsToCollect += bucketsForOneResult.length; @@ -235,8 +244,12 @@ protected final void buildSubAggsForAllBuckets(B[][] buckets, * @param bucketBuilder how to build a bucket * @param resultBuilder how to build a result from buckets */ - protected final InternalAggregation[] buildAggregationsForFixedBucketCount(long[] owningBucketOrds, int bucketsPerOwningBucketOrd, - BucketBuilderForFixedCount bucketBuilder, Function, InternalAggregation> resultBuilder) throws IOException { + protected final InternalAggregation[] buildAggregationsForFixedBucketCount( + long[] owningBucketOrds, + int bucketsPerOwningBucketOrd, + BucketBuilderForFixedCount bucketBuilder, + Function, InternalAggregation> resultBuilder + ) throws IOException { int totalBuckets = owningBucketOrds.length * bucketsPerOwningBucketOrd; long[] bucketOrdsToCollect = new long[totalBuckets]; int bucketOrdIdx = 0; @@ -252,13 +265,19 @@ protected final InternalAggregation[] buildAggregationsForFixedBucketCount(l for (int owningOrdIdx = 0; owningOrdIdx < owningBucketOrds.length; owningOrdIdx++) { List buckets = new ArrayList<>(bucketsPerOwningBucketOrd); for (int offsetInOwningOrd = 0; offsetInOwningOrd < bucketsPerOwningBucketOrd; offsetInOwningOrd++) { - buckets.add(bucketBuilder.build( - offsetInOwningOrd, bucketDocCount(bucketOrdsToCollect[bucketOrdIdx]), subAggregationResults[bucketOrdIdx++])); + buckets.add( + bucketBuilder.build( + offsetInOwningOrd, + bucketDocCount(bucketOrdsToCollect[bucketOrdIdx]), + subAggregationResults[bucketOrdIdx++] + ) + ); } results[owningOrdIdx] = resultBuilder.apply(buckets); } return results; } + @FunctionalInterface protected interface BucketBuilderForFixedCount { B build(int offsetInOwningOrd, long docCount, InternalAggregations subAggregationResults); 
@@ -269,8 +288,8 @@ protected interface BucketBuilderForFixedCount { * @param owningBucketOrds owning bucket ordinals for which to build the results * @param resultBuilder how to build a result from the sub aggregation results */ - protected final InternalAggregation[] buildAggregationsForSingleBucket(long[] owningBucketOrds, - SingleBucketResultBuilder resultBuilder) throws IOException { + protected final InternalAggregation[] buildAggregationsForSingleBucket(long[] owningBucketOrds, SingleBucketResultBuilder resultBuilder) + throws IOException { /* * It'd be entirely reasonable to call * `consumeBucketsAndMaybeBreak(owningBucketOrds.length)` @@ -283,6 +302,7 @@ protected final InternalAggregation[] buildAggregationsForSingleBucket(long[] ow } return results; } + @FunctionalInterface protected interface SingleBucketResultBuilder { InternalAggregation build(long owningBucketOrd, InternalAggregations subAggregationResults); @@ -294,8 +314,12 @@ protected interface SingleBucketResultBuilder { * @param owningBucketOrds owning bucket ordinals for which to build the results * @param bucketOrds hash of values to the bucket ordinal */ - protected final InternalAggregation[] buildAggregationsForVariableBuckets(long[] owningBucketOrds, LongKeyedBucketOrds bucketOrds, - BucketBuilderForVariable bucketBuilder, ResultBuilderForVariable resultBuilder) throws IOException { + protected final InternalAggregation[] buildAggregationsForVariableBuckets( + long[] owningBucketOrds, + LongKeyedBucketOrds bucketOrds, + BucketBuilderForVariable bucketBuilder, + ResultBuilderForVariable resultBuilder + ) throws IOException { long totalOrdsToCollect = 0; final int[] bucketsInOrd = new int[owningBucketOrds.length]; for (int ordIdx = 0; ordIdx < owningBucketOrds.length; ordIdx++) { @@ -304,14 +328,15 @@ protected final InternalAggregation[] buildAggregationsForVariableBuckets(lo totalOrdsToCollect += bucketCount; } if (totalOrdsToCollect > Integer.MAX_VALUE) { - throw new AggregationExecutionException("Can't collect more than [" + Integer.MAX_VALUE - + "] buckets but attempted [" + totalOrdsToCollect + "]"); + throw new AggregationExecutionException( + "Can't collect more than [" + Integer.MAX_VALUE + "] buckets but attempted [" + totalOrdsToCollect + "]" + ); } long[] bucketOrdsToCollect = new long[(int) totalOrdsToCollect]; int b = 0; for (int ordIdx = 0; ordIdx < owningBucketOrds.length; ordIdx++) { LongKeyedBucketOrds.BucketOrdsEnum ordsEnum = bucketOrds.ordsEnum(owningBucketOrds[ordIdx]); - while(ordsEnum.next()) { + while (ordsEnum.next()) { bucketOrdsToCollect[b++] = ordsEnum.ord(); } } @@ -322,10 +347,17 @@ protected final InternalAggregation[] buildAggregationsForVariableBuckets(lo for (int ordIdx = 0; ordIdx < owningBucketOrds.length; ordIdx++) { List buckets = new ArrayList<>(bucketsInOrd[ordIdx]); LongKeyedBucketOrds.BucketOrdsEnum ordsEnum = bucketOrds.ordsEnum(owningBucketOrds[ordIdx]); - while(ordsEnum.next()) { + while (ordsEnum.next()) { if (bucketOrdsToCollect[b] != ordsEnum.ord()) { - throw new AggregationExecutionException("Iteration order of [" + bucketOrds + "] changed without mutating. [" - + ordsEnum.ord() + "] should have been [" + bucketOrdsToCollect[b] + "]"); + throw new AggregationExecutionException( + "Iteration order of [" + + bucketOrds + + "] changed without mutating. 
[" + + ordsEnum.ord() + + "] should have been [" + + bucketOrdsToCollect[b] + + "]" + ); } buckets.add(bucketBuilder.build(ordsEnum.value(), bucketDocCount(ordsEnum.ord()), subAggregationResults[b++])); } @@ -333,10 +365,12 @@ protected final InternalAggregation[] buildAggregationsForVariableBuckets(lo } return results; } + @FunctionalInterface protected interface BucketBuilderForVariable { B build(long bucketValue, long docCount, InternalAggregations subAggregationResults); } + @FunctionalInterface protected interface ResultBuilderForVariable { InternalAggregation build(long owninigBucketOrd, List buckets); @@ -365,9 +399,14 @@ public BucketComparator bucketComparator(String key, SortOrder order) { if (key == null || "doc_count".equals(key)) { return (lhs, rhs) -> order.reverseMul() * Long.compare(bucketDocCount(lhs), bucketDocCount(rhs)); } - throw new IllegalArgumentException("Ordering on a single-bucket aggregation can only be done on its doc_count. " + - "Either drop the key (a la \"" + name() + "\") or change it to \"doc_count\" (a la \"" + name() + - ".doc_count\") or \"key\"."); + throw new IllegalArgumentException( + "Ordering on a single-bucket aggregation can only be done on its doc_count. " + + "Either drop the key (a la \"" + + name() + + "\") or change it to \"doc_count\" (a la \"" + + name() + + ".doc_count\") or \"key\"." + ); } public static boolean descendsFromGlobalAggregator(Aggregator parent) { diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/DeferableBucketAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/DeferableBucketAggregator.java index 3e18498317a78..192d3b1f84858 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/DeferableBucketAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/DeferableBucketAggregator.java @@ -29,8 +29,13 @@ public abstract class DeferableBucketAggregator extends BucketsAggregator { private DeferringBucketCollector deferringCollector; private List deferredAggregationNames; - protected DeferableBucketAggregator(String name, AggregatorFactories factories, AggregationContext context, Aggregator parent, - Map metadata) throws IOException { + protected DeferableBucketAggregator( + String name, + AggregatorFactories factories, + AggregationContext context, + Aggregator parent, + Map metadata + ) throws IOException { // Assumes that we're collecting MANY buckets. super(name, factories, context, parent, CardinalityUpperBound.MANY, metadata); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/DeferringBucketCollector.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/DeferringBucketCollector.java index ce4d13c031961..7fbbe20c69c06 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/DeferringBucketCollector.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/DeferringBucketCollector.java @@ -97,19 +97,22 @@ public void collectDebugInfo(BiConsumer add) { @Override public LeafBucketCollector getLeafCollector(LeafReaderContext ctx) throws IOException { throw new IllegalStateException( - "Deferred collectors cannot be collected directly. They must be collected through the recording wrapper."); + "Deferred collectors cannot be collected directly. They must be collected through the recording wrapper." 
+ ); } @Override public void preCollection() throws IOException { throw new IllegalStateException( - "Deferred collectors cannot be collected directly. They must be collected through the recording wrapper."); + "Deferred collectors cannot be collected directly. They must be collected through the recording wrapper." + ); } @Override public void postCollection() throws IOException { throw new IllegalStateException( - "Deferred collectors cannot be collected directly. They must be collected through the recording wrapper."); + "Deferred collectors cannot be collected directly. They must be collected through the recording wrapper." + ); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/InternalSingleBucketAggregation.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/InternalSingleBucketAggregation.java index 5498ce2e884bf..4e5756ba869e6 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/InternalSingleBucketAggregation.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/InternalSingleBucketAggregation.java @@ -40,8 +40,7 @@ public abstract class InternalSingleBucketAggregation extends InternalAggregatio * @param docCount The document count in the single bucket. * @param aggregations The already built sub-aggregations that are associated with the bucket. */ - protected InternalSingleBucketAggregation(String name, long docCount, InternalAggregations aggregations, - Map metadata) { + protected InternalSingleBucketAggregation(String name, long docCount, InternalAggregations aggregations, Map metadata) { super(name, metadata); this.docCount = docCount; this.aggregations = aggregations; @@ -108,14 +107,17 @@ public InternalAggregation reduce(List aggregations, Reduce */ @Override public final InternalAggregation reducePipelines( - InternalAggregation reducedAggs, ReduceContext reduceContext, PipelineTree pipelineTree) { + InternalAggregation reducedAggs, + ReduceContext reduceContext, + PipelineTree pipelineTree + ) { assert reduceContext.isFinalReduce(); InternalAggregation reduced = this; if (pipelineTree.hasSubTrees()) { List aggs = new ArrayList<>(); for (Aggregation agg : getAggregations().asList()) { PipelineTree subTree = pipelineTree.subTree(agg.getName()); - aggs.add(((InternalAggregation)agg).reducePipelines((InternalAggregation)agg, reduceContext, subTree)); + aggs.add(((InternalAggregation) agg).reducePipelines((InternalAggregation) agg, reduceContext, subTree)); } InternalAggregations reducedSubAggs = InternalAggregations.from(aggs); reduced = create(reducedSubAggs); @@ -154,8 +156,12 @@ public XContentBuilder doXContentBody(XContentBuilder builder, Params params) th public final double sortValue(String key) { if (key != null && false == key.equals("doc_count")) { throw new IllegalArgumentException( - "Unknown value key [" + key + "] for single-bucket aggregation [" + getName() + - "]. Either use [doc_count] as key or drop the key all together."); + "Unknown value key [" + + key + + "] for single-bucket aggregation [" + + getName() + + "]. Either use [doc_count] as key or drop the key all together." 
+ ); } return docCount; } @@ -191,8 +197,7 @@ public boolean equals(Object obj) { if (super.equals(obj) == false) return false; InternalSingleBucketAggregation other = (InternalSingleBucketAggregation) obj; - return Objects.equals(docCount, other.docCount) && - Objects.equals(aggregations, other.aggregations); + return Objects.equals(docCount, other.docCount) && Objects.equals(aggregations, other.aggregations); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/IteratorAndCurrent.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/IteratorAndCurrent.java index 78d6c38337d0a..af8757e10ccf7 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/IteratorAndCurrent.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/IteratorAndCurrent.java @@ -39,4 +39,3 @@ public B next() { return current = iterator.next(); } } - diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/ParsedSingleBucketAggregation.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/ParsedSingleBucketAggregation.java index b9be0b8aa87df..8d708d671ce23 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/ParsedSingleBucketAggregation.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/ParsedSingleBucketAggregation.java @@ -51,7 +51,7 @@ public XContentBuilder doXContentBody(XContentBuilder builder, Params params) th } protected static T parseXContent(final XContentParser parser, T aggregation, String name) - throws IOException { + throws IOException { aggregation.setName(name); XContentParser.Token token = parser.currentToken(); String currentFieldName = parser.currentName(); @@ -72,8 +72,12 @@ protected static T parseXContent(final if (CommonFields.META.getPreferredName().equals(currentFieldName)) { aggregation.metadata = parser.map(); } else { - XContentParserUtils.parseTypedKeysObject(parser, Aggregation.TYPED_KEYS_DELIMITER, Aggregation.class, - aggregations::add); + XContentParserUtils.parseTypedKeysObject( + parser, + Aggregation.TYPED_KEYS_DELIMITER, + Aggregation.class, + aggregations::add + ); } } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/SingleBucketAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/SingleBucketAggregator.java index 392aa47b4be89..a6086df019dac 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/SingleBucketAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/SingleBucketAggregator.java @@ -10,5 +10,4 @@ /** * A bucket aggregator that doesn't create new buckets. 
  */
-public interface SingleBucketAggregator {
-}
+public interface SingleBucketAggregator {}
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/adjacency/AdjacencyMatrix.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/adjacency/AdjacencyMatrix.java
index 1589ddcb09860..7f2785beb9bc1 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/adjacency/AdjacencyMatrix.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/adjacency/AdjacencyMatrix.java
@@ -23,8 +23,7 @@ public interface AdjacencyMatrix extends MultiBucketsAggregation {
      * A bucket associated with a specific filter or pair (identified by its
      * key)
      */
-    interface Bucket extends MultiBucketsAggregation.Bucket {
-    }
+    interface Bucket extends MultiBucketsAggregation.Bucket {}
 
     /**
      * The buckets created by this aggregation.
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/adjacency/AdjacencyMatrixAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/adjacency/AdjacencyMatrixAggregationBuilder.java
index be55f5dad871d..0e474c4e37276 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/adjacency/AdjacencyMatrixAggregationBuilder.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/adjacency/AdjacencyMatrixAggregationBuilder.java
@@ -9,10 +9,10 @@
 package org.elasticsearch.search.aggregations.bucket.adjacency;
 
 import org.apache.lucene.search.BooleanQuery;
-import org.elasticsearch.common.xcontent.ParseField;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.xcontent.ObjectParser;
+import org.elasticsearch.common.xcontent.ParseField;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.index.query.QueryBuilder;
@@ -46,8 +46,10 @@ public class AdjacencyMatrixAggregationBuilder extends AbstractAggregationBuilde
     private List filters;
     private String separator = DEFAULT_SEPARATOR;
 
-    private static final ObjectParser PARSER =
-        ObjectParser.fromBuilder(NAME, AdjacencyMatrixAggregationBuilder::new);
+    private static final ObjectParser PARSER = ObjectParser.fromBuilder(
+        NAME,
+        AdjacencyMatrixAggregationBuilder::new
+    );
     static {
         PARSER.declareString(AdjacencyMatrixAggregationBuilder::separator, SEPARATOR_FIELD);
         PARSER.declareNamedObjects(AdjacencyMatrixAggregationBuilder::setFiltersAsList, KeyedFilter.PARSER, FILTERS_FIELD);
@@ -67,7 +69,6 @@ protected AdjacencyMatrixAggregationBuilder(String name) {
         super(name);
     }
 
-
     /**
      * @param name
      *            the name of this aggregation
@@ -78,8 +79,11 @@ public AdjacencyMatrixAggregationBuilder(String name, Map
         this(name, DEFAULT_SEPARATOR, filters);
     }
 
-    protected AdjacencyMatrixAggregationBuilder(AdjacencyMatrixAggregationBuilder clone,
-        Builder factoriesBuilder, Map metadata) {
+    protected AdjacencyMatrixAggregationBuilder(
+        AdjacencyMatrixAggregationBuilder clone,
+        Builder factoriesBuilder,
+        Map metadata
+    ) {
         super(clone, factoriesBuilder, metadata);
         this.filters = new ArrayList<>(clone.filters);
         this.separator = clone.separator;
@@ -130,7 +134,7 @@ protected void doWriteTo(StreamOutput out) throws IOException {
 
     private void checkConsistency() {
         if ((filters == null) || (filters.size() == 0)) {
-            throw new IllegalStateException("[" + name + "] is missing : " + FILTERS_FIELD.getPreferredName() + " parameter");
+
throw new IllegalStateException("[" + name + "] is missing : " + FILTERS_FIELD.getPreferredName() + " parameter"); } } @@ -153,8 +157,6 @@ private AdjacencyMatrixAggregationBuilder setFiltersAsList(List fil return this; } - - /** * Set the separator used to join pairs of bucket keys */ @@ -177,7 +179,7 @@ public String separator() { * Get the filters. This will be an unmodifiable map */ public Map filters() { - Mapresult = new HashMap<>(this.filters.size()); + Map result = new HashMap<>(this.filters.size()); for (KeyedFilter keyedFilter : this.filters) { result.put(keyedFilter.key(), keyedFilter.filter()); } @@ -201,14 +203,19 @@ protected AdjacencyMatrixAggregationBuilder doRewrite(QueryRewriteContext queryR @Override protected AggregatorFactory doBuild(AggregationContext context, AggregatorFactory parent, Builder subFactoriesBuilder) - throws IOException { + throws IOException { int maxFilters = BooleanQuery.getMaxClauseCount(); - if (filters.size() > maxFilters){ + if (filters.size() > maxFilters) { throw new IllegalArgumentException( - "Number of filters is too large, must be less than or equal to: [" + maxFilters + "] but was [" - + filters.size() + "]." - + "This limit can be set by changing the [" + SearchModule.INDICES_MAX_CLAUSE_COUNT_SETTING.getKey() - + "] setting."); + "Number of filters is too large, must be less than or equal to: [" + + maxFilters + + "] but was [" + + filters.size() + + "]." + + "This limit can be set by changing the [" + + SearchModule.INDICES_MAX_CLAUSE_COUNT_SETTING.getKey() + + "] setting." + ); } return new AdjacencyMatrixAggregatorFactory(name, filters, separator, context, parent, subFactoriesBuilder, metadata); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/adjacency/AdjacencyMatrixAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/adjacency/AdjacencyMatrixAggregator.java index 33e496f8c4a00..b7e25985bffc5 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/adjacency/AdjacencyMatrixAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/adjacency/AdjacencyMatrixAggregator.java @@ -11,12 +11,12 @@ import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.Weight; import org.apache.lucene.util.Bits; -import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.xcontent.ObjectParser.NamedObjectParser; +import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.xcontent.ToXContentFragment; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; @@ -51,9 +51,10 @@ protected static class KeyedFilter implements Writeable, ToXContentFragment { private final String key; private final QueryBuilder filter; - public static final NamedObjectParser PARSER = - (XContentParser p, String aggName, String name) -> - new KeyedFilter(name, parseInnerQueryBuilder(p)); + public static final NamedObjectParser PARSER = ( + XContentParser p, + String aggName, + String name) -> new KeyedFilter(name, parseInnerQueryBuilder(p)); public KeyedFilter(String key, QueryBuilder filter) { if (key == null) { @@ -118,8 +119,16 @@ public boolean equals(Object obj) { private final int totalNumIntersections; private final String 
separator; - public AdjacencyMatrixAggregator(String name, AggregatorFactories factories, String separator, String[] keys, - Weight[] filters, AggregationContext context, Aggregator parent, Map metadata) throws IOException { + public AdjacencyMatrixAggregator( + String name, + AggregatorFactories factories, + String separator, + String[] keys, + Weight[] filters, + AggregationContext context, + Aggregator parent, + Map metadata + ) throws IOException { super(name, factories, context, parent, CardinalityUpperBound.MANY, metadata); this.separator = separator; this.keys = keys; @@ -194,8 +203,11 @@ public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws I // a date-histogram where we will look for transactions over time and can expect many // empty buckets. if (docCount > 0) { - InternalAdjacencyMatrix.InternalBucket bucket = new InternalAdjacencyMatrix.InternalBucket(keys[i], - docCount, bucketSubAggs[builtBucketIndex++]); + InternalAdjacencyMatrix.InternalBucket bucket = new InternalAdjacencyMatrix.InternalBucket( + keys[i], + docCount, + bucketSubAggs[builtBucketIndex++] + ); buckets.add(bucket); } } @@ -207,8 +219,11 @@ public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws I // Empty buckets are not returned due to potential for very sparse matrices if (docCount > 0) { String intersectKey = keys[i] + separator + keys[j]; - InternalAdjacencyMatrix.InternalBucket bucket = new InternalAdjacencyMatrix.InternalBucket(intersectKey, - docCount, bucketSubAggs[builtBucketIndex++]); + InternalAdjacencyMatrix.InternalBucket bucket = new InternalAdjacencyMatrix.InternalBucket( + intersectKey, + docCount, + bucketSubAggs[builtBucketIndex++] + ); buckets.add(bucket); } pos++; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/adjacency/AdjacencyMatrixAggregatorFactory.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/adjacency/AdjacencyMatrixAggregatorFactory.java index 570352d082911..46447c5e969cf 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/adjacency/AdjacencyMatrixAggregatorFactory.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/adjacency/AdjacencyMatrixAggregatorFactory.java @@ -29,9 +29,15 @@ public class AdjacencyMatrixAggregatorFactory extends AggregatorFactory { private final Weight[] weights; private final String separator; - public AdjacencyMatrixAggregatorFactory(String name, List filters, String separator, - AggregationContext context, AggregatorFactory parent, - AggregatorFactories.Builder subFactories, Map metadata) throws IOException { + public AdjacencyMatrixAggregatorFactory( + String name, + List filters, + String separator, + AggregationContext context, + AggregatorFactory parent, + AggregatorFactories.Builder subFactories, + Map metadata + ) throws IOException { super(name, context, parent, subFactories, metadata); IndexSearcher contextSearcher = context.searcher(); this.separator = separator; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/adjacency/InternalAdjacencyMatrix.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/adjacency/InternalAdjacencyMatrix.java index 4edd643790f37..0cae5b815246a 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/adjacency/InternalAdjacencyMatrix.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/adjacency/InternalAdjacencyMatrix.java @@ -24,9 +24,9 @@ import java.util.Map; import 
java.util.Objects; -public class InternalAdjacencyMatrix - extends InternalMultiBucketAggregation - implements AdjacencyMatrix { +public class InternalAdjacencyMatrix extends InternalMultiBucketAggregation + implements + AdjacencyMatrix { public static class InternalBucket extends InternalMultiBucketAggregation.InternalBucket implements AdjacencyMatrix.Bucket { private final String key; @@ -95,8 +95,8 @@ public boolean equals(Object other) { } InternalBucket that = (InternalBucket) other; return Objects.equals(key, that.key) - && Objects.equals(docCount, that.docCount) - && Objects.equals(aggregations, that.aggregations); + && Objects.equals(docCount, that.docCount) + && Objects.equals(aggregations, that.aggregations); } @Override @@ -173,7 +173,7 @@ public InternalAggregation reduce(List aggregations, Reduce InternalAdjacencyMatrix filters = (InternalAdjacencyMatrix) aggregation; for (InternalBucket bucket : filters.buckets) { List sameRangeList = bucketsMap.get(bucket.key); - if(sameRangeList == null){ + if (sameRangeList == null) { sameRangeList = new ArrayList<>(aggregations.size()); bucketsMap.put(bucket.key, sameRangeList); } @@ -184,7 +184,7 @@ public InternalAggregation reduce(List aggregations, Reduce ArrayList reducedBuckets = new ArrayList<>(bucketsMap.size()); for (List sameRangeList : bucketsMap.values()) { InternalBucket reducedBucket = reduceBucket(sameRangeList, reduceContext); - if(reducedBucket.docCount >= 1){ + if (reducedBucket.docCount >= 1) { reducedBuckets.add(reducedBucket); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/adjacency/ParsedAdjacencyMatrix.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/adjacency/ParsedAdjacencyMatrix.java index 0d6d29cc35b73..0833f7c5064f8 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/adjacency/ParsedAdjacencyMatrix.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/adjacency/ParsedAdjacencyMatrix.java @@ -42,12 +42,17 @@ public ParsedBucket getBucketByKey(String key) { return bucketMap.get(key); } - private static final ObjectParser PARSER = - new ObjectParser<>(ParsedAdjacencyMatrix.class.getSimpleName(), true, ParsedAdjacencyMatrix::new); + private static final ObjectParser PARSER = new ObjectParser<>( + ParsedAdjacencyMatrix.class.getSimpleName(), + true, + ParsedAdjacencyMatrix::new + ); static { - declareMultiBucketAggregationFields(PARSER, - parser -> ParsedBucket.fromXContent(parser), - parser -> ParsedBucket.fromXContent(parser)); + declareMultiBucketAggregationFields( + PARSER, + parser -> ParsedBucket.fromXContent(parser), + parser -> ParsedBucket.fromXContent(parser) + ); } public static ParsedAdjacencyMatrix fromXContent(XContentParser parser, String name) throws IOException { diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/BinaryValuesSource.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/BinaryValuesSource.java index 1a079717477cb..04ef6088a3f09 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/BinaryValuesSource.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/BinaryValuesSource.java @@ -37,9 +37,16 @@ class BinaryValuesSource extends SingleDimensionValuesSource { private ObjectArray valueBuilders; private BytesRef currentValue; - BinaryValuesSource(BigArrays bigArrays, LongConsumer breakerConsumer, - MappedFieldType fieldType, CheckedFunction 
docValuesFunc, - DocValueFormat format, boolean missingBucket, int size, int reverseMul) { + BinaryValuesSource( + BigArrays bigArrays, + LongConsumer breakerConsumer, + MappedFieldType fieldType, + CheckedFunction docValuesFunc, + DocValueFormat format, + boolean missingBucket, + int size, + int reverseMul + ) { super(bigArrays, format, fieldType, missingBucket, size, reverseMul); this.breakerConsumer = breakerConsumer; this.docValuesFunc = docValuesFunc; @@ -49,8 +56,8 @@ class BinaryValuesSource extends SingleDimensionValuesSource { @Override void copyCurrent(int slot) { - values = bigArrays.grow(values, slot+1); - valueBuilders = bigArrays.grow(valueBuilders, slot+1); + values = bigArrays.grow(values, slot + 1); + valueBuilders = bigArrays.grow(valueBuilders, slot + 1); BytesRefBuilder builder = valueBuilders.get(slot); int byteSize = builder == null ? 0 : builder.bytes().length; if (builder == null) { @@ -138,7 +145,7 @@ void setAfter(Comparable value) { @Override BytesRef toComparable(int slot) { - return values.get(slot); + return values.get(slot); } @Override @@ -177,9 +184,9 @@ public void collect(int doc, long bucket) throws IOException { @Override SortedDocsProducer createSortedDocsProducerOrNull(IndexReader reader, Query query) { - if (checkIfSortedDocsIsApplicable(reader, fieldType) == false || - fieldType instanceof StringFieldType == false || - (query != null && query.getClass() != MatchAllDocsQuery.class)) { + if (checkIfSortedDocsIsApplicable(reader, fieldType) == false + || fieldType instanceof StringFieldType == false + || (query != null && query.getClass() != MatchAllDocsQuery.class)) { return null; } return new TermsSortedDocsProducer(fieldType.name()); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregation.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregation.java index 838573b8d0c8f..9531fab37bf3f 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregation.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregation.java @@ -29,8 +29,7 @@ interface Bucket extends MultiBucketsAggregation.Bucket { */ Map afterKey(); - static XContentBuilder bucketToXContent(CompositeAggregation.Bucket bucket, - XContentBuilder builder, Params params) throws IOException { + static XContentBuilder bucketToXContent(CompositeAggregation.Bucket bucket, XContentBuilder builder, Params params) throws IOException { builder.startObject(); buildCompositeMap(CommonFields.KEY.getPreferredName(), bucket.getKey(), builder); builder.field(CommonFields.DOC_COUNT.getPreferredName(), bucket.getDocCount()); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregationBuilder.java index c619a91a0b133..614a0a84aa1a0 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregationBuilder.java @@ -8,10 +8,10 @@ package org.elasticsearch.search.aggregations.bucket.composite; -import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import 
org.elasticsearch.common.xcontent.ConstructingObjectParser; +import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.search.aggregations.AbstractAggregationBuilder; import org.elasticsearch.search.aggregations.AggregationBuilder; @@ -40,14 +40,16 @@ public class CompositeAggregationBuilder extends AbstractAggregationBuilder PARSER = new ConstructingObjectParser<>( - NAME, false, (args, name) -> { - @SuppressWarnings("unchecked") - List> sources = (List>) args[0]; - return new CompositeAggregationBuilder(name, sources); - }); + NAME, + false, + (args, name) -> { + @SuppressWarnings("unchecked") + List> sources = (List>) args[0]; + return new CompositeAggregationBuilder(name, sources); + } + ); static { - PARSER.declareObjectArray(constructorArg(), - (p, c) -> CompositeValuesSourceParserHelper.fromXContent(p), SOURCES_FIELD_NAME); + PARSER.declareObjectArray(constructorArg(), (p, c) -> CompositeValuesSourceParserHelper.fromXContent(p), SOURCES_FIELD_NAME); PARSER.declareInt(CompositeAggregationBuilder::size, SIZE_FIELD_NAME); PARSER.declareObject(CompositeAggregationBuilder::aggregateAfter, (p, context) -> p.map(), AFTER_FIELD_NAME); } @@ -70,8 +72,11 @@ public CompositeAggregationBuilder(String name, List metadata) { + protected CompositeAggregationBuilder( + CompositeAggregationBuilder clone, + AggregatorFactories.Builder factoriesBuilder, + Map metadata + ) { super(clone, factoriesBuilder, metadata); this.sources = new ArrayList<>(clone.sources); this.after = clone.after; @@ -194,12 +199,19 @@ private static void validateSources(List> source } @Override - protected AggregatorFactory doBuild(AggregationContext context, AggregatorFactory parent, - AggregatorFactories.Builder subfactoriesBuilder) throws IOException { + protected AggregatorFactory doBuild( + AggregationContext context, + AggregatorFactory parent, + AggregatorFactories.Builder subfactoriesBuilder + ) throws IOException { AggregatorFactory invalid = validateParentAggregations(parent); if (invalid != null) { - throw new IllegalArgumentException("[composite] aggregation cannot be used with a parent aggregation of" + - " type: [" + invalid.getClass().getSimpleName() + "]"); + throw new IllegalArgumentException( + "[composite] aggregation cannot be used with a parent aggregation of" + + " type: [" + + invalid.getClass().getSimpleName() + + "]" + ); } CompositeValuesSourceConfig[] configs = new CompositeValuesSourceConfig[sources.size()]; for (int i = 0; i < configs.length; i++) { @@ -211,8 +223,7 @@ protected AggregatorFactory doBuild(AggregationContext context, AggregatorFactor final CompositeKey afterKey; if (after != null) { if (after.size() != configs.length) { - throw new IllegalArgumentException("[after] has " + after.size() + - " value(s) but [sources] has " + sources.size()); + throw new IllegalArgumentException("[after] has " + after.size() + " value(s) but [sources] has " + sources.size()); } @SuppressWarnings("rawtypes") Comparable[] values = new Comparable[sources.size()]; @@ -227,8 +238,13 @@ protected AggregatorFactory doBuild(AggregationContext context, AggregatorFactor } else if (obj instanceof Comparable) { values[i] = (Comparable) obj; } else { - throw new IllegalArgumentException("Invalid value for [after." + sources.get(i).name() + - "], expected comparable, got [" + (obj == null ? "null" : obj.getClass().getSimpleName()) + "]"); + throw new IllegalArgumentException( + "Invalid value for [after." 
+ + sources.get(i).name() + + "], expected comparable, got [" + + (obj == null ? "null" : obj.getClass().getSimpleName()) + + "]" + ); } } afterKey = new CompositeKey(values); @@ -238,13 +254,12 @@ protected AggregatorFactory doBuild(AggregationContext context, AggregatorFactor return new CompositeAggregationFactory(name, context, parent, subfactoriesBuilder, metadata, size, configs, afterKey); } - @Override protected XContentBuilder internalXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); builder.field(SIZE_FIELD_NAME.getPreferredName(), size); builder.startArray(SOURCES_FIELD_NAME.getPreferredName()); - for (CompositeValuesSourceBuilder source: sources) { + for (CompositeValuesSourceBuilder source : sources) { CompositeValuesSourceParserHelper.toXContent(source, builder, params); } builder.endArray(); @@ -266,8 +281,6 @@ public boolean equals(Object obj) { if (obj == null || getClass() != obj.getClass()) return false; if (super.equals(obj) == false) return false; CompositeAggregationBuilder other = (CompositeAggregationBuilder) obj; - return size == other.size && - Objects.equals(sources, other.sources) && - Objects.equals(after, other.after); + return size == other.size && Objects.equals(sources, other.sources) && Objects.equals(after, other.after); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregationFactory.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregationFactory.java index 9b98d96f54675..a26fc7ec44fc7 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregationFactory.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregationFactory.java @@ -22,9 +22,16 @@ class CompositeAggregationFactory extends AggregatorFactory { private final CompositeValuesSourceConfig[] sources; private final CompositeKey afterKey; - CompositeAggregationFactory(String name, AggregationContext context, AggregatorFactory parent, - AggregatorFactories.Builder subFactoriesBuilder, Map metadata, - int size, CompositeValuesSourceConfig[] sources, CompositeKey afterKey) throws IOException { + CompositeAggregationFactory( + String name, + AggregationContext context, + AggregatorFactory parent, + AggregatorFactories.Builder subFactoriesBuilder, + Map metadata, + int size, + CompositeValuesSourceConfig[] sources, + CompositeKey afterKey + ) throws IOException { super(name, context, parent, subFactoriesBuilder, metadata); this.size = size; this.sources = sources; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregator.java index a9eef47d3d1da..ff3aefc847202 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregator.java @@ -79,9 +79,16 @@ final class CompositeAggregator extends BucketsAggregator { private boolean earlyTerminated; - CompositeAggregator(String name, AggregatorFactories factories, AggregationContext context, Aggregator parent, - Map metadata, - int size, CompositeValuesSourceConfig[] sourceConfigs, CompositeKey rawAfterKey) throws IOException { + CompositeAggregator( + String name, + AggregatorFactories factories, + AggregationContext context, + 
Aggregator parent, + Map metadata, + int size, + CompositeValuesSourceConfig[] sourceConfigs, + CompositeKey rawAfterKey + ) throws IOException { super(name, factories, context, parent, CardinalityUpperBound.MANY, metadata); this.size = size; this.sourceNames = Arrays.stream(sourceConfigs).map(CompositeValuesSourceConfig::name).collect(Collectors.toList()); @@ -91,9 +98,17 @@ final class CompositeAggregator extends BucketsAggregator { // check that the provided size is not greater than the search.max_buckets setting int bucketLimit = context.multiBucketConsumer().getLimit(); if (size > bucketLimit) { - throw new MultiBucketConsumerService.TooManyBucketsException("Trying to create too many buckets. Must be less than or equal" + - " to: [" + bucketLimit + "] but was [" + size + "]. This limit can be set by changing the [" + MAX_BUCKET_SETTING.getKey() + - "] cluster level setting.", bucketLimit); + throw new MultiBucketConsumerService.TooManyBucketsException( + "Trying to create too many buckets. Must be less than or equal" + + " to: [" + + bucketLimit + + "] but was [" + + size + + "]. This limit can be set by changing the [" + + MAX_BUCKET_SETTING.getKey() + + "] cluster level setting.", + bucketLimit + ); } this.sourceConfigs = sourceConfigs; for (int i = 0; i < sourceConfigs.length; i++) { @@ -109,8 +124,10 @@ final class CompositeAggregator extends BucketsAggregator { try { this.queue.setAfterKey(rawAfterKey); } catch (IllegalArgumentException ex) { - throw new ElasticsearchParseException("Cannot set after key in the composite aggregation [" + name + "] - " + - ex.getMessage(), ex); + throw new ElasticsearchParseException( + "Cannot set after key in the composite aggregation [" + name + "] - " + ex.getMessage(), + ex + ); } } this.rawAfterKey = rawAfterKey; @@ -159,17 +176,24 @@ public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws I long docCount = queue.getDocCount(slot); buckets[queue.size()] = new InternalComposite.InternalBucket(sourceNames, formats, key, reverseMuls, docCount, aggs); } - CompositeKey lastBucket = num > 0 ? buckets[num-1].getRawKey() : null; + CompositeKey lastBucket = num > 0 ? buckets[num - 1].getRawKey() : null; return new InternalAggregation[] { - new InternalComposite(name, size, sourceNames, formats, Arrays.asList(buckets), lastBucket, reverseMuls, - earlyTerminated, metadata()) - }; + new InternalComposite( + name, + size, + sourceNames, + formats, + Arrays.asList(buckets), + lastBucket, + reverseMuls, + earlyTerminated, + metadata() + ) }; } @Override public InternalAggregation buildEmptyAggregation() { - return new InternalComposite(name, size, sourceNames, formats, Collections.emptyList(), null, reverseMuls, - false, metadata()); + return new InternalComposite(name, size, sourceNames, formats, Collections.emptyList(), null, reverseMuls, false, metadata()); } private void finishLeaf() { @@ -218,11 +242,11 @@ private Sort buildIndexSortPrefix(LeafReaderContext context) throws IOException SingleDimensionValuesSource source = sources[i]; SortField indexSortField = indexSort.getSort()[i]; if (source.fieldType == null - // TODO: can we handle missing bucket when using index sort optimization ? - || source.missingBucket - || indexSortField.getField().equals(source.fieldType.name()) == false - || isMaybeMultivalued(context, indexSortField) - || sourceConfig.hasScript()) { + // TODO: can we handle missing bucket when using index sort optimization ? 
+ || source.missingBucket + || indexSortField.getField().equals(source.fieldType.name()) == false + || isMaybeMultivalued(context, indexSortField) + || sourceConfig.hasScript()) { break; } @@ -294,10 +318,12 @@ public LeafFieldComparator getLeafComparator(LeafReaderContext context) throws I return new LongLeafComparator(context) { @Override protected NumericDocValues getNumericDocValues(LeafReaderContext context, String field) - throws IOException { - NumericDocValues dvs = SortedNumericSelector.wrap( - DocValues.getSortedNumeric(context.reader(), field), - delegate.getSelector(), delegate.getNumericType()); + throws IOException { + NumericDocValues dvs = SortedNumericSelector.wrap( + DocValues.getSortedNumeric(context.reader(), field), + delegate.getSelector(), + delegate.getNumericType() + ); return new NumericDocValues() { @Override public long longValue() throws IOException { @@ -347,22 +373,26 @@ private void processLeafFromQuery(LeafReaderContext ctx, Sort indexSortPrefix) t for (int i = 0; i < formats.length; i++) { formats[i] = sources[i].format; } - FieldDoc fieldDoc = SearchAfterBuilder.buildFieldDoc(new SortAndFormats(indexSortPrefix, formats), - Arrays.copyOfRange(rawAfterKey.values(), 0, formats.length), null); + FieldDoc fieldDoc = SearchAfterBuilder.buildFieldDoc( + new SortAndFormats(indexSortPrefix, formats), + Arrays.copyOfRange(rawAfterKey.values(), 0, formats.length), + null + ); if (indexSortPrefix.getSort().length < sources.length) { // include all docs that belong to the partial bucket fieldDoc.doc = -1; } - BooleanQuery newQuery = new BooleanQuery.Builder() - .add(topLevelQuery(), BooleanClause.Occur.MUST) + BooleanQuery newQuery = new BooleanQuery.Builder().add(topLevelQuery(), BooleanClause.Occur.MUST) .add(new SearchAfterSortedDocQuery(applySortFieldRounding(indexSortPrefix), fieldDoc), BooleanClause.Occur.FILTER) .build(); Weight weight = searcher().createWeight(searcher().rewrite(newQuery), ScoreMode.COMPLETE_NO_SCORES, 1f); Scorer scorer = weight.scorer(ctx); if (scorer != null) { DocIdSetIterator docIt = scorer.iterator(); - final LeafBucketCollector inner = queue.getLeafCollector(ctx, - getFirstPassCollector(docIdSetBuilder, indexSortPrefix.getSort().length)); + final LeafBucketCollector inner = queue.getLeafCollector( + ctx, + getFirstPassCollector(docIdSetBuilder, indexSortPrefix.getSort().length) + ); inner.setScorer(scorer); final Bits liveDocs = ctx.reader().getLiveDocs(); while (docIt.nextDoc() != DocIdSetIterator.NO_MORE_DOCS) { @@ -382,8 +412,9 @@ protected LeafBucketCollector getLeafCollector(LeafReaderContext ctx, LeafBucket Sort indexSortPrefix = buildIndexSortPrefix(ctx); int sortPrefixLen = computeSortPrefixLen(indexSortPrefix); - SortedDocsProducer sortedDocsProducer = (sortPrefixLen == 0 && parent == null) ? - sources[0].createSortedDocsProducerOrNull(ctx.reader(), topLevelQuery()) : null; + SortedDocsProducer sortedDocsProducer = (sortPrefixLen == 0 && parent == null) + ? 
sources[0].createSortedDocsProducerOrNull(ctx.reader(), topLevelQuery()) + : null; if (sortedDocsProducer != null) { // Visit documents sorted by the leading source of the composite definition and terminates // when the leading source value is guaranteed to be greater than the lowest composite bucket @@ -526,4 +557,3 @@ private static class Entry { } } } - diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeKey.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeKey.java index f43a8d237a7fc..670b3603583c5 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeKey.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeKey.java @@ -64,8 +64,6 @@ public int hashCode() { @Override public String toString() { - return "CompositeKey{" + - "values=" + Arrays.toString(values) + - '}'; + return "CompositeKey{" + "values=" + Arrays.toString(values) + '}'; } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeValuesCollectorQueue.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeValuesCollectorQueue.java index 00de32979f6f2..b111d352ddfb5 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeValuesCollectorQueue.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeValuesCollectorQueue.java @@ -11,10 +11,10 @@ import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.CollectionTerminatedException; import org.apache.lucene.util.PriorityQueue; -import org.elasticsearch.core.Releasable; -import org.elasticsearch.core.Releasables; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.LongArray; +import org.elasticsearch.core.Releasable; +import org.elasticsearch.core.Releasables; import org.elasticsearch.search.aggregations.LeafBucketCollector; import java.io.IOException; @@ -126,6 +126,7 @@ Comparable getLowerValueLeadSource() { Comparable getUpperValueLeadSource() throws IOException { return size() >= maxSize ? arrays[0].toComparable(top()) : null; } + /** * Returns the document count in slot. */ @@ -140,7 +141,7 @@ private void copyCurrent(int slot, long value) { for (int i = 0; i < arrays.length; i++) { arrays[i].copyCurrent(slot); } - docCounts = bigArrays.grow(docCounts, slot+1); + docCounts = bigArrays.grow(docCounts, slot + 1); docCounts.set(slot, value); } @@ -157,7 +158,7 @@ int compare(int slot1, int slot2) { cmp = arrays[i].compare(slot1, slot2); } if (cmp != 0) { - return cmp > 0 ? i+1 : -(i+1); + return cmp > 0 ? i + 1 : -(i + 1); } } return 0; @@ -200,7 +201,7 @@ private int compareCurrentWithAfter() { for (int i = 0; i < arrays.length; i++) { int cmp = arrays[i].compareCurrentWithAfter(); if (cmp != 0) { - return cmp > 0 ? i+1 : -(i+1); + return cmp > 0 ? i + 1 : -(i + 1); } } return 0; @@ -225,14 +226,15 @@ CompositeKey toCompositeKey(int slot) throws IOException { LeafBucketCollector getLeafCollector(LeafReaderContext context, LeafBucketCollector in) throws IOException { return getLeafCollector(null, context, in); } + /** * Creates the collector that will visit the composite buckets of the matching documents. * If forceLeadSourceValue is not null, the leading source will use this value * for each document. * The provided collector in is called on each composite bucket. 
*/ - LeafBucketCollector getLeafCollector(Comparable forceLeadSourceValue, - LeafReaderContext context, LeafBucketCollector in) throws IOException { + LeafBucketCollector getLeafCollector(Comparable forceLeadSourceValue, LeafReaderContext context, LeafBucketCollector in) + throws IOException { int last = arrays.length - 1; LeafBucketCollector collector = in; boolean requiresRehashingWhenSwitchingLeafReaders = false; @@ -266,7 +268,6 @@ boolean addIfCompetitive(long inc) { return addIfCompetitive(0, inc); } - /** * Add or update the current composite key in the queue if the values are competitive. * diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeValuesSourceBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeValuesSourceBuilder.java index 734dd0641b494..f7d5724b40b6a 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeValuesSourceBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeValuesSourceBuilder.java @@ -115,12 +115,12 @@ public boolean equals(Object o) { @SuppressWarnings("unchecked") AB that = (AB) o; - return Objects.equals(field, that.field()) && - Objects.equals(script, that.script()) && - Objects.equals(userValueTypeHint, that.userValuetypeHint()) && - Objects.equals(missingBucket, that.missingBucket()) && - Objects.equals(order, that.order()) && - Objects.equals(format, that.format()); + return Objects.equals(field, that.field()) + && Objects.equals(script, that.script()) + && Objects.equals(userValueTypeHint, that.userValuetypeHint()) + && Objects.equals(missingBucket, that.missingBucket()) + && Objects.equals(order, that.order()) + && Objects.equals(format, that.format()); } public String name() { @@ -215,7 +215,6 @@ public AB order(String order) { return (AB) this; } - /** * Sets the {@link SortOrder} to use to sort values produced this source */ @@ -257,14 +256,21 @@ public String format() { /** * Actually build the values source and its associated configuration. 
*/ - protected abstract CompositeValuesSourceConfig innerBuild(ValuesSourceRegistry registry, - ValuesSourceConfig config) throws IOException; + protected abstract CompositeValuesSourceConfig innerBuild(ValuesSourceRegistry registry, ValuesSourceConfig config) throws IOException; protected abstract ValuesSourceType getDefaultValuesSourceType(); public final CompositeValuesSourceConfig build(AggregationContext context) throws IOException { - ValuesSourceConfig config = ValuesSourceConfig.resolve(context, - userValueTypeHint, field, script, null, timeZone(), format, getDefaultValuesSourceType()); + ValuesSourceConfig config = ValuesSourceConfig.resolve( + context, + userValueTypeHint, + field, + script, + null, + timeZone(), + format, + getDefaultValuesSourceType() + ); return innerBuild(context.getValuesSourceRegistry(), config); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeValuesSourceConfig.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeValuesSourceConfig.java index 2af553db96e23..d550f2e319595 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeValuesSourceConfig.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeValuesSourceConfig.java @@ -9,8 +9,8 @@ package org.elasticsearch.search.aggregations.bucket.composite; import org.apache.lucene.index.IndexReader; -import org.elasticsearch.core.Nullable; import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.core.Nullable; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.support.ValuesSource; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeValuesSourceParserHelper.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeValuesSourceParserHelper.java index 05d05ef58e58d..060654aa4a664 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeValuesSourceParserHelper.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeValuesSourceParserHelper.java @@ -9,12 +9,12 @@ package org.elasticsearch.search.aggregations.bucket.composite; import org.elasticsearch.Version; -import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.AbstractObjectParser; import org.elasticsearch.common.xcontent.ObjectParser; +import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.xcontent.ToXContent.Params; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; @@ -28,8 +28,7 @@ public class CompositeValuesSourceParserHelper { static , T> void declareValuesSourceFields(AbstractObjectParser objectParser) { - objectParser.declareField(VB::field, XContentParser::text, - new ParseField("field"), ObjectParser.ValueType.STRING); + objectParser.declareField(VB::field, XContentParser::text, new ParseField("field"), ObjectParser.ValueType.STRING); objectParser.declareBoolean(VB::missingBucket, new ParseField("missing_bucket")); objectParser.declareField(VB::userValuetypeHint, p -> { @@ -37,10 +36,14 @@ static , T> void 
declareValuesSource return valueType; }, new ParseField("value_type"), ObjectParser.ValueType.STRING); - objectParser.declareField(VB::script, - (parser, context) -> Script.parse(parser), Script.SCRIPT_PARSE_FIELD, ObjectParser.ValueType.OBJECT_OR_STRING); + objectParser.declareField( + VB::script, + (parser, context) -> Script.parse(parser), + Script.SCRIPT_PARSE_FIELD, + ObjectParser.ValueType.OBJECT_OR_STRING + ); - objectParser.declareField(VB::order, XContentParser::text, new ParseField("order"), ObjectParser.ValueType.STRING); + objectParser.declareField(VB::order, XContentParser::text, new ParseField("order"), ObjectParser.ValueType.STRING); } public static void writeTo(CompositeValuesSourceBuilder builder, StreamOutput out) throws IOException { @@ -53,8 +56,13 @@ public static void writeTo(CompositeValuesSourceBuilder builder, StreamOutput code = 2; } else if (builder.getClass() == GeoTileGridValuesSourceBuilder.class) { if (out.getVersion().before(Version.V_7_5_0)) { - throw new IOException("Attempting to serialize [" + builder.getClass().getSimpleName() - + "] to a node with unsupported version [" + out.getVersion() + "]"); + throw new IOException( + "Attempting to serialize [" + + builder.getClass().getSimpleName() + + "] to a node with unsupported version [" + + out.getVersion() + + "]" + ); } code = 3; } else { @@ -66,7 +74,7 @@ public static void writeTo(CompositeValuesSourceBuilder builder, StreamOutput public static CompositeValuesSourceBuilder readFrom(StreamInput in) throws IOException { int code = in.readByte(); - switch(code) { + switch (code) { case 0: return new TermsValuesSourceBuilder(in); case 1: @@ -94,7 +102,7 @@ public static CompositeValuesSourceBuilder fromXContent(XContentParser parser token = parser.nextToken(); ensureExpectedToken(XContentParser.Token.START_OBJECT, token, parser); final CompositeValuesSourceBuilder builder; - switch(type) { + switch (type) { case TermsValuesSourceBuilder.TYPE: builder = TermsValuesSourceBuilder.parse(name, parser); break; @@ -116,7 +124,7 @@ public static CompositeValuesSourceBuilder fromXContent(XContentParser parser } public static XContentBuilder toXContent(CompositeValuesSourceBuilder source, XContentBuilder builder, Params params) - throws IOException { + throws IOException { builder.startObject(); builder.startObject(source.name()); source.toXContent(builder, params); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/DateHistogramValuesSourceBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/DateHistogramValuesSourceBuilder.java index 605b48fe7cc4a..843d6a8acce2e 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/DateHistogramValuesSourceBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/DateHistogramValuesSourceBuilder.java @@ -10,15 +10,15 @@ import org.apache.lucene.index.IndexReader; import org.elasticsearch.Version; -import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.Rounding; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.core.TimeValue; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.xcontent.ObjectParser; +import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; +import 
org.elasticsearch.core.TimeValue; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.script.Script; import org.elasticsearch.search.DocValueFormat; @@ -68,8 +68,10 @@ CompositeValuesSourceConfig apply( DateHistogramCompositeSupplier.class ); - static final ObjectParser PARSER = - ObjectParser.fromBuilder(TYPE, DateHistogramValuesSourceBuilder::new); + static final ObjectParser PARSER = ObjectParser.fromBuilder( + TYPE, + DateHistogramValuesSourceBuilder::new + ); static { PARSER.declareString(DateHistogramValuesSourceBuilder::format, new ParseField("format")); DateIntervalWrapper.declareIntervalFields(PARSER); @@ -135,8 +137,7 @@ public boolean equals(Object obj) { if (obj == null || getClass() != obj.getClass()) return false; if (super.equals(obj) == false) return false; DateHistogramValuesSourceBuilder other = (DateHistogramValuesSourceBuilder) obj; - return Objects.equals(dateHistogramInterval, other.dateHistogramInterval) - && Objects.equals(timeZone, other.timeZone); + return Objects.equals(dateHistogramInterval, other.dateHistogramInterval) && Objects.equals(timeZone, other.timeZone); } @Override @@ -267,7 +268,8 @@ public static void register(ValuesSourceRegistry.Builder builder) { } ); }, - false); + false + ); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/DoubleValuesSource.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/DoubleValuesSource.java index 9dc915f02641f..747e3a1b14a76 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/DoubleValuesSource.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/DoubleValuesSource.java @@ -33,9 +33,15 @@ class DoubleValuesSource extends SingleDimensionValuesSource { private double currentValue; private boolean missingCurrentValue; - DoubleValuesSource(BigArrays bigArrays, MappedFieldType fieldType, - CheckedFunction docValuesFunc, - DocValueFormat format, boolean missingBucket, int size, int reverseMul) { + DoubleValuesSource( + BigArrays bigArrays, + MappedFieldType fieldType, + CheckedFunction docValuesFunc, + DocValueFormat format, + boolean missingBucket, + int size, + int reverseMul + ) { super(bigArrays, format, fieldType, missingBucket, size, reverseMul); this.docValuesFunc = docValuesFunc; this.bits = missingBucket ? 
new BitArray(100, bigArrays) : null; @@ -44,7 +50,7 @@ class DoubleValuesSource extends SingleDimensionValuesSource { @Override void copyCurrent(int slot) { - values = bigArrays.grow(values, slot+1); + values = bigArrays.grow(values, slot + 1); if (missingBucket && missingCurrentValue) { bits.clear(slot); } else { @@ -121,9 +127,11 @@ void setAfter(Comparable value) { } else if (value instanceof Number) { afterValue = ((Number) value).doubleValue(); } else { - afterValue = format.parseDouble(value.toString(), false, () -> { - throw new IllegalArgumentException("now() is not supported in [after] key"); - }); + afterValue = format.parseDouble( + value.toString(), + false, + () -> { throw new IllegalArgumentException("now() is not supported in [after] key"); } + ); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/GeoTileGridValuesSourceBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/GeoTileGridValuesSourceBuilder.java index 7b6a6f97a409c..7773b2fc170f2 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/GeoTileGridValuesSourceBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/GeoTileGridValuesSourceBuilder.java @@ -9,13 +9,13 @@ package org.elasticsearch.search.aggregations.bucket.composite; import org.apache.lucene.index.IndexReader; -import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.geo.GeoBoundingBox; import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.xcontent.ObjectParser; +import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.mapper.MappedFieldType; @@ -60,8 +60,11 @@ CompositeValuesSourceConfig apply( static { PARSER = new ObjectParser<>(GeoTileGridValuesSourceBuilder.TYPE); PARSER.declareInt(GeoTileGridValuesSourceBuilder::precision, new ParseField("precision")); - PARSER.declareField(((p, builder, context) -> builder.geoBoundingBox(GeoBoundingBox.parseBoundingBox(p))), - GeoBoundingBox.BOUNDS_FIELD, ObjectParser.ValueType.OBJECT); + PARSER.declareField( + ((p, builder, context) -> builder.geoBoundingBox(GeoBoundingBox.parseBoundingBox(p))), + GeoBoundingBox.BOUNDS_FIELD, + ObjectParser.ValueType.OBJECT + ); CompositeValuesSourceParserHelper.declareValuesSourceFields(PARSER); } @@ -78,11 +81,7 @@ static void register(ValuesSourceRegistry.Builder builder) { ValuesSource.GeoPoint geoPoint = (ValuesSource.GeoPoint) valuesSourceConfig.getValuesSource(); // is specified in the builder. 
final MappedFieldType fieldType = valuesSourceConfig.fieldType(); - GeoTileCellIdSource cellIdSource = new GeoTileCellIdSource( - geoPoint, - precision, - boundingBox - ); + GeoTileCellIdSource cellIdSource = new GeoTileCellIdSource(geoPoint, precision, boundingBox); return new CompositeValuesSourceConfig( name, fieldType, @@ -113,7 +112,8 @@ static void register(ValuesSourceRegistry.Builder builder) { } ); }, - false); + false + ); } private int precision = GeoTileGridAggregationBuilder.DEFAULT_PRECISION; @@ -180,8 +180,7 @@ public boolean equals(Object obj) { if (obj == null || getClass() != obj.getClass()) return false; if (super.equals(obj) == false) return false; GeoTileGridValuesSourceBuilder other = (GeoTileGridValuesSourceBuilder) obj; - return Objects.equals(precision,other.precision) - && Objects.equals(geoBoundingBox, other.geoBoundingBox); + return Objects.equals(precision, other.precision) && Objects.equals(geoBoundingBox, other.geoBoundingBox); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/GeoTileValuesSource.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/GeoTileValuesSource.java index 5bc6b3c16eb3c..df406b29c726a 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/GeoTileValuesSource.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/GeoTileValuesSource.java @@ -25,14 +25,16 @@ * The main differences is {@link GeoTileValuesSource#setAfter(Comparable)} as it needs to accept geotile string values i.e. "zoom/x/y". */ class GeoTileValuesSource extends LongValuesSource { - GeoTileValuesSource(BigArrays bigArrays, - MappedFieldType fieldType, - CheckedFunction docValuesFunc, - LongUnaryOperator rounding, - DocValueFormat format, - boolean missingBucket, - int size, - int reverseMul) { + GeoTileValuesSource( + BigArrays bigArrays, + MappedFieldType fieldType, + CheckedFunction docValuesFunc, + LongUnaryOperator rounding, + DocValueFormat format, + boolean missingBucket, + int size, + int reverseMul + ) { super(bigArrays, fieldType, docValuesFunc, rounding, format, missingBucket, size, reverseMul); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/HistogramValuesSourceBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/HistogramValuesSourceBuilder.java index 7d60f7110ac04..2813b9c6f7b87 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/HistogramValuesSourceBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/HistogramValuesSourceBuilder.java @@ -59,6 +59,7 @@ CompositeValuesSourceConfig apply( PARSER.declareDouble(HistogramValuesSourceBuilder::interval, Histogram.INTERVAL_FIELD); CompositeValuesSourceParserHelper.declareValuesSourceFields(PARSER); } + static HistogramValuesSourceBuilder parse(String name, XContentParser parser) throws IOException { return PARSER.parse(parser, new HistogramValuesSourceBuilder(name), null); } @@ -97,7 +98,9 @@ public static void register(ValuesSourceRegistry.Builder builder) { ); } ); - }, false); + }, + false + ); } private double interval = 0; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/InternalComposite.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/InternalComposite.java index 265bebbc138cd..b8f6f5f9e9f63 100644 --- 
a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/InternalComposite.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/InternalComposite.java @@ -32,8 +32,9 @@ import java.util.Objects; import java.util.Set; -public class InternalComposite - extends InternalMultiBucketAggregation implements CompositeAggregation { +public class InternalComposite extends InternalMultiBucketAggregation + implements + CompositeAggregation { private final int size; private final List buckets; @@ -44,9 +45,17 @@ public class InternalComposite private final boolean earlyTerminated; - InternalComposite(String name, int size, List sourceNames, List formats, - List buckets, CompositeKey afterKey, int[] reverseMuls, boolean earlyTerminated, - Map metadata) { + InternalComposite( + String name, + int size, + List sourceNames, + List formats, + List buckets, + CompositeKey afterKey, + int[] reverseMuls, + boolean earlyTerminated, + Map metadata + ) { super(name, metadata); this.sourceNames = sourceNames; this.formats = formats; @@ -103,14 +112,19 @@ public InternalComposite create(List newBuckets) { * keep the afterKey of the original aggregation in order * to be able to retrieve the next page even if all buckets have been filtered. */ - return new InternalComposite(name, size, sourceNames, formats, newBuckets, afterKey, - reverseMuls, earlyTerminated, getMetadata()); + return new InternalComposite(name, size, sourceNames, formats, newBuckets, afterKey, reverseMuls, earlyTerminated, getMetadata()); } @Override public InternalBucket createBucket(InternalAggregations aggregations, InternalBucket prototype) { - return new InternalBucket(prototype.sourceNames, prototype.formats, prototype.key, prototype.reverseMuls, - prototype.docCount, aggregations); + return new InternalBucket( + prototype.sourceNames, + prototype.formats, + prototype.key, + prototype.reverseMuls, + prototype.docCount, + aggregations + ); } public int getSize() { @@ -200,8 +214,7 @@ protected boolean lessThan(BucketIterator a, BucketIterator b) { lastKey = lastBucket.getRawKey(); } reduceContext.consumeBucketsAndMaybeBreak(result.size()); - return new InternalComposite(name, size, sourceNames, reducedFormats, result, lastKey, reverseMuls, - earlyTerminated, metadata); + return new InternalComposite(name, size, sourceNames, reducedFormats, result, lastKey, reverseMuls, earlyTerminated, metadata); } @Override @@ -229,10 +242,10 @@ public boolean equals(Object obj) { if (super.equals(obj) == false) return false; InternalComposite that = (InternalComposite) obj; - return Objects.equals(size, that.size) && - Objects.equals(buckets, that.buckets) && - Objects.equals(afterKey, that.afterKey) && - Arrays.equals(reverseMuls, that.reverseMuls); + return Objects.equals(size, that.size) + && Objects.equals(buckets, that.buckets) + && Objects.equals(afterKey, that.afterKey) + && Arrays.equals(reverseMuls, that.reverseMuls); } @Override @@ -259,7 +272,9 @@ InternalBucket next() { } public static class InternalBucket extends InternalMultiBucketAggregation.InternalBucket - implements CompositeAggregation.Bucket, KeyComparable { + implements + CompositeAggregation.Bucket, + KeyComparable { private final CompositeKey key; private final long docCount; @@ -268,9 +283,14 @@ public static class InternalBucket extends InternalMultiBucketAggregation.Intern private final transient List sourceNames; private final transient List formats; - - InternalBucket(List sourceNames, List formats, CompositeKey key, int[] 
reverseMuls, long docCount, - InternalAggregations aggregations) { + InternalBucket( + List sourceNames, + List formats, + CompositeKey key, + int[] reverseMuls, + long docCount, + InternalAggregations aggregations + ) { this.key = key; this.docCount = docCount; this.aggregations = aggregations; @@ -410,8 +430,20 @@ static Object formatObject(Object obj, DocValueFormat format) { } parsed = format.parseBytesRef(formatted.toString()); if (parsed.equals(obj) == false) { - throw new IllegalArgumentException("Format [" + format + "] created output it couldn't parse for value [" + obj +"] " - + "of type [" + obj.getClass() + "]. parsed value: [" + parsed + "(" + parsed.getClass() + ")]"); + throw new IllegalArgumentException( + "Format [" + + format + + "] created output it couldn't parse for value [" + + obj + + "] " + + "of type [" + + obj.getClass() + + "]. parsed value: [" + + parsed + + "(" + + parsed.getClass() + + ")]" + ); } } else if (obj.getClass() == Long.class) { long value = (long) obj; @@ -420,12 +452,26 @@ static Object formatObject(Object obj, DocValueFormat format) { } else { formatted = format.format(value); } - parsed = format.parseLong(formatted.toString(), false, () -> { - throw new UnsupportedOperationException("Using now() is not supported in after keys"); - }); + parsed = format.parseLong( + formatted.toString(), + false, + () -> { throw new UnsupportedOperationException("Using now() is not supported in after keys"); } + ); if (parsed.equals(((Number) obj).longValue()) == false) { - throw new IllegalArgumentException("Format [" + format + "] created output it couldn't parse for value [" + obj +"] " - + "of type [" + obj.getClass() + "]. parsed value: [" + parsed + "(" + parsed.getClass() + ")]"); + throw new IllegalArgumentException( + "Format [" + + format + + "] created output it couldn't parse for value [" + + obj + + "] " + + "of type [" + + obj.getClass() + + "]. parsed value: [" + + parsed + + "(" + + parsed.getClass() + + ")]" + ); } } else if (obj.getClass() == Double.class) { double value = (double) obj; @@ -434,11 +480,26 @@ static Object formatObject(Object obj, DocValueFormat format) { } else { formatted = format.format(value); } - parsed = format.parseDouble(formatted.toString(), false, - () -> {throw new UnsupportedOperationException("Using now() is not supported in after keys");}); + parsed = format.parseDouble( + formatted.toString(), + false, + () -> { throw new UnsupportedOperationException("Using now() is not supported in after keys"); } + ); if (parsed.equals(((Number) obj).doubleValue()) == false) { - throw new IllegalArgumentException("Format [" + format + "] created output it couldn't parse for value [" + obj +"] " - + "of type [" + obj.getClass() + "]. parsed value: [" + parsed + "(" + parsed.getClass() + ")]"); + throw new IllegalArgumentException( + "Format [" + + format + + "] created output it couldn't parse for value [" + + obj + + "] " + + "of type [" + + obj.getClass() + + "]. 
parsed value: [" + + parsed + + "(" + + parsed.getClass() + + ")]" + ); } } return formatted; @@ -478,6 +539,7 @@ public Set> entrySet() { public Iterator> iterator() { return new Iterator>() { int pos = 0; + @Override public boolean hasNext() { return pos < values.length; @@ -485,9 +547,11 @@ public boolean hasNext() { @Override public Entry next() { - SimpleEntry entry = - new SimpleEntry<>(keys.get(pos), formatObject(values[pos], formats.get(pos))); - ++ pos; + SimpleEntry entry = new SimpleEntry<>( + keys.get(pos), + formatObject(values[pos], formats.get(pos)) + ); + ++pos; return entry; } }; @@ -501,7 +565,7 @@ public int size() { } @Override - @SuppressWarnings({"rawtypes", "unchecked"}) + @SuppressWarnings({ "rawtypes", "unchecked" }) public int compareTo(ArrayMap that) { if (that == this) { return 0; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/LongValuesSource.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/LongValuesSource.java index 86eb07367cd9e..78d5c22bbd64b 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/LongValuesSource.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/LongValuesSource.java @@ -48,9 +48,16 @@ class LongValuesSource extends SingleDimensionValuesSource { private long currentValue; private boolean missingCurrentValue; - LongValuesSource(BigArrays bigArrays, - MappedFieldType fieldType, CheckedFunction docValuesFunc, - LongUnaryOperator rounding, DocValueFormat format, boolean missingBucket, int size, int reverseMul) { + LongValuesSource( + BigArrays bigArrays, + MappedFieldType fieldType, + CheckedFunction docValuesFunc, + LongUnaryOperator rounding, + DocValueFormat format, + boolean missingBucket, + int size, + int reverseMul + ) { super(bigArrays, format, fieldType, missingBucket, size, reverseMul); this.bigArrays = bigArrays; this.docValuesFunc = docValuesFunc; @@ -61,7 +68,7 @@ class LongValuesSource extends SingleDimensionValuesSource { @Override void copyCurrent(int slot) { - values = bigArrays.grow(values, slot+1); + values = bigArrays.grow(values, slot + 1); if (missingBucket && missingCurrentValue) { bits.clear(slot); } else { @@ -137,9 +144,11 @@ void setAfter(Comparable value) { afterValue = null; } else { // parse the value from a string in case it is a date or a formatted unsigned long. 
- afterValue = format.parseLong(value.toString(), false, () -> { - throw new IllegalArgumentException("now() is not supported in [after] key"); - }); + afterValue = format.parseLong( + value.toString(), + false, + () -> { throw new IllegalArgumentException("now() is not supported in [after] key"); } + ); } } @@ -191,7 +200,7 @@ private static Query extractQuery(Query query) { return extractQuery(((BoostQuery) query).getQuery()); } else if (query instanceof IndexOrDocValuesQuery) { return extractQuery(((IndexOrDocValuesQuery) query).getIndexQuery()); - } else if (query instanceof ConstantScoreQuery){ + } else if (query instanceof ConstantScoreQuery) { return extractQuery(((ConstantScoreQuery) query).getQuery()); } else { return query; @@ -220,8 +229,7 @@ private static boolean checkMatchAllOrRangeQuery(Query query, String fieldName) @Override SortedDocsProducer createSortedDocsProducerOrNull(IndexReader reader, Query query) { query = extractQuery(query); - if (checkIfSortedDocsIsApplicable(reader, fieldType) == false || - checkMatchAllOrRangeQuery(query, fieldType.name()) == false) { + if (checkIfSortedDocsIsApplicable(reader, fieldType) == false || checkMatchAllOrRangeQuery(query, fieldType.name()) == false) { return null; } final byte[] lowerPoint; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/OrdinalValuesSource.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/OrdinalValuesSource.java index 1df67c650a3cf..79affbaff7039 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/OrdinalValuesSource.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/OrdinalValuesSource.java @@ -66,9 +66,16 @@ class OrdinalValuesSource extends SingleDimensionValuesSource { private Long lastLookupOrd; // null if nothing cached private BytesRef lastLookupValue; - OrdinalValuesSource(BigArrays bigArrays, LongConsumer breakerConsumer, MappedFieldType type, - CheckedFunction docValuesFunc, - DocValueFormat format, boolean missingBucket, int size, int reverseMul) { + OrdinalValuesSource( + BigArrays bigArrays, + LongConsumer breakerConsumer, + MappedFieldType type, + CheckedFunction docValuesFunc, + DocValueFormat format, + boolean missingBucket, + int size, + int reverseMul + ) { super(bigArrays, format, type, missingBucket, size, reverseMul); this.breakerConsumer = breakerConsumer; this.docValuesFunc = docValuesFunc; @@ -368,9 +375,9 @@ public boolean requiresRehashingWhenSwitchingLeafReaders() { @Override SortedDocsProducer createSortedDocsProducerOrNull(IndexReader reader, Query query) { - if (checkIfSortedDocsIsApplicable(reader, fieldType) == false || - fieldType instanceof StringFieldType == false || - (query != null && query.getClass() != MatchAllDocsQuery.class)) { + if (checkIfSortedDocsIsApplicable(reader, fieldType) == false + || fieldType instanceof StringFieldType == false + || (query != null && query.getClass() != MatchAllDocsQuery.class)) { return null; } return new TermsSortedDocsProducer(fieldType.name()); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/ParsedComposite.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/ParsedComposite.java index 13275160fead2..576460ad89324 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/ParsedComposite.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/ParsedComposite.java 
@@ -8,8 +8,8 @@ package org.elasticsearch.search.aggregations.bucket.composite; -import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.xcontent.ObjectParser; +import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.search.aggregations.ParsedMultiBucketAggregation; @@ -19,16 +19,20 @@ import java.util.Map; public class ParsedComposite extends ParsedMultiBucketAggregation implements CompositeAggregation { - private static final ObjectParser PARSER = - new ObjectParser<>(ParsedComposite.class.getSimpleName(), true, ParsedComposite::new); + private static final ObjectParser PARSER = new ObjectParser<>( + ParsedComposite.class.getSimpleName(), + true, + ParsedComposite::new + ); static { - PARSER.declareField(ParsedComposite::setAfterKey, (p, c) -> p.mapOrdered(), new ParseField("after_key"), - ObjectParser.ValueType.OBJECT); - declareMultiBucketAggregationFields(PARSER, - parser -> ParsedComposite.ParsedBucket.fromXContent(parser), - parser -> null + PARSER.declareField( + ParsedComposite::setAfterKey, + (p, c) -> p.mapOrdered(), + new ParseField("after_key"), + ObjectParser.ValueType.OBJECT ); + declareMultiBucketAggregationFields(PARSER, parser -> ParsedComposite.ParsedBucket.fromXContent(parser), parser -> null); } private Map afterKey; @@ -41,7 +45,7 @@ public static ParsedComposite fromXContent(XContentParser parser, String name) t * Previous versions (< 6.3) don't send afterKey * in the response so we set it as the last returned buckets. */ - aggregation.setAfterKey(aggregation.getBuckets().get(aggregation.getBuckets().size()-1).key); + aggregation.setAfterKey(aggregation.getBuckets().get(aggregation.getBuckets().size() - 1).key); } return aggregation; } @@ -61,7 +65,7 @@ public Map afterKey() { if (afterKey != null) { return afterKey; } - return buckets.size() > 0 ? buckets.get(buckets.size()-1).getKey() : null; + return buckets.size() > 0 ? 
buckets.get(buckets.size() - 1).getKey() : null; } private void setAfterKey(Map afterKey) { @@ -99,8 +103,7 @@ public final XContentBuilder toXContent(XContentBuilder builder, Params params) } static ParsedComposite.ParsedBucket fromXContent(XContentParser parser) throws IOException { - return parseXContent(parser, false, ParsedBucket::new, - (p, bucket) -> bucket.setKey(p.mapOrdered())); + return parseXContent(parser, false, ParsedBucket::new, (p, bucket) -> bucket.setKey(p.mapOrdered())); } } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/PointsSortedDocsProducer.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/PointsSortedDocsProducer.java index 0430aa6c2772e..c9c1aaf4fd438 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/PointsSortedDocsProducer.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/PointsSortedDocsProducer.java @@ -35,8 +35,8 @@ class PointsSortedDocsProducer extends SortedDocsProducer { } @Override - DocIdSet processLeaf(Query query, CompositeValuesCollectorQueue queue, - LeafReaderContext context, boolean fillDocIdSet) throws IOException { + DocIdSet processLeaf(Query query, CompositeValuesCollectorQueue queue, LeafReaderContext context, boolean fillDocIdSet) + throws IOException { final PointValues values = context.reader().getPointValues(field); if (values == null) { // no value for the field @@ -83,8 +83,14 @@ private class Visitor implements PointValues.IntersectVisitor { long lastBucket; boolean first = true; - Visitor(LeafReaderContext context, CompositeValuesCollectorQueue queue, DocIdSetBuilder builder, - int bytesPerDim, long lowerBucket, long upperBucket) { + Visitor( + LeafReaderContext context, + CompositeValuesCollectorQueue queue, + DocIdSetBuilder builder, + int bytesPerDim, + long lowerBucket, + long upperBucket + ) { this.context = context; this.maxDoc = context.reader().maxDoc(); this.queue = queue; @@ -109,7 +115,7 @@ public void visit(int docID) throws IOException { @Override public void visit(int docID, byte[] packedValue) throws IOException { if (compare(packedValue, packedValue) != PointValues.Relation.CELL_CROSSES_QUERY) { - remaining --; + remaining--; return; } @@ -117,8 +123,8 @@ public void visit(int docID, byte[] packedValue) throws IOException { if (first == false && bucket != lastBucket) { final DocIdSet docIdSet = bucketDocsBuilder.build(); if (processBucket(queue, context, docIdSet.iterator(), lastBucket, builder) && - // lower bucket is inclusive - lowerBucket != lastBucket) { + // lower bucket is inclusive + lowerBucket != lastBucket) { // this bucket does not have any competitive composite buckets, // we can early terminate the collection because the remaining buckets are guaranteed // to be greater than this bucket. 
@@ -131,15 +137,15 @@ public void visit(int docID, byte[] packedValue) throws IOException { lastBucket = bucket; first = false; adder.add(docID); - remaining --; + remaining--; } @Override public PointValues.Relation compare(byte[] minPackedValue, byte[] maxPackedValue) { - if ((upperPointQuery != null && - FutureArrays.compareUnsigned(minPackedValue, 0, bytesPerDim, upperPointQuery, 0, bytesPerDim) > 0) || - (lowerPointQuery != null && - FutureArrays.compareUnsigned(maxPackedValue, 0, bytesPerDim, lowerPointQuery, 0, bytesPerDim) < 0)) { + if ((upperPointQuery != null + && FutureArrays.compareUnsigned(minPackedValue, 0, bytesPerDim, upperPointQuery, 0, bytesPerDim) > 0) + || (lowerPointQuery != null + && FutureArrays.compareUnsigned(maxPackedValue, 0, bytesPerDim, lowerPointQuery, 0, bytesPerDim) < 0)) { // does not match the query return PointValues.Relation.CELL_OUTSIDE_QUERY; } @@ -162,7 +168,7 @@ public PointValues.Relation compare(byte[] minPackedValue, byte[] maxPackedValue } public void flush() throws IOException { - if (first == false) { + if (first == false) { final DocIdSet docIdSet = bucketDocsBuilder.build(); processBucket(queue, context, docIdSet.iterator(), lastBucket, builder); bucketDocsBuilder = null; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/SingleDimensionValuesSource.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/SingleDimensionValuesSource.java index a6569a2f930b1..7b5d4ca7d3351 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/SingleDimensionValuesSource.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/SingleDimensionValuesSource.java @@ -46,9 +46,14 @@ abstract class SingleDimensionValuesSource> implements R * @param size The number of values to record. * @param reverseMul -1 if the natural order ({@link SortOrder#ASC} should be reversed. */ - SingleDimensionValuesSource(BigArrays bigArrays, DocValueFormat format, - @Nullable MappedFieldType fieldType, boolean missingBucket, - int size, int reverseMul) { + SingleDimensionValuesSource( + BigArrays bigArrays, + DocValueFormat format, + @Nullable MappedFieldType fieldType, + boolean missingBucket, + int size, + int reverseMul + ) { this.bigArrays = bigArrays; this.format = format; this.fieldType = fieldType; @@ -127,8 +132,8 @@ T getAfter() { * Creates a {@link LeafBucketCollector} that sets the current value for each document to the provided * value and invokes {@link LeafBucketCollector#collect} on the provided next collector. */ - abstract LeafBucketCollector getLeafCollector(Comparable value, - LeafReaderContext context, LeafBucketCollector next) throws IOException; + abstract LeafBucketCollector getLeafCollector(Comparable value, LeafReaderContext context, LeafBucketCollector next) + throws IOException; /** * Returns a {@link SortedDocsProducer} or null if this source cannot produce sorted docs. @@ -139,16 +144,13 @@ abstract LeafBucketCollector getLeafCollector(Comparable value, * Returns true if a {@link SortedDocsProducer} should be used to optimize the execution. 
*/ protected boolean checkIfSortedDocsIsApplicable(IndexReader reader, MappedFieldType fieldType) { - if (fieldType == null || - (missingBucket && afterValue == null) || - fieldType.isSearchable() == false || - // inverse of the natural order - reverseMul == -1) { + if (fieldType == null || (missingBucket && afterValue == null) || fieldType.isSearchable() == false || + // inverse of the natural order + reverseMul == -1) { return false; } - if (reader.hasDeletions() && - (reader.numDocs() == 0 || (double) reader.numDocs() / (double) reader.maxDoc() < 0.5)) { + if (reader.hasDeletions() && (reader.numDocs() == 0 || (double) reader.numDocs() / (double) reader.maxDoc() < 0.5)) { // do not use the index if it has more than 50% of deleted docs return false; } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/SortedDocsProducer.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/SortedDocsProducer.java index 2fc025d60291b..cc23573aead73 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/SortedDocsProducer.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/SortedDocsProducer.java @@ -40,8 +40,13 @@ abstract class SortedDocsProducer { * Returns true if the queue is full and the current leadSourceBucket did not produce any competitive * composite buckets. */ - protected boolean processBucket(CompositeValuesCollectorQueue queue, LeafReaderContext context, DocIdSetIterator iterator, - Comparable leadSourceBucket, @Nullable DocIdSetBuilder builder) throws IOException { + protected boolean processBucket( + CompositeValuesCollectorQueue queue, + LeafReaderContext context, + DocIdSetIterator iterator, + Comparable leadSourceBucket, + @Nullable DocIdSetBuilder builder + ) throws IOException { final int[] topCompositeCollected = new int[1]; final boolean[] hasCollected = new boolean[1]; final DocCountProvider docCountProvider = new DocCountProvider(); @@ -69,7 +74,7 @@ public void collect(int doc, long bucket) throws IOException { remainingBits = 128; } adder.add(doc); - remainingBits --; + remainingBits--; lastDoc = doc; } } @@ -82,9 +87,7 @@ public void collect(int doc, long bucket) throws IOException { collector.collect(iterator.docID()); } } - if (queue.isFull() && - hasCollected[0] && - topCompositeCollected[0] == 0) { + if (queue.isFull() && hasCollected[0] && topCompositeCollected[0] == 0) { return true; } return false; @@ -95,6 +98,6 @@ public void collect(int doc, long bucket) throws IOException { * Returns the {@link DocIdSet} of the documents that contain a top composite bucket in this leaf or * {@link DocIdSet#EMPTY} if fillDocIdSet is false. 
*/ - abstract DocIdSet processLeaf(Query query, CompositeValuesCollectorQueue queue, - LeafReaderContext context, boolean fillDocIdSet) throws IOException; + abstract DocIdSet processLeaf(Query query, CompositeValuesCollectorQueue queue, LeafReaderContext context, boolean fillDocIdSet) + throws IOException; } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/TermsSortedDocsProducer.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/TermsSortedDocsProducer.java index efe0871d383ed..5895bd1919802 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/TermsSortedDocsProducer.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/TermsSortedDocsProducer.java @@ -28,8 +28,8 @@ class TermsSortedDocsProducer extends SortedDocsProducer { } @Override - DocIdSet processLeaf(Query query, CompositeValuesCollectorQueue queue, - LeafReaderContext context, boolean fillDocIdSet) throws IOException { + DocIdSet processLeaf(Query query, CompositeValuesCollectorQueue queue, LeafReaderContext context, boolean fillDocIdSet) + throws IOException { final Terms terms = context.reader().terms(field); if (terms == null) { // no value for the field @@ -40,7 +40,7 @@ DocIdSet processLeaf(Query query, CompositeValuesCollectorQueue queue, final TermsEnum te = terms.iterator(); if (lowerValue != null) { if (te.seekCeil(lowerValue) == TermsEnum.SeekStatus.END) { - return DocIdSet.EMPTY ; + return DocIdSet.EMPTY; } } else { if (te.next() == null) { diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/TermsValuesSourceBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/TermsValuesSourceBuilder.java index a05c04812c7b4..1d882595b50a2 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/TermsValuesSourceBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/TermsValuesSourceBuilder.java @@ -47,6 +47,7 @@ CompositeValuesSourceConfig apply( SortOrder order ); } + static final String TYPE = "terms"; static final ValuesSourceRegistry.RegistryKey REGISTRY_KEY = new ValuesSourceRegistry.RegistryKey<>( TYPE, @@ -139,7 +140,8 @@ static void register(ValuesSourceRegistry.Builder builder) { } ); }, - false); + false + ); builder.register( REGISTRY_KEY, @@ -160,8 +162,7 @@ static void register(ValuesSourceRegistry.Builder builder) { CompositeValuesSourceConfig compositeValuesSourceConfig) -> { if (valuesSourceConfig.hasOrdinals() && reader instanceof DirectoryReader) { - ValuesSource.Bytes.WithOrdinals vs = (ValuesSource.Bytes.WithOrdinals) compositeValuesSourceConfig - .valuesSource(); + ValuesSource.Bytes.WithOrdinals vs = (ValuesSource.Bytes.WithOrdinals) compositeValuesSourceConfig.valuesSource(); return new OrdinalValuesSource( bigArrays, addRequestCircuitBreakerBytes, @@ -187,7 +188,8 @@ static void register(ValuesSourceRegistry.Builder builder) { } } ), - false); + false + ); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/DocValuesFieldExistsAdapter.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/DocValuesFieldExistsAdapter.java index 9500664e30275..5802777fac6cf 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/DocValuesFieldExistsAdapter.java +++ 
b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/DocValuesFieldExistsAdapter.java @@ -57,4 +57,4 @@ void collectDebugInfo(BiConsumer add) { add.accept("specialized_for", "docvalues_field_exists"); add.accept("results_from_metadata", resultsFromMetadata); } -} \ No newline at end of file +} diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/Filter.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/Filter.java index 5f7381ad479eb..f90d009fb9919 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/Filter.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/Filter.java @@ -12,5 +12,4 @@ /** * A {@code filter} aggregation. Defines a single bucket that holds all documents that match a specific filter. */ -public interface Filter extends SingleBucketAggregation { -} +public interface Filter extends SingleBucketAggregation {} diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FilterAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FilterAggregationBuilder.java index 6068f3150f918..7d9c778f27353 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FilterAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FilterAggregationBuilder.java @@ -48,8 +48,11 @@ public FilterAggregationBuilder(String name, QueryBuilder filter) { this.filter = filter; } - protected FilterAggregationBuilder(FilterAggregationBuilder clone, - AggregatorFactories.Builder factoriesBuilder, Map metadata) { + protected FilterAggregationBuilder( + FilterAggregationBuilder clone, + AggregatorFactories.Builder factoriesBuilder, + Map metadata + ) { super(clone, factoriesBuilder, metadata); this.filter = clone.filter; } @@ -87,8 +90,11 @@ protected AggregationBuilder doRewrite(QueryRewriteContext queryRewriteContext) } @Override - protected AggregatorFactory doBuild(AggregationContext context, AggregatorFactory parent, - AggregatorFactories.Builder subFactoriesBuilder) throws IOException { + protected AggregatorFactory doBuild( + AggregationContext context, + AggregatorFactory parent, + AggregatorFactories.Builder subFactoriesBuilder + ) throws IOException { return new FilterAggregatorFactory(name, filter, context, parent, subFactoriesBuilder, metadata); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FilterAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FilterAggregator.java index 33f2e0c7f99c1..e0d00bde44d4c 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FilterAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FilterAggregator.java @@ -32,20 +32,21 @@ public class FilterAggregator extends BucketsAggregator implements SingleBucketA private final Supplier filter; - public FilterAggregator(String name, - Supplier filter, - AggregatorFactories factories, - AggregationContext context, - Aggregator parent, - CardinalityUpperBound cardinality, - Map metadata) throws IOException { + public FilterAggregator( + String name, + Supplier filter, + AggregatorFactories factories, + AggregationContext context, + Aggregator parent, + CardinalityUpperBound cardinality, + Map metadata + ) throws IOException { super(name, factories, context, parent, cardinality, metadata); 
this.filter = filter; } @Override - public LeafBucketCollector getLeafCollector(LeafReaderContext ctx, - final LeafBucketCollector sub) throws IOException { + public LeafBucketCollector getLeafCollector(LeafReaderContext ctx, final LeafBucketCollector sub) throws IOException { // no need to provide deleted docs to the filter final Bits bits = Lucene.asSequentialAccessBits(ctx.reader().maxDoc(), filter.get().scorerSupplier(ctx)); return new LeafBucketCollectorBase(sub, null) { @@ -60,8 +61,15 @@ public void collect(int doc, long bucket) throws IOException { @Override public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException { - return buildAggregationsForSingleBucket(owningBucketOrds, (owningBucketOrd, subAggregationResults) -> - new InternalFilter(name, bucketDocCount(owningBucketOrd), subAggregationResults, metadata())); + return buildAggregationsForSingleBucket( + owningBucketOrds, + (owningBucketOrd, subAggregationResults) -> new InternalFilter( + name, + bucketDocCount(owningBucketOrd), + subAggregationResults, + metadata() + ) + ); } @Override @@ -69,5 +77,3 @@ public InternalAggregation buildEmptyAggregation() { return new InternalFilter(name, 0, buildEmptySubAggregations(), metadata()); } } - - diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FilterAggregatorFactory.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FilterAggregatorFactory.java index 4c8acf3dda714..f47766b8e50a7 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FilterAggregatorFactory.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FilterAggregatorFactory.java @@ -28,8 +28,14 @@ public class FilterAggregatorFactory extends AggregatorFactory { private final Query filter; private Weight weight; - public FilterAggregatorFactory(String name, QueryBuilder filterBuilder, AggregationContext context, - AggregatorFactory parent, AggregatorFactories.Builder subFactoriesBuilder, Map metadata) throws IOException { + public FilterAggregatorFactory( + String name, + QueryBuilder filterBuilder, + AggregationContext context, + AggregatorFactory parent, + AggregatorFactories.Builder subFactoriesBuilder, + Map metadata + ) throws IOException { super(name, context, parent, subFactoriesBuilder, metadata); filter = context.buildQuery(filterBuilder); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FilterByFilterAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FilterByFilterAggregator.java index ed08fe30ec901..55b823c1e2bb2 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FilterByFilterAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FilterByFilterAggregator.java @@ -77,8 +77,7 @@ public AdapterBuilder( * {@link FilterByFilterAggregator} into another sort of aggregator * if required. 
*/ - protected abstract T adapt(CheckedFunction delegate) - throws IOException; + protected abstract T adapt(CheckedFunction delegate) throws IOException; public final void add(String key, Query query) throws IOException { if (valid == false) { @@ -269,8 +268,7 @@ public void collect(int docId) throws IOException { } @Override - public void setScorer(Scorable scorer) throws IOException { - } + public void setScorer(Scorable scorer) throws IOException {} } MatchCollector collector = new MatchCollector(); filters().get(0).collect(ctx, collector, live); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/Filters.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/Filters.java index 596d3ae44bcd8..aa7cf268844ea 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/Filters.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/Filters.java @@ -21,8 +21,7 @@ public interface Filters extends MultiBucketsAggregation { /** * A bucket associated with a specific filter (identified by its key) */ - interface Bucket extends MultiBucketsAggregation.Bucket { - } + interface Bucket extends MultiBucketsAggregation.Bucket {} /** * The buckets created by this aggregation. diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FiltersAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FiltersAggregationBuilder.java index 0ce1153dbedff..271ce7ccf5a04 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FiltersAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FiltersAggregationBuilder.java @@ -8,10 +8,10 @@ package org.elasticsearch.search.aggregations.bucket.filter; -import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.query.QueryBuilder; @@ -214,8 +214,17 @@ protected AggregationBuilder doRewrite(QueryRewriteContext queryRewriteContext) @Override protected AggregatorFactory doBuild(AggregationContext context, AggregatorFactory parent, Builder subFactoriesBuilder) throws IOException { - return new FiltersAggregatorFactory(name, filters, keyed, otherBucket, otherBucketKey, context, parent, - subFactoriesBuilder, metadata); + return new FiltersAggregatorFactory( + name, + filters, + keyed, + otherBucket, + otherBucketKey, + context, + parent, + subFactoriesBuilder, + metadata + ); } @Override @@ -240,8 +249,7 @@ protected XContentBuilder internalXContent(XContentBuilder builder, Params param return builder; } - public static FiltersAggregationBuilder parse(String aggregationName, XContentParser parser) - throws IOException { + public static FiltersAggregationBuilder parse(String aggregationName, XContentParser parser) throws IOException { List filters = new ArrayList<>(); @@ -257,15 +265,19 @@ public static FiltersAggregationBuilder parse(String aggregationName, XContentPa if (OTHER_BUCKET_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { otherBucket = parser.booleanValue(); } else { - throw new ParsingException(parser.getTokenLocation(), - "Unknown key for a " 
+ token + " in [" + aggregationName + "]: [" + currentFieldName + "]."); + throw new ParsingException( + parser.getTokenLocation(), + "Unknown key for a " + token + " in [" + aggregationName + "]: [" + currentFieldName + "]." + ); } } else if (token == XContentParser.Token.VALUE_STRING) { if (OTHER_BUCKET_KEY_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { otherBucketKey = parser.text(); } else { - throw new ParsingException(parser.getTokenLocation(), - "Unknown key for a " + token + " in [" + aggregationName + "]: [" + currentFieldName + "]."); + throw new ParsingException( + parser.getTokenLocation(), + "Unknown key for a " + token + " in [" + aggregationName + "]: [" + currentFieldName + "]." + ); } } else if (token == XContentParser.Token.START_OBJECT) { if (FILTERS_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { @@ -280,8 +292,10 @@ public static FiltersAggregationBuilder parse(String aggregationName, XContentPa } keyed = true; } else { - throw new ParsingException(parser.getTokenLocation(), - "Unknown key for a " + token + " in [" + aggregationName + "]: [" + currentFieldName + "]."); + throw new ParsingException( + parser.getTokenLocation(), + "Unknown key for a " + token + " in [" + aggregationName + "]: [" + currentFieldName + "]." + ); } } else if (token == XContentParser.Token.START_ARRAY) { if (FILTERS_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { @@ -294,12 +308,16 @@ public static FiltersAggregationBuilder parse(String aggregationName, XContentPa filters.add(new KeyedFilter(String.valueOf(i), builders.get(i))); } } else { - throw new ParsingException(parser.getTokenLocation(), - "Unknown key for a " + token + " in [" + aggregationName + "]: [" + currentFieldName + "]."); + throw new ParsingException( + parser.getTokenLocation(), + "Unknown key for a " + token + " in [" + aggregationName + "]: [" + currentFieldName + "]." + ); } } else { - throw new ParsingException(parser.getTokenLocation(), - "Unknown key for a " + token + " in [" + aggregationName + "]: [" + currentFieldName + "]."); + throw new ParsingException( + parser.getTokenLocation(), + "Unknown key for a " + token + " in [" + aggregationName + "]: [" + currentFieldName + "]." 
+ ); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FiltersAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FiltersAggregator.java index d857d0e66c8c9..2de7ae21ce493 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FiltersAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FiltersAggregator.java @@ -110,8 +110,7 @@ public boolean equals(Object obj) { return false; } KeyedFilter other = (KeyedFilter) obj; - return Objects.equals(key, other.key) - && Objects.equals(filter, other.filter); + return Objects.equals(key, other.key) && Objects.equals(filter, other.filter); } } @@ -157,26 +156,24 @@ protected FilterByFilterAggregator adapt( if (filterByFilter != null) { return filterByFilter; } - return new FiltersAggregator.Compatible( - name, - factories, - filters, - keyed, - otherBucketKey, - context, - parent, - cardinality, - metadata - ); + return new FiltersAggregator.Compatible(name, factories, filters, keyed, otherBucketKey, context, parent, cardinality, metadata); } private final List> filters; private final boolean keyed; protected final String otherBucketKey; - FiltersAggregator(String name, AggregatorFactories factories, List> filters, boolean keyed, - String otherBucketKey, AggregationContext context, Aggregator parent, CardinalityUpperBound cardinality, - Map metadata) throws IOException { + FiltersAggregator( + String name, + AggregatorFactories factories, + List> filters, + boolean keyed, + String otherBucketKey, + AggregationContext context, + Aggregator parent, + CardinalityUpperBound cardinality, + Map metadata + ) throws IOException { super(name, factories, context, parent, cardinality.multiply(filters.size() + (otherBucketKey == null ? 0 : 1)), metadata); this.filters = List.copyOf(filters); this.keyed = keyed; @@ -189,14 +186,22 @@ List> filters() { @Override public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException { - return buildAggregationsForFixedBucketCount(owningBucketOrds, filters.size() + (otherBucketKey == null ? 0 : 1), + return buildAggregationsForFixedBucketCount( + owningBucketOrds, + filters.size() + (otherBucketKey == null ? 
0 : 1), (offsetInOwningOrd, docCount, subAggregationResults) -> { if (offsetInOwningOrd < filters.size()) { - return new InternalFilters.InternalBucket(filters.get(offsetInOwningOrd).key().toString(), docCount, - subAggregationResults, keyed); + return new InternalFilters.InternalBucket( + filters.get(offsetInOwningOrd).key().toString(), + docCount, + subAggregationResults, + keyed + ); } return new InternalFilters.InternalBucket(otherBucketKey, docCount, subAggregationResults, keyed); - }, buckets -> new InternalFilters(name, buckets, keyed, metadata())); + }, + buckets -> new InternalFilters(name, buckets, keyed, metadata()) + ); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FiltersAggregatorFactory.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FiltersAggregatorFactory.java index b0b2aa166d1df..ab46cc41c3965 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FiltersAggregatorFactory.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FiltersAggregatorFactory.java @@ -27,9 +27,17 @@ public class FiltersAggregatorFactory extends AggregatorFactory { private final boolean otherBucket; private final String otherBucketKey; - public FiltersAggregatorFactory(String name, List filters, boolean keyed, boolean otherBucket, - String otherBucketKey, AggregationContext context, AggregatorFactory parent, - AggregatorFactories.Builder subFactories, Map metadata) throws IOException { + public FiltersAggregatorFactory( + String name, + List filters, + boolean keyed, + boolean otherBucket, + String otherBucketKey, + AggregationContext context, + AggregatorFactory parent, + AggregatorFactories.Builder subFactories, + Map metadata + ) throws IOException { super(name, context, parent, subFactories, metadata); this.keyed = keyed; this.otherBucket = otherBucket; @@ -41,10 +49,18 @@ public FiltersAggregatorFactory(String name, List filters, boolean } @Override - public Aggregator createInternal(Aggregator parent, - CardinalityUpperBound cardinality, - Map metadata) throws IOException { - return FiltersAggregator.build(name, factories, filters, keyed, - otherBucket ? otherBucketKey : null, context, parent, cardinality, metadata); + public Aggregator createInternal(Aggregator parent, CardinalityUpperBound cardinality, Map metadata) + throws IOException { + return FiltersAggregator.build( + name, + factories, + filters, + keyed, + otherBucket ? 
otherBucketKey : null, + context, + parent, + cardinality, + metadata + ); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/InternalFilters.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/InternalFilters.java index 08894533dec02..8c7481f586a63 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/InternalFilters.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/InternalFilters.java @@ -97,9 +97,9 @@ public boolean equals(Object other) { } InternalBucket that = (InternalBucket) other; return Objects.equals(key, that.key) - && Objects.equals(keyed, that.keyed) - && Objects.equals(docCount, that.docCount) - && Objects.equals(aggregations, that.aggregations); + && Objects.equals(keyed, that.keyed) + && Objects.equals(docCount, that.docCount) + && Objects.equals(aggregations, that.aggregations); } @Override @@ -249,8 +249,7 @@ public boolean equals(Object obj) { if (super.equals(obj) == false) return false; InternalFilters that = (InternalFilters) obj; - return Objects.equals(buckets, that.buckets) - && Objects.equals(keyed, that.keyed); + return Objects.equals(buckets, that.buckets) && Objects.equals(keyed, that.keyed); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/MatchAllQueryToFilterAdapter.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/MatchAllQueryToFilterAdapter.java index 95fbb8449cd6b..a4d85a2ea5b41 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/MatchAllQueryToFilterAdapter.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/MatchAllQueryToFilterAdapter.java @@ -53,4 +53,4 @@ void collectDebugInfo(BiConsumer add) { add.accept("specialized_for", "match_all"); add.accept("results_from_metadata", resultsFromMetadata); } -} \ No newline at end of file +} diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/MatchNoneQueryToFilterAdapter.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/MatchNoneQueryToFilterAdapter.java index 59cec2207011b..67f2346fe5409 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/MatchNoneQueryToFilterAdapter.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/MatchNoneQueryToFilterAdapter.java @@ -46,4 +46,4 @@ void collectDebugInfo(BiConsumer add) { super.collectDebugInfo(add); add.accept("specialized_for", "match_none"); } -} \ No newline at end of file +} diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/ParsedFilters.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/ParsedFilters.java index d9441bfa3ea40..8827c29f5dbe4 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/ParsedFilters.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/ParsedFilters.java @@ -49,12 +49,17 @@ public ParsedBucket getBucketByKey(String key) { return bucketMap.get(key); } - private static final ObjectParser PARSER = - new ObjectParser<>(ParsedFilters.class.getSimpleName(), true, ParsedFilters::new); + private static final ObjectParser PARSER = new ObjectParser<>( + ParsedFilters.class.getSimpleName(), + true, + ParsedFilters::new + ); static { - declareMultiBucketAggregationFields(PARSER, - parser -> ParsedBucket.fromXContent(parser, false), - 
parser -> ParsedBucket.fromXContent(parser, true)); + declareMultiBucketAggregationFields( + PARSER, + parser -> ParsedBucket.fromXContent(parser, false), + parser -> ParsedBucket.fromXContent(parser, true) + ); } public static ParsedFilters fromXContent(XContentParser parser, String name) throws IOException { @@ -99,7 +104,6 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws return builder; } - static ParsedBucket fromXContent(XContentParser parser, boolean keyed) throws IOException { final ParsedBucket bucket = new ParsedBucket(); bucket.setKeyed(keyed); @@ -120,8 +124,12 @@ static ParsedBucket fromXContent(XContentParser parser, boolean keyed) throws IO bucket.setDocCount(parser.longValue()); } } else if (token == XContentParser.Token.START_OBJECT) { - XContentParserUtils.parseTypedKeysObject(parser, Aggregation.TYPED_KEYS_DELIMITER, Aggregation.class, - aggregations::add); + XContentParserUtils.parseTypedKeysObject( + parser, + Aggregation.TYPED_KEYS_DELIMITER, + Aggregation.class, + aggregations::add + ); } } bucket.setAggregations(new Aggregations(aggregations)); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/TermQueryToFilterAdapter.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/TermQueryToFilterAdapter.java index bc7ab8f6b8d52..060b3225ee8e1 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/TermQueryToFilterAdapter.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/TermQueryToFilterAdapter.java @@ -41,4 +41,4 @@ void collectDebugInfo(BiConsumer add) { add.accept("specialized_for", "term"); add.accept("results_from_metadata", resultsFromMetadata); } -} \ No newline at end of file +} diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoGrid.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoGrid.java index e0122cc2b6e82..79917cf897945 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoGrid.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoGrid.java @@ -21,8 +21,7 @@ public interface GeoGrid extends MultiBucketsAggregation { * A bucket that is associated with a geo-grid cell. 
The key of the bucket is * the {@link InternalGeoGridBucket#getKeyAsString()} of the cell */ - interface Bucket extends MultiBucketsAggregation.Bucket { - } + interface Bucket extends MultiBucketsAggregation.Bucket {} /** * @return The buckets of this aggregation (each bucket representing a geo-grid cell) diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoGridAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoGridAggregationBuilder.java index 59d4029409497..f1c7bed70de09 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoGridAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoGridAggregationBuilder.java @@ -10,12 +10,12 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.Version; -import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.geo.GeoBoundingBox; import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.ObjectParser; +import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.search.aggregations.AggregatorFactories.Builder; @@ -44,24 +44,30 @@ public abstract class GeoGridAggregationBuilder extends ValuesSourceAggregationB protected int shardSize; private GeoBoundingBox geoBoundingBox = new GeoBoundingBox(new GeoPoint(Double.NaN, Double.NaN), new GeoPoint(Double.NaN, Double.NaN)); - @FunctionalInterface protected interface PrecisionParser { int parse(XContentParser parser) throws IOException; } public static ObjectParser createParser( - String name, PrecisionParser precisionParser, Function ctor) { + String name, + PrecisionParser precisionParser, + Function ctor + ) { ObjectParser parser = ObjectParser.fromBuilder(name, ctor); ValuesSourceAggregationBuilder.declareFields(parser, false, false, false); - parser.declareField((p, builder, context) -> builder.precision(precisionParser.parse(p)), FIELD_PRECISION, - org.elasticsearch.common.xcontent.ObjectParser.ValueType.INT); + parser.declareField( + (p, builder, context) -> builder.precision(precisionParser.parse(p)), + FIELD_PRECISION, + org.elasticsearch.common.xcontent.ObjectParser.ValueType.INT + ); parser.declareInt(GeoGridAggregationBuilder::size, FIELD_SIZE); parser.declareInt(GeoGridAggregationBuilder::shardSize, FIELD_SHARD_SIZE); - parser.declareField((p, builder, context) -> { - builder.setGeoBoundingBox(GeoBoundingBox.parseBoundingBox(p)); - }, - GeoBoundingBox.BOUNDS_FIELD, org.elasticsearch.common.xcontent.ObjectParser.ValueType.OBJECT); + parser.declareField( + (p, builder, context) -> { builder.setGeoBoundingBox(GeoBoundingBox.parseBoundingBox(p)); }, + GeoBoundingBox.BOUNDS_FIELD, + org.elasticsearch.common.xcontent.ObjectParser.ValueType.OBJECT + ); return parser; } @@ -116,9 +122,17 @@ protected void innerWriteTo(StreamOutput out) throws IOException { * Creates a new instance of the {@link ValuesSourceAggregatorFactory}-derived class specific to the geo aggregation. 
*/ protected abstract ValuesSourceAggregatorFactory createFactory( - String name, ValuesSourceConfig config, int precision, int requiredSize, int shardSize, - GeoBoundingBox geoBoundingBox, AggregationContext context, AggregatorFactory parent, - Builder subFactoriesBuilder, Map metadata) throws IOException; + String name, + ValuesSourceConfig config, + int precision, + int requiredSize, + int shardSize, + GeoBoundingBox geoBoundingBox, + AggregationContext context, + AggregatorFactory parent, + Builder subFactoriesBuilder, + Map metadata + ) throws IOException; public int precision() { return precision; @@ -126,8 +140,7 @@ public int precision() { public GeoGridAggregationBuilder size(int size) { if (size <= 0) { - throw new IllegalArgumentException( - "[size] must be greater than 0. Found [" + size + "] in [" + name + "]"); + throw new IllegalArgumentException("[size] must be greater than 0. Found [" + size + "] in [" + name + "]"); } this.requiredSize = size; return this; @@ -139,12 +152,11 @@ public int size() { public GeoGridAggregationBuilder shardSize(int shardSize) { if (shardSize <= 0) { - throw new IllegalArgumentException( - "[shardSize] must be greater than 0. Found [" + shardSize + "] in [" + name + "]"); - } + throw new IllegalArgumentException("[shardSize] must be greater than 0. Found [" + shardSize + "] in [" + name + "]"); + } this.shardSize = shardSize; return this; - } + } public int shardSize() { return shardSize; @@ -166,11 +178,12 @@ public final BucketCardinality bucketCardinality() { } @Override - protected ValuesSourceAggregatorFactory innerBuild(AggregationContext context, - ValuesSourceConfig config, - AggregatorFactory parent, - Builder subFactoriesBuilder) - throws IOException { + protected ValuesSourceAggregatorFactory innerBuild( + AggregationContext context, + ValuesSourceConfig config, + AggregatorFactory parent, + Builder subFactoriesBuilder + ) throws IOException { int shardSize = this.shardSize; int requiredSize = this.requiredSize; @@ -183,14 +196,25 @@ protected ValuesSourceAggregatorFactory innerBuild(AggregationContext context, if (requiredSize <= 0 || shardSize <= 0) { throw new ElasticsearchException( - "parameters [required_size] and [shard_size] must be > 0 in " + getType() + " aggregation [" + name + "]."); + "parameters [required_size] and [shard_size] must be > 0 in " + getType() + " aggregation [" + name + "]." 
+ ); } if (shardSize < requiredSize) { shardSize = requiredSize; } - return createFactory(name, config, precision, requiredSize, shardSize, geoBoundingBox, context, parent, - subFactoriesBuilder, metadata); + return createFactory( + name, + config, + precision, + requiredSize, + shardSize, + geoBoundingBox, + context, + parent, + subFactoriesBuilder, + metadata + ); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoGridAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoGridAggregator.java index 5237e3d88446c..bddef4f45635e 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoGridAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoGridAggregator.java @@ -38,9 +38,17 @@ public abstract class GeoGridAggregator> extends Bu protected final ValuesSource.Numeric valuesSource; protected final LongKeyedBucketOrds bucketOrds; - GeoGridAggregator(String name, AggregatorFactories factories, ValuesSource.Numeric valuesSource, - int requiredSize, int shardSize, AggregationContext aggregationContext, - Aggregator parent, CardinalityUpperBound cardinality, Map metadata) throws IOException { + GeoGridAggregator( + String name, + AggregatorFactories factories, + ValuesSource.Numeric valuesSource, + int requiredSize, + int shardSize, + AggregationContext aggregationContext, + Aggregator parent, + CardinalityUpperBound cardinality, + Map metadata + ) throws IOException { super(name, factories, aggregationContext, parent, CardinalityUpperBound.MANY, metadata); this.valuesSource = valuesSource; this.requiredSize = requiredSize; @@ -57,8 +65,7 @@ public ScoreMode scoreMode() { } @Override - public LeafBucketCollector getLeafCollector(LeafReaderContext ctx, - final LeafBucketCollector sub) throws IOException { + public LeafBucketCollector getLeafCollector(LeafReaderContext ctx, final LeafBucketCollector sub) throws IOException { SortedNumericDocValues values = valuesSource.longValues(ctx); return new LeafBucketCollectorBase(sub, null) { @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoHashCellIdSource.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoHashCellIdSource.java index ac7c08946a4d8..29a4e3b9a115d 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoHashCellIdSource.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoHashCellIdSource.java @@ -43,9 +43,9 @@ public boolean isFloatingPoint() { @Override public SortedNumericDocValues longValues(LeafReaderContext ctx) { - return geoBoundingBox.isUnbounded() ? - new UnboundedCellValues(valuesSource.geoPointValues(ctx), precision) : - new BoundedCellValues(valuesSource.geoPointValues(ctx), precision, geoBoundingBox); + return geoBoundingBox.isUnbounded() + ? 
new UnboundedCellValues(valuesSource.geoPointValues(ctx), precision) + : new BoundedCellValues(valuesSource.geoPointValues(ctx), precision, geoBoundingBox); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoHashGridAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoHashGridAggregationBuilder.java index 48f81e620f07a..30b285983e176 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoHashGridAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoHashGridAggregationBuilder.java @@ -33,8 +33,11 @@ public class GeoHashGridAggregationBuilder extends GeoGridAggregationBuilder { GeoGridAggregatorSupplier.class ); - public static final ObjectParser PARSER = - createParser(NAME, GeoUtils::parsePrecision, GeoHashGridAggregationBuilder::new); + public static final ObjectParser PARSER = createParser( + NAME, + GeoUtils::parsePrecision, + GeoHashGridAggregationBuilder::new + ); public GeoHashGridAggregationBuilder(String name) { super(name); @@ -59,19 +62,39 @@ public GeoGridAggregationBuilder precision(int precision) { @Override protected ValuesSourceAggregatorFactory createFactory( - String name, ValuesSourceConfig config, int precision, int requiredSize, int shardSize, - GeoBoundingBox geoBoundingBox, AggregationContext context, - AggregatorFactory parent, AggregatorFactories.Builder subFactoriesBuilder, - Map metadata) throws IOException { + String name, + ValuesSourceConfig config, + int precision, + int requiredSize, + int shardSize, + GeoBoundingBox geoBoundingBox, + AggregationContext context, + AggregatorFactory parent, + AggregatorFactories.Builder subFactoriesBuilder, + Map metadata + ) throws IOException { - GeoGridAggregatorSupplier aggregatorSupplier = - context.getValuesSourceRegistry().getAggregator(REGISTRY_KEY, config); - return new GeoHashGridAggregatorFactory(name, config, precision, requiredSize, shardSize, geoBoundingBox, - context, parent, subFactoriesBuilder, metadata, aggregatorSupplier); + GeoGridAggregatorSupplier aggregatorSupplier = context.getValuesSourceRegistry().getAggregator(REGISTRY_KEY, config); + return new GeoHashGridAggregatorFactory( + name, + config, + precision, + requiredSize, + shardSize, + geoBoundingBox, + context, + parent, + subFactoriesBuilder, + metadata, + aggregatorSupplier + ); } - private GeoHashGridAggregationBuilder(GeoHashGridAggregationBuilder clone, AggregatorFactories.Builder factoriesBuilder, - Map metadata) { + private GeoHashGridAggregationBuilder( + GeoHashGridAggregationBuilder clone, + AggregatorFactories.Builder factoriesBuilder, + Map metadata + ) { super(clone, factoriesBuilder, metadata); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoHashGridAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoHashGridAggregator.java index b3527521a1614..c07030a2069f5 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoHashGridAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoHashGridAggregator.java @@ -23,15 +23,22 @@ */ public class GeoHashGridAggregator extends GeoGridAggregator { - public GeoHashGridAggregator(String name, AggregatorFactories factories, ValuesSource.Numeric valuesSource, - int requiredSize, int shardSize, AggregationContext context, - Aggregator parent, 
CardinalityUpperBound cardinality, Map metadata) throws IOException { + public GeoHashGridAggregator( + String name, + AggregatorFactories factories, + ValuesSource.Numeric valuesSource, + int requiredSize, + int shardSize, + AggregationContext context, + Aggregator parent, + CardinalityUpperBound cardinality, + Map metadata + ) throws IOException { super(name, factories, valuesSource, requiredSize, shardSize, context, parent, cardinality, metadata); } @Override - InternalGeoHashGrid buildAggregation(String name, int requiredSize, List buckets, - Map metadata) { + InternalGeoHashGrid buildAggregation(String name, int requiredSize, List buckets, Map metadata) { return new InternalGeoHashGrid(name, requiredSize, buckets, metadata); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoHashGridAggregatorFactory.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoHashGridAggregatorFactory.java index b299e9217fde7..91638be9901b3 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoHashGridAggregatorFactory.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoHashGridAggregatorFactory.java @@ -36,11 +36,19 @@ public class GeoHashGridAggregatorFactory extends ValuesSourceAggregatorFactory private final int shardSize; private final GeoBoundingBox geoBoundingBox; - GeoHashGridAggregatorFactory(String name, ValuesSourceConfig config, int precision, int requiredSize, - int shardSize, GeoBoundingBox geoBoundingBox, AggregationContext context, - AggregatorFactory parent, AggregatorFactories.Builder subFactoriesBuilder, - Map metadata, - GeoGridAggregatorSupplier aggregatorSupplier) throws IOException { + GeoHashGridAggregatorFactory( + String name, + ValuesSourceConfig config, + int precision, + int requiredSize, + int shardSize, + GeoBoundingBox geoBoundingBox, + AggregationContext context, + AggregatorFactory parent, + AggregatorFactories.Builder subFactoriesBuilder, + Map metadata, + GeoGridAggregatorSupplier aggregatorSupplier + ) throws IOException { super(name, config, context, parent, subFactoriesBuilder, metadata); this.aggregatorSupplier = aggregatorSupplier; @@ -62,46 +70,29 @@ public InternalAggregation buildEmptyAggregation() { } @Override - protected Aggregator doCreateInternal(Aggregator parent, - CardinalityUpperBound cardinality, - Map metadata) throws IOException { - return aggregatorSupplier - .build( - name, - factories, - config.getValuesSource(), - precision, - geoBoundingBox, - requiredSize, - shardSize, - context, - parent, - cardinality, - metadata - ); + protected Aggregator doCreateInternal(Aggregator parent, CardinalityUpperBound cardinality, Map metadata) + throws IOException { + return aggregatorSupplier.build( + name, + factories, + config.getValuesSource(), + precision, + geoBoundingBox, + requiredSize, + shardSize, + context, + parent, + cardinality, + metadata + ); } static void registerAggregators(ValuesSourceRegistry.Builder builder) { builder.register( GeoHashGridAggregationBuilder.REGISTRY_KEY, CoreValuesSourceType.GEOPOINT, - ( - name, - factories, - valuesSource, - precision, - geoBoundingBox, - requiredSize, - shardSize, - context, - parent, - cardinality, - metadata) -> { - GeoHashCellIdSource cellIdSource = new GeoHashCellIdSource( - (ValuesSource.GeoPoint) valuesSource, - precision, - geoBoundingBox - ); + (name, factories, valuesSource, precision, geoBoundingBox, requiredSize, shardSize, context, parent, cardinality, 
metadata) -> { + GeoHashCellIdSource cellIdSource = new GeoHashCellIdSource((ValuesSource.GeoPoint) valuesSource, precision, geoBoundingBox); return new GeoHashGridAggregator( name, factories, @@ -113,6 +104,8 @@ static void registerAggregators(ValuesSourceRegistry.Builder builder) { cardinality, metadata ); - }, true); + }, + true + ); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoTileCellIdSource.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoTileCellIdSource.java index 2f597921834d9..20dda5d15f033 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoTileCellIdSource.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoTileCellIdSource.java @@ -42,9 +42,9 @@ public boolean isFloatingPoint() { @Override public SortedNumericDocValues longValues(LeafReaderContext ctx) { - return geoBoundingBox.isUnbounded() ? - new UnboundedCellValues(valuesSource.geoPointValues(ctx), precision) : - new BoundedCellValues(valuesSource.geoPointValues(ctx), precision, geoBoundingBox); + return geoBoundingBox.isUnbounded() + ? new UnboundedCellValues(valuesSource.geoPointValues(ctx), precision) + : new BoundedCellValues(valuesSource.geoPointValues(ctx), precision, geoBoundingBox); } @Override @@ -59,7 +59,6 @@ public SortedBinaryDocValues bytesValues(LeafReaderContext ctx) { private static class UnboundedCellValues extends CellValues { - UnboundedCellValues(MultiGeoPointValues geoValues, int precision) { super(geoValues, precision); } @@ -86,7 +85,7 @@ protected BoundedCellValues(MultiGeoPointValues geoValues, int precision, GeoBou final int minY = GeoTileUtils.getYTile(bbox.top(), this.tiles); final Rectangle minTile = GeoTileUtils.toBoundingBox(minX, minY, precision); // touching tiles are excluded, they need to share at least one interior point - this.minX = minTile.getMaxX() == bbox.left() ? minX + 1: minX; + this.minX = minTile.getMaxX() == bbox.left() ? minX + 1 : minX; this.minY = minTile.getMinY() == bbox.top() ? 
minY + 1 : minY; // compute maxX, maxY final int maxX = GeoTileUtils.getXTile(bbox.right(), this.tiles); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoTileGridAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoTileGridAggregationBuilder.java index e64952e343b48..9fafadde559d1 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoTileGridAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoTileGridAggregationBuilder.java @@ -32,8 +32,11 @@ public class GeoTileGridAggregationBuilder extends GeoGridAggregationBuilder { GeoGridAggregatorSupplier.class ); - public static final ObjectParser PARSER = - createParser(NAME, GeoTileUtils::parsePrecision, GeoTileGridAggregationBuilder::new); + public static final ObjectParser PARSER = createParser( + NAME, + GeoTileUtils::parsePrecision, + GeoTileGridAggregationBuilder::new + ); public GeoTileGridAggregationBuilder(String name) { super(name); @@ -58,15 +61,36 @@ public GeoGridAggregationBuilder precision(int precision) { @Override protected ValuesSourceAggregatorFactory createFactory( - String name, ValuesSourceConfig config, int precision, int requiredSize, int shardSize, - GeoBoundingBox geoBoundingBox, AggregationContext context, AggregatorFactory parent, - AggregatorFactories.Builder subFactoriesBuilder, Map metadata) throws IOException { - return new GeoTileGridAggregatorFactory(name, config, precision, requiredSize, shardSize, geoBoundingBox, - context, parent, subFactoriesBuilder, metadata); + String name, + ValuesSourceConfig config, + int precision, + int requiredSize, + int shardSize, + GeoBoundingBox geoBoundingBox, + AggregationContext context, + AggregatorFactory parent, + AggregatorFactories.Builder subFactoriesBuilder, + Map metadata + ) throws IOException { + return new GeoTileGridAggregatorFactory( + name, + config, + precision, + requiredSize, + shardSize, + geoBoundingBox, + context, + parent, + subFactoriesBuilder, + metadata + ); } - private GeoTileGridAggregationBuilder(GeoTileGridAggregationBuilder clone, AggregatorFactories.Builder factoriesBuilder, - Map metadata) { + private GeoTileGridAggregationBuilder( + GeoTileGridAggregationBuilder clone, + AggregatorFactories.Builder factoriesBuilder, + Map metadata + ) { super(clone, factoriesBuilder, metadata); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoTileGridAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoTileGridAggregator.java index 0aa36a1838548..4547cc2e99d4a 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoTileGridAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoTileGridAggregator.java @@ -24,15 +24,22 @@ */ public class GeoTileGridAggregator extends GeoGridAggregator { - public GeoTileGridAggregator(String name, AggregatorFactories factories, ValuesSource.Numeric valuesSource, - int requiredSize, int shardSize, AggregationContext context, - Aggregator parent, CardinalityUpperBound cardinality, Map metadata) throws IOException { + public GeoTileGridAggregator( + String name, + AggregatorFactories factories, + ValuesSource.Numeric valuesSource, + int requiredSize, + int shardSize, + AggregationContext context, + Aggregator parent, + CardinalityUpperBound cardinality, + Map metadata + ) throws IOException { 
super(name, factories, valuesSource, requiredSize, shardSize, context, parent, cardinality, metadata); } @Override - InternalGeoTileGrid buildAggregation(String name, int requiredSize, List buckets, - Map metadata) { + InternalGeoTileGrid buildAggregation(String name, int requiredSize, List buckets, Map metadata) { return new InternalGeoTileGrid(name, requiredSize, buckets, metadata); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoTileGridAggregatorFactory.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoTileGridAggregatorFactory.java index 3144b197e78d0..3851d0d9bb655 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoTileGridAggregatorFactory.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoTileGridAggregatorFactory.java @@ -33,10 +33,18 @@ public class GeoTileGridAggregatorFactory extends ValuesSourceAggregatorFactory private final int shardSize; private final GeoBoundingBox geoBoundingBox; - GeoTileGridAggregatorFactory(String name, ValuesSourceConfig config, int precision, int requiredSize, - int shardSize, GeoBoundingBox geoBoundingBox, AggregationContext context, - AggregatorFactory parent, AggregatorFactories.Builder subFactoriesBuilder, - Map metadata) throws IOException { + GeoTileGridAggregatorFactory( + String name, + ValuesSourceConfig config, + int precision, + int requiredSize, + int shardSize, + GeoBoundingBox geoBoundingBox, + AggregationContext context, + AggregatorFactory parent, + AggregatorFactories.Builder subFactoriesBuilder, + Map metadata + ) throws IOException { super(name, config, context, parent, subFactoriesBuilder, metadata); this.precision = precision; this.requiredSize = requiredSize; @@ -66,7 +74,13 @@ protected Aggregator doCreateInternal(Aggregator parent, CardinalityUpperBound c config.getValuesSource(), precision, geoBoundingBox, - requiredSize, shardSize, context, parent, cardinality, metadata); + requiredSize, + shardSize, + context, + parent, + cardinality, + metadata + ); } static void registerAggregators(ValuesSourceRegistry.Builder builder) { @@ -85,11 +99,7 @@ static void registerAggregators(ValuesSourceRegistry.Builder builder) { parent, cardinality, metadata) -> { - GeoTileCellIdSource cellIdSource = new GeoTileCellIdSource( - (ValuesSource.GeoPoint) valuesSource, - precision, - geoBoundingBox - ); + GeoTileCellIdSource cellIdSource = new GeoTileCellIdSource((ValuesSource.GeoPoint) valuesSource, precision, geoBoundingBox); return new GeoTileGridAggregator( name, factories, @@ -102,6 +112,7 @@ static void registerAggregators(ValuesSourceRegistry.Builder builder) { metadata ); }, - true); + true + ); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoTileUtils.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoTileUtils.java index e4cec63c58051..79dc4a90ef327 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoTileUtils.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoTileUtils.java @@ -11,10 +11,10 @@ import org.apache.lucene.util.SloppyMath; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.geo.GeoPoint; -import org.elasticsearch.core.ESSloppyMath; import org.elasticsearch.common.xcontent.ObjectParser.ValueType; import org.elasticsearch.common.xcontent.XContentParser; import 
org.elasticsearch.common.xcontent.support.XContentMapValues; +import org.elasticsearch.core.ESSloppyMath; import org.elasticsearch.geometry.Rectangle; import java.io.IOException; @@ -56,8 +56,9 @@ private GeoTileUtils() {} * Since shapes are encoded, their boundaries are to be compared to against the encoded/decoded values of LATITUDE_MASK */ public static final double NORMALIZED_LATITUDE_MASK = GeoEncodingUtils.decodeLatitude(GeoEncodingUtils.encodeLatitude(LATITUDE_MASK)); - public static final double NORMALIZED_NEGATIVE_LATITUDE_MASK = - GeoEncodingUtils.decodeLatitude(GeoEncodingUtils.encodeLatitude(-LATITUDE_MASK)); + public static final double NORMALIZED_NEGATIVE_LATITUDE_MASK = GeoEncodingUtils.decodeLatitude( + GeoEncodingUtils.encodeLatitude(-LATITUDE_MASK) + ); /** * Bit position of the zoom value within hash - zoom is stored in the most significant 6 bits of a long number. @@ -69,7 +70,6 @@ private GeoTileUtils() {} */ private static final long X_Y_VALUE_MASK = (1L << MAX_ZOOM) - 1; - /** * Parse an integer precision (zoom level). The {@link ValueType#INT} allows it to be a number or a string. * @@ -90,8 +90,9 @@ static int parsePrecision(XContentParser parser) throws IOException, Elasticsear */ public static int checkPrecisionRange(int precision) { if (precision < 0 || precision > MAX_ZOOM) { - throw new IllegalArgumentException("Invalid geotile_grid precision of " + - precision + ". Must be between 0 and " + MAX_ZOOM + "."); + throw new IllegalArgumentException( + "Invalid geotile_grid precision of " + precision + ". Must be between 0 and " + MAX_ZOOM + "." + ); } return precision; } @@ -183,7 +184,7 @@ private static int[] parseHash(long hash) { final int zoom = (int) (hash >>> ZOOM_SHIFT); final int xTile = (int) ((hash >>> MAX_ZOOM) & X_Y_VALUE_MASK); final int yTile = (int) (hash & X_Y_VALUE_MASK); - return new int[]{zoom, xTile, yTile}; + return new int[] { zoom, xTile, yTile }; } private static long longEncode(long precision, long xTile, long yTile) { @@ -199,14 +200,17 @@ private static long longEncode(long precision, long xTile, long yTile) { private static int[] parseHash(String hashAsString) { final String[] parts = hashAsString.split("/", 4); if (parts.length != 3) { - throw new IllegalArgumentException("Invalid geotile_grid hash string of " + - hashAsString + ". Must be three integers in a form \"zoom/x/y\"."); + throw new IllegalArgumentException( + "Invalid geotile_grid hash string of " + hashAsString + ". Must be three integers in a form \"zoom/x/y\"." + ); } try { - return new int[]{Integer.parseInt(parts[0]), Integer.parseInt(parts[1]), Integer.parseInt(parts[2])}; + return new int[] { Integer.parseInt(parts[0]), Integer.parseInt(parts[1]), Integer.parseInt(parts[2]) }; } catch (NumberFormatException e) { - throw new IllegalArgumentException("Invalid geotile_grid hash string of " + - hashAsString + ". Must be three integers in a form \"zoom/x/y\".", e); + throw new IllegalArgumentException( + "Invalid geotile_grid hash string of " + hashAsString + ". 
Must be three integers in a form \"zoom/x/y\".", + e + ); } } @@ -266,8 +270,9 @@ public static Rectangle toBoundingBox(int xTile, int yTile, int precision) { private static int validateZXY(int zoom, int xTile, int yTile) { final int tiles = 1 << checkPrecisionRange(zoom); if (xTile < 0 || yTile < 0 || xTile >= tiles || yTile >= tiles) { - throw new IllegalArgumentException(String.format( - Locale.ROOT, "Zoom/X/Y combination is not valid: %d/%d/%d", zoom, xTile, yTile)); + throw new IllegalArgumentException( + String.format(Locale.ROOT, "Zoom/X/Y combination is not valid: %d/%d/%d", zoom, xTile, yTile) + ); } return tiles; } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/InternalGeoGrid.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/InternalGeoGrid.java index fe92f6e35c14a..8fa631b80995e 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/InternalGeoGrid.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/InternalGeoGrid.java @@ -30,8 +30,9 @@ * All geo-grid hash-encoding in a grid are of the same precision and held internally as a single long * for efficiency's sake. */ -public abstract class InternalGeoGrid - extends InternalMultiBucketAggregation, InternalGeoGridBucket> implements GeoGrid { +public abstract class InternalGeoGrid extends InternalMultiBucketAggregation< + InternalGeoGrid, + InternalGeoGridBucket> implements GeoGrid { protected final int requiredSize; protected final List buckets; @@ -144,8 +145,7 @@ public boolean equals(Object obj) { if (super.equals(obj) == false) return false; InternalGeoGrid other = (InternalGeoGrid) obj; - return Objects.equals(requiredSize, other.requiredSize) - && Objects.equals(buckets, other.buckets); + return Objects.equals(requiredSize, other.requiredSize) && Objects.equals(buckets, other.buckets); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/InternalGeoGridBucket.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/InternalGeoGridBucket.java index 1ce9c6510fa80..998f0cfbaa988 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/InternalGeoGridBucket.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/InternalGeoGridBucket.java @@ -18,8 +18,10 @@ import java.io.IOException; import java.util.Objects; -public abstract class InternalGeoGridBucket - extends InternalMultiBucketAggregation.InternalBucket implements GeoGrid.Bucket, Comparable { +public abstract class InternalGeoGridBucket extends InternalMultiBucketAggregation.InternalBucket + implements + GeoGrid.Bucket, + Comparable { protected long hashAsLong; protected long docCount; @@ -89,9 +91,7 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; InternalGeoGridBucket bucket = (InternalGeoGridBucket) o; - return hashAsLong == bucket.hashAsLong && - docCount == bucket.docCount && - Objects.equals(aggregations, bucket.aggregations); + return hashAsLong == bucket.hashAsLong && docCount == bucket.docCount && Objects.equals(aggregations, bucket.aggregations); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/ParsedGeoGrid.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/ParsedGeoGrid.java index b5f6b80e2a2fb..74e2d28592ba1 100644 --- 
a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/ParsedGeoGrid.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/ParsedGeoGrid.java @@ -8,9 +8,9 @@ package org.elasticsearch.search.aggregations.bucket.geogrid; -import org.elasticsearch.core.CheckedFunction; import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.core.CheckedFunction; import org.elasticsearch.search.aggregations.ParsedMultiBucketAggregation; import java.io.IOException; @@ -24,10 +24,12 @@ public List getBuckets() { return buckets; } - public static ObjectParser createParser(Supplier supplier, - CheckedFunction bucketParser, - CheckedFunction keyedBucketParser) { - ObjectParser parser = new ObjectParser<>(ParsedGeoGrid.class.getSimpleName(), true, supplier); + public static ObjectParser createParser( + Supplier supplier, + CheckedFunction bucketParser, + CheckedFunction keyedBucketParser + ) { + ObjectParser parser = new ObjectParser<>(ParsedGeoGrid.class.getSimpleName(), true, supplier); declareMultiBucketAggregationFields(parser, bucketParser, keyedBucketParser); return parser; } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/ParsedGeoHashGrid.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/ParsedGeoHashGrid.java index 849882cf7cc45..1b7ed8e97076f 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/ParsedGeoHashGrid.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/ParsedGeoHashGrid.java @@ -15,8 +15,11 @@ public class ParsedGeoHashGrid extends ParsedGeoGrid { - private static final ObjectParser PARSER = createParser(ParsedGeoHashGrid::new, - ParsedGeoHashGridBucket::fromXContent, ParsedGeoHashGridBucket::fromXContent); + private static final ObjectParser PARSER = createParser( + ParsedGeoHashGrid::new, + ParsedGeoHashGridBucket::fromXContent, + ParsedGeoHashGridBucket::fromXContent + ); public static ParsedGeoGrid fromXContent(XContentParser parser, String name) throws IOException { ParsedGeoGrid aggregation = PARSER.parse(parser, null); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/ParsedGeoTileGrid.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/ParsedGeoTileGrid.java index 7416fb89a95ec..bd3bd36f48fac 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/ParsedGeoTileGrid.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/ParsedGeoTileGrid.java @@ -15,8 +15,11 @@ public class ParsedGeoTileGrid extends ParsedGeoGrid { - private static final ObjectParser PARSER = createParser(ParsedGeoTileGrid::new, - ParsedGeoTileGridBucket::fromXContent, ParsedGeoTileGridBucket::fromXContent); + private static final ObjectParser PARSER = createParser( + ParsedGeoTileGrid::new, + ParsedGeoTileGridBucket::fromXContent, + ParsedGeoTileGridBucket::fromXContent + ); public static ParsedGeoGrid fromXContent(XContentParser parser, String name) throws IOException { ParsedGeoGrid aggregation = PARSER.parse(parser, null); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/global/Global.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/global/Global.java index 234f84e062d0d..c043b2efb2569 100644 --- 
a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/global/Global.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/global/Global.java @@ -12,5 +12,4 @@ /** * A {@code global} aggregation. Defines a single bucket the holds all the documents in the search context. */ -public interface Global extends SingleBucketAggregation { -} +public interface Global extends SingleBucketAggregation {} diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/global/GlobalAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/global/GlobalAggregationBuilder.java index f68c6e31209e5..5853561486b7e 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/global/GlobalAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/global/GlobalAggregationBuilder.java @@ -61,7 +61,7 @@ public BucketCardinality bucketCardinality() { @Override protected AggregatorFactory doBuild(AggregationContext context, AggregatorFactory parent, Builder subFactoriesBuilder) - throws IOException { + throws IOException { return new GlobalAggregatorFactory(name, context, parent, subFactoriesBuilder, metadata); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/global/GlobalAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/global/GlobalAggregator.java index 3a171b10329b8..ab1aefcc093ce 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/global/GlobalAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/global/GlobalAggregator.java @@ -57,15 +57,22 @@ public void setScorer(Scorable scorer) throws IOException { @Override public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException { - assert owningBucketOrds.length == 1 && owningBucketOrds[0] == 0: "global aggregator can only be a top level aggregator"; - return buildAggregationsForSingleBucket(owningBucketOrds, (owningBucketOrd, subAggregationResults) -> - new InternalGlobal(name, bucketDocCount(owningBucketOrd), subAggregationResults, metadata()) + assert owningBucketOrds.length == 1 && owningBucketOrds[0] == 0 : "global aggregator can only be a top level aggregator"; + return buildAggregationsForSingleBucket( + owningBucketOrds, + (owningBucketOrd, subAggregationResults) -> new InternalGlobal( + name, + bucketDocCount(owningBucketOrd), + subAggregationResults, + metadata() + ) ); } @Override public InternalAggregation buildEmptyAggregation() { throw new UnsupportedOperationException( - "global aggregations cannot serve as sub-aggregations, hence should never be called on #buildEmptyAggregations"); + "global aggregations cannot serve as sub-aggregations, hence should never be called on #buildEmptyAggregations" + ); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/global/GlobalAggregatorFactory.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/global/GlobalAggregatorFactory.java index 749c8ef570a78..503bb800ab223 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/global/GlobalAggregatorFactory.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/global/GlobalAggregatorFactory.java @@ -20,11 +20,13 @@ public class GlobalAggregatorFactory extends AggregatorFactory { - public GlobalAggregatorFactory(String name, - AggregationContext context, - AggregatorFactory parent, - 
AggregatorFactories.Builder subFactories, - Map metadata) throws IOException { + public GlobalAggregatorFactory( + String name, + AggregationContext context, + AggregatorFactory parent, + AggregatorFactories.Builder subFactories, + Map metadata + ) throws IOException { super(name, context, parent, subFactories, metadata); } @@ -32,8 +34,14 @@ public GlobalAggregatorFactory(String name, public Aggregator createInternal(Aggregator parent, CardinalityUpperBound cardinality, Map metadata) throws IOException { if (parent != null) { - throw new AggregationExecutionException("Aggregation [" + parent.name() + "] cannot have a global " + "sub-aggregation [" + name - + "]. Global aggregations can only be defined as top level aggregations"); + throw new AggregationExecutionException( + "Aggregation [" + + parent.name() + + "] cannot have a global " + + "sub-aggregation [" + + name + + "]. Global aggregations can only be defined as top level aggregations" + ); } if (cardinality != CardinalityUpperBound.ONE) { throw new AggregationExecutionException("Aggregation [" + name() + "] must have cardinality 1 but was [" + cardinality + "]"); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/AbstractHistogramAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/AbstractHistogramAggregator.java index 2b5092d79eb94..ba4a9ed2cdab4 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/AbstractHistogramAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/AbstractHistogramAggregator.java @@ -78,30 +78,39 @@ public AbstractHistogramAggregator( @Override public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException { - return buildAggregationsForVariableBuckets(owningBucketOrds, bucketOrds, - (bucketValue, docCount, subAggregationResults) -> { - double roundKey = Double.longBitsToDouble(bucketValue); - double key = roundKey * interval + offset; - return new InternalHistogram.Bucket(key, docCount, keyed, formatter, subAggregationResults); - }, (owningBucketOrd, buckets) -> { - // the contract of the histogram aggregation is that shards must return buckets ordered by key in ascending order - CollectionUtil.introSort(buckets, BucketOrder.key(true).comparator()); + return buildAggregationsForVariableBuckets(owningBucketOrds, bucketOrds, (bucketValue, docCount, subAggregationResults) -> { + double roundKey = Double.longBitsToDouble(bucketValue); + double key = roundKey * interval + offset; + return new InternalHistogram.Bucket(key, docCount, keyed, formatter, subAggregationResults); + }, (owningBucketOrd, buckets) -> { + // the contract of the histogram aggregation is that shards must return buckets ordered by key in ascending order + CollectionUtil.introSort(buckets, BucketOrder.key(true).comparator()); - EmptyBucketInfo emptyBucketInfo = null; - if (minDocCount == 0) { - emptyBucketInfo = new EmptyBucketInfo(interval, offset, getEffectiveMin(extendedBounds), - getEffectiveMax(extendedBounds), buildEmptySubAggregations()); - } - return new InternalHistogram(name, buckets, order, minDocCount, emptyBucketInfo, formatter, keyed, metadata()); - }); + EmptyBucketInfo emptyBucketInfo = null; + if (minDocCount == 0) { + emptyBucketInfo = new EmptyBucketInfo( + interval, + offset, + getEffectiveMin(extendedBounds), + getEffectiveMax(extendedBounds), + buildEmptySubAggregations() + ); + } + return new InternalHistogram(name, buckets, order, 
minDocCount, emptyBucketInfo, formatter, keyed, metadata()); + }); } @Override public InternalAggregation buildEmptyAggregation() { InternalHistogram.EmptyBucketInfo emptyBucketInfo = null; if (minDocCount == 0) { - emptyBucketInfo = new InternalHistogram.EmptyBucketInfo(interval, offset, getEffectiveMin(extendedBounds), - getEffectiveMax(extendedBounds), buildEmptySubAggregations()); + emptyBucketInfo = new InternalHistogram.EmptyBucketInfo( + interval, + offset, + getEffectiveMin(extendedBounds), + getEffectiveMax(extendedBounds), + buildEmptySubAggregations() + ); } return new InternalHistogram(name, Collections.emptyList(), order, minDocCount, emptyBucketInfo, formatter, keyed, metadata()); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregationBuilder.java index d0a69abef66c7..fc07566236acd 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregationBuilder.java @@ -9,13 +9,13 @@ package org.elasticsearch.search.aggregations.bucket.histogram; import org.elasticsearch.Version; -import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.Rounding; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.ObjectParser; +import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregatorFactories.Builder; @@ -46,8 +46,10 @@ public class AutoDateHistogramAggregationBuilder extends ValuesSourceAggregation private static final ParseField NUM_BUCKETS_FIELD = new ParseField("buckets"); private static final ParseField MINIMUM_INTERVAL_FIELD = new ParseField("minimum_interval"); - public static final ObjectParser PARSER = - ObjectParser.fromBuilder(NAME, AutoDateHistogramAggregationBuilder::new); + public static final ObjectParser PARSER = ObjectParser.fromBuilder( + NAME, + AutoDateHistogramAggregationBuilder::new + ); static { ValuesSourceAggregationBuilder.declareFields(PARSER, true, true, true); PARSER.declareInt(AutoDateHistogramAggregationBuilder::setNumBuckets, NUM_BUCKETS_FIELD); @@ -58,7 +60,7 @@ public class AutoDateHistogramAggregationBuilder extends ValuesSourceAggregation entry(Rounding.DateTimeUnit.YEAR_OF_CENTURY, "year"), entry(Rounding.DateTimeUnit.MONTH_OF_YEAR, "month"), entry(Rounding.DateTimeUnit.DAY_OF_MONTH, "day"), - entry( Rounding.DateTimeUnit.HOUR_OF_DAY, "hour"), + entry(Rounding.DateTimeUnit.HOUR_OF_DAY, "hour"), entry(Rounding.DateTimeUnit.MINUTES_OF_HOUR, "minute"), entry(Rounding.DateTimeUnit.SECOND_OF_MINUTE, "second") ); @@ -78,18 +80,23 @@ static RoundingInfo[] buildRoundings(ZoneId timeZone, String minimumInterval) { int indexToSliceFrom = 0; RoundingInfo[] roundings = new RoundingInfo[6]; - roundings[0] = new RoundingInfo(Rounding.DateTimeUnit.SECOND_OF_MINUTE, - timeZone, 1000L, "s",1, 5, 10, 30); - roundings[1] = new RoundingInfo(Rounding.DateTimeUnit.MINUTES_OF_HOUR, timeZone, - 60 * 1000L, "m", 1, 5, 10, 30); - 
roundings[2] = new RoundingInfo(Rounding.DateTimeUnit.HOUR_OF_DAY, timeZone, - 60 * 60 * 1000L, "h", 1, 3, 12); - roundings[3] = new RoundingInfo(Rounding.DateTimeUnit.DAY_OF_MONTH, timeZone, - 24 * 60 * 60 * 1000L, "d", 1, 7); - roundings[4] = new RoundingInfo(Rounding.DateTimeUnit.MONTH_OF_YEAR, timeZone, - 30 * 24 * 60 * 60 * 1000L, "M", 1, 3); - roundings[5] = new RoundingInfo(Rounding.DateTimeUnit.YEAR_OF_CENTURY, timeZone, - 365 * 24 * 60 * 60 * 1000L, "y", 1, 5, 10, 20, 50, 100); + roundings[0] = new RoundingInfo(Rounding.DateTimeUnit.SECOND_OF_MINUTE, timeZone, 1000L, "s", 1, 5, 10, 30); + roundings[1] = new RoundingInfo(Rounding.DateTimeUnit.MINUTES_OF_HOUR, timeZone, 60 * 1000L, "m", 1, 5, 10, 30); + roundings[2] = new RoundingInfo(Rounding.DateTimeUnit.HOUR_OF_DAY, timeZone, 60 * 60 * 1000L, "h", 1, 3, 12); + roundings[3] = new RoundingInfo(Rounding.DateTimeUnit.DAY_OF_MONTH, timeZone, 24 * 60 * 60 * 1000L, "d", 1, 7); + roundings[4] = new RoundingInfo(Rounding.DateTimeUnit.MONTH_OF_YEAR, timeZone, 30 * 24 * 60 * 60 * 1000L, "M", 1, 3); + roundings[5] = new RoundingInfo( + Rounding.DateTimeUnit.YEAR_OF_CENTURY, + timeZone, + 365 * 24 * 60 * 60 * 1000L, + "y", + 1, + 5, + 10, + 20, + 50, + 100 + ); for (int i = 0; i < roundings.length; i++) { RoundingInfo roundingInfo = roundings[i]; @@ -127,8 +134,11 @@ protected void innerWriteTo(StreamOutput out) throws IOException { } } - protected AutoDateHistogramAggregationBuilder(AutoDateHistogramAggregationBuilder clone, Builder factoriesBuilder, - Map metadata) { + protected AutoDateHistogramAggregationBuilder( + AutoDateHistogramAggregationBuilder clone, + Builder factoriesBuilder, + Map metadata + ) { super(clone, factoriesBuilder, metadata); this.numBuckets = clone.numBuckets; this.minimumIntervalExpression = clone.minimumIntervalExpression; @@ -160,8 +170,9 @@ public String getMinimumIntervalExpression() { public AutoDateHistogramAggregationBuilder setMinimumIntervalExpression(String minimumIntervalExpression) { if (minimumIntervalExpression != null && ALLOWED_INTERVALS.containsValue(minimumIntervalExpression) == false) { - throw new IllegalArgumentException(MINIMUM_INTERVAL_FIELD.getPreferredName() + - " must be one of [" + ALLOWED_INTERVALS.values().toString() + "]"); + throw new IllegalArgumentException( + MINIMUM_INTERVAL_FIELD.getPreferredName() + " must be one of [" + ALLOWED_INTERVALS.values().toString() + "]" + ); } this.minimumIntervalExpression = minimumIntervalExpression; return this; @@ -185,26 +196,38 @@ public BucketCardinality bucketCardinality() { } @Override - protected ValuesSourceAggregatorFactory innerBuild(AggregationContext context, ValuesSourceConfig config, - AggregatorFactory parent, Builder subFactoriesBuilder) throws IOException { - AutoDateHistogramAggregatorSupplier aggregatorSupplier = - context.getValuesSourceRegistry().getAggregator(REGISTRY_KEY, config); + protected ValuesSourceAggregatorFactory innerBuild( + AggregationContext context, + ValuesSourceConfig config, + AggregatorFactory parent, + Builder subFactoriesBuilder + ) throws IOException { + AutoDateHistogramAggregatorSupplier aggregatorSupplier = context.getValuesSourceRegistry().getAggregator(REGISTRY_KEY, config); RoundingInfo[] roundings = buildRoundings(timeZone(), getMinimumIntervalExpression()); - int maxRoundingInterval = Arrays.stream(roundings,0, roundings.length-1) + int maxRoundingInterval = Arrays.stream(roundings, 0, roundings.length - 1) .map(rounding -> rounding.innerIntervals) .flatMapToInt(Arrays::stream) .boxed() - 
.reduce(Integer::max).get(); + .reduce(Integer::max) + .get(); Settings settings = context.getIndexSettings().getNodeSettings(); int maxBuckets = MultiBucketConsumerService.MAX_BUCKET_SETTING.get(settings); int bucketCeiling = maxBuckets / maxRoundingInterval; if (numBuckets > bucketCeiling) { - throw new IllegalArgumentException(NUM_BUCKETS_FIELD.getPreferredName()+ - " must be less than " + bucketCeiling); + throw new IllegalArgumentException(NUM_BUCKETS_FIELD.getPreferredName() + " must be less than " + bucketCeiling); } - return new AutoDateHistogramAggregatorFactory(name, config, numBuckets, roundings, context, - parent, subFactoriesBuilder, metadata, aggregatorSupplier); + return new AutoDateHistogramAggregatorFactory( + name, + config, + numBuckets, + roundings, + context, + parent, + subFactoriesBuilder, + metadata, + aggregatorSupplier + ); } static Rounding createRounding(Rounding.DateTimeUnit interval, ZoneId timeZone) { @@ -244,11 +267,13 @@ public static class RoundingInfo implements Writeable { final String unitAbbreviation; final String dateTimeUnit; - public RoundingInfo(Rounding.DateTimeUnit dateTimeUnit, - ZoneId timeZone, - long roughEstimateDurationMillis, - String unitAbbreviation, - int... innerIntervals) { + public RoundingInfo( + Rounding.DateTimeUnit dateTimeUnit, + ZoneId timeZone, + long roughEstimateDurationMillis, + String unitAbbreviation, + int... innerIntervals + ) { this.rounding = createRounding(dateTimeUnit, timeZone); this.roughEstimateDurationMillis = roughEstimateDurationMillis; this.unitAbbreviation = unitAbbreviation; @@ -281,7 +306,9 @@ public int getMaximumInnerInterval() { return innerIntervals[innerIntervals.length - 1]; } - public String getDateTimeUnit() { return this.dateTimeUnit; } + public String getDateTimeUnit() { + return this.dateTimeUnit; + } public long getRoughEstimateDurationMillis() { return roughEstimateDurationMillis; @@ -307,8 +334,7 @@ public boolean equals(Object obj) { RoundingInfo other = (RoundingInfo) obj; return Objects.equals(rounding, other.rounding) && Objects.deepEquals(innerIntervals, other.innerIntervals) - && Objects.equals(dateTimeUnit, other.dateTimeUnit) - ; + && Objects.equals(dateTimeUnit, other.dateTimeUnit); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregator.java index e2e3e93f4d095..ebfc2dcbbfa1b 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregator.java @@ -12,10 +12,10 @@ import org.apache.lucene.search.ScoreMode; import org.apache.lucene.util.CollectionUtil; import org.elasticsearch.common.Rounding; -import org.elasticsearch.core.Releasables; import org.elasticsearch.common.util.ByteArray; import org.elasticsearch.common.util.IntArray; import org.elasticsearch.common.util.LongArray; +import org.elasticsearch.core.Releasables; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.Aggregator; import org.elasticsearch.search.aggregations.AggregatorFactories; @@ -66,26 +66,8 @@ static AutoDateHistogramAggregator build( Map metadata ) throws IOException { return cardinality == CardinalityUpperBound.ONE - ? 
new FromSingle( - name, - factories, - targetBuckets, - roundingInfos, - valuesSourceConfig, - context, - parent, - metadata - ) - : new FromMany( - name, - factories, - targetBuckets, - roundingInfos, - valuesSourceConfig, - context, - parent, - metadata - ); + ? new FromSingle(name, factories, targetBuckets, roundingInfos, valuesSourceConfig, context, parent, metadata) + : new FromMany(name, factories, targetBuckets, roundingInfos, valuesSourceConfig, context, parent, metadata); } private final ValuesSource.Numeric valuesSource; @@ -242,16 +224,7 @@ private static class FromSingle extends AutoDateHistogramAggregator { Aggregator parent, Map metadata ) throws IOException { - super( - name, - factories, - targetBuckets, - roundingInfos, - valuesSourceConfig, - context, - parent, - metadata - ); + super(name, factories, targetBuckets, roundingInfos, valuesSourceConfig, context, parent, metadata); preparedRounding = prepareRounding(0); bucketOrds = new LongKeyedBucketOrds.FromSingle(bigArrays()); @@ -446,16 +419,7 @@ private static class FromMany extends AutoDateHistogramAggregator { Aggregator parent, Map metadata ) throws IOException { - super( - name, - factories, - targetBuckets, - roundingInfos, - valuesSourceConfig, - context, - parent, - metadata - ); + super(name, factories, targetBuckets, roundingInfos, valuesSourceConfig, context, parent, metadata); assert roundingInfos.length < 127 : "Rounding must fit in a signed byte"; roundingIndices = bigArrays().newByteArray(1, true); mins = bigArrays().newLongArray(1, false); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregatorFactory.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregatorFactory.java index 9ec7ffdb318fb..a1f1427da8574 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregatorFactory.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregatorFactory.java @@ -76,15 +76,17 @@ public static void registerAggregators(ValuesSourceRegistry.Builder builder) { private final int numBuckets; private RoundingInfo[] roundingInfos; - public AutoDateHistogramAggregatorFactory(String name, - ValuesSourceConfig config, - int numBuckets, - RoundingInfo[] roundingInfos, - AggregationContext context, - AggregatorFactory parent, - AggregatorFactories.Builder subFactoriesBuilder, - Map metadata, - AutoDateHistogramAggregatorSupplier aggregatorSupplier) throws IOException { + public AutoDateHistogramAggregatorFactory( + String name, + ValuesSourceConfig config, + int numBuckets, + RoundingInfo[] roundingInfos, + AggregationContext context, + AggregatorFactory parent, + AggregatorFactories.Builder subFactoriesBuilder, + Map metadata, + AutoDateHistogramAggregatorSupplier aggregatorSupplier + ) throws IOException { super(name, config, context, parent, subFactoriesBuilder, metadata); this.aggregatorSupplier = aggregatorSupplier; @@ -93,20 +95,9 @@ public AutoDateHistogramAggregatorFactory(String name, } @Override - protected Aggregator doCreateInternal(Aggregator parent, - CardinalityUpperBound cardinality, - Map metadata) throws IOException { - return aggregatorSupplier.build( - name, - factories, - numBuckets, - roundingInfos, - config, - context, - parent, - cardinality, - metadata - ); + protected Aggregator doCreateInternal(Aggregator parent, CardinalityUpperBound cardinality, Map metadata) + throws IOException { + 
return aggregatorSupplier.build(name, factories, numBuckets, roundingInfos, config, context, parent, cardinality, metadata); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregatorSupplier.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregatorSupplier.java index ff3715d3b81b3..be69b2c5aa68c 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregatorSupplier.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregatorSupplier.java @@ -25,8 +25,7 @@ Aggregator build( AggregatorFactories factories, int numBuckets, AutoDateHistogramAggregationBuilder.RoundingInfo[] roundingInfos, - @Nullable - ValuesSourceConfig valuesSourceConfig, + @Nullable ValuesSourceConfig valuesSourceConfig, AggregationContext aggregationContext, Aggregator parent, CardinalityUpperBound cardinality, diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregationBuilder.java index 8d6bd34fde98c..754b1f957bb82 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregationBuilder.java @@ -12,10 +12,10 @@ import org.elasticsearch.common.Rounding; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.core.TimeValue; import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.core.TimeValue; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregatorFactories; import org.elasticsearch.search.aggregations.AggregatorFactory; @@ -42,32 +42,36 @@ * A builder for histograms on date fields. 
*/ public class DateHistogramAggregationBuilder extends ValuesSourceAggregationBuilder - implements DateIntervalConsumer { + implements + DateIntervalConsumer { public static final String NAME = "date_histogram"; public static final ValuesSourceRegistry.RegistryKey REGISTRY_KEY = new ValuesSourceRegistry.RegistryKey<>(NAME, DateHistogramAggregationSupplier.class); public static final Map DATE_FIELD_UNITS = Map.ofEntries( - entry("year", Rounding.DateTimeUnit.YEAR_OF_CENTURY), - entry("1y", Rounding.DateTimeUnit.YEAR_OF_CENTURY), - entry("quarter", Rounding.DateTimeUnit.QUARTER_OF_YEAR), - entry("1q", Rounding.DateTimeUnit.QUARTER_OF_YEAR), - entry("month", Rounding.DateTimeUnit.MONTH_OF_YEAR), - entry("1M", Rounding.DateTimeUnit.MONTH_OF_YEAR), - entry("week", Rounding.DateTimeUnit.WEEK_OF_WEEKYEAR), - entry("1w", Rounding.DateTimeUnit.WEEK_OF_WEEKYEAR), - entry("day", Rounding.DateTimeUnit.DAY_OF_MONTH), - entry("1d", Rounding.DateTimeUnit.DAY_OF_MONTH), - entry("hour", Rounding.DateTimeUnit.HOUR_OF_DAY), - entry("1h", Rounding.DateTimeUnit.HOUR_OF_DAY), - entry("minute", Rounding.DateTimeUnit.MINUTES_OF_HOUR), - entry("1m", Rounding.DateTimeUnit.MINUTES_OF_HOUR), - entry("second", Rounding.DateTimeUnit.SECOND_OF_MINUTE), - entry("1s", Rounding.DateTimeUnit.SECOND_OF_MINUTE)); - - public static final ObjectParser PARSER = - ObjectParser.fromBuilder(NAME, DateHistogramAggregationBuilder::new); + entry("year", Rounding.DateTimeUnit.YEAR_OF_CENTURY), + entry("1y", Rounding.DateTimeUnit.YEAR_OF_CENTURY), + entry("quarter", Rounding.DateTimeUnit.QUARTER_OF_YEAR), + entry("1q", Rounding.DateTimeUnit.QUARTER_OF_YEAR), + entry("month", Rounding.DateTimeUnit.MONTH_OF_YEAR), + entry("1M", Rounding.DateTimeUnit.MONTH_OF_YEAR), + entry("week", Rounding.DateTimeUnit.WEEK_OF_WEEKYEAR), + entry("1w", Rounding.DateTimeUnit.WEEK_OF_WEEKYEAR), + entry("day", Rounding.DateTimeUnit.DAY_OF_MONTH), + entry("1d", Rounding.DateTimeUnit.DAY_OF_MONTH), + entry("hour", Rounding.DateTimeUnit.HOUR_OF_DAY), + entry("1h", Rounding.DateTimeUnit.HOUR_OF_DAY), + entry("minute", Rounding.DateTimeUnit.MINUTES_OF_HOUR), + entry("1m", Rounding.DateTimeUnit.MINUTES_OF_HOUR), + entry("second", Rounding.DateTimeUnit.SECOND_OF_MINUTE), + entry("1s", Rounding.DateTimeUnit.SECOND_OF_MINUTE) + ); + + public static final ObjectParser PARSER = ObjectParser.fromBuilder( + NAME, + DateHistogramAggregationBuilder::new + ); static { ValuesSourceAggregationBuilder.declareFields(PARSER, true, true, true); DateIntervalWrapper.declareIntervalFields(PARSER); @@ -84,14 +88,25 @@ public class DateHistogramAggregationBuilder extends ValuesSourceAggregationBuil PARSER.declareLong(DateHistogramAggregationBuilder::minDocCount, Histogram.MIN_DOC_COUNT_FIELD); - PARSER.declareField(DateHistogramAggregationBuilder::extendedBounds, parser -> LongBounds.PARSER.apply(parser, null), - Histogram.EXTENDED_BOUNDS_FIELD, ObjectParser.ValueType.OBJECT); + PARSER.declareField( + DateHistogramAggregationBuilder::extendedBounds, + parser -> LongBounds.PARSER.apply(parser, null), + Histogram.EXTENDED_BOUNDS_FIELD, + ObjectParser.ValueType.OBJECT + ); - PARSER.declareField(DateHistogramAggregationBuilder::hardBounds, parser -> LongBounds.PARSER.apply(parser, null), - Histogram.HARD_BOUNDS_FIELD, ObjectParser.ValueType.OBJECT); + PARSER.declareField( + DateHistogramAggregationBuilder::hardBounds, + parser -> LongBounds.PARSER.apply(parser, null), + Histogram.HARD_BOUNDS_FIELD, + ObjectParser.ValueType.OBJECT + ); - 
PARSER.declareObjectArray(DateHistogramAggregationBuilder::order, (p, c) -> InternalOrder.Parser.parseOrderParam(p), - Histogram.ORDER_FIELD); + PARSER.declareObjectArray( + DateHistogramAggregationBuilder::order, + (p, c) -> InternalOrder.Parser.parseOrderParam(p), + Histogram.ORDER_FIELD + ); } public static void registerAggregators(ValuesSourceRegistry.Builder builder) { @@ -111,8 +126,11 @@ public DateHistogramAggregationBuilder(String name) { super(name); } - protected DateHistogramAggregationBuilder(DateHistogramAggregationBuilder clone, - AggregatorFactories.Builder factoriesBuilder, Map metadata) { + protected DateHistogramAggregationBuilder( + DateHistogramAggregationBuilder clone, + AggregatorFactories.Builder factoriesBuilder, + Map metadata + ) { super(clone, factoriesBuilder, metadata); this.dateHistogramInterval = clone.dateHistogramInterval; this.offset = clone.offset; @@ -147,7 +165,6 @@ protected ValuesSourceType defaultValueSourceType() { return CoreValuesSourceType.DATE; } - @Override protected void innerWriteTo(StreamOutput out) throws IOException { InternalOrder.Streams.writeHistogramOrder(order, out); @@ -235,14 +252,18 @@ public DateHistogramAggregationBuilder offset(String offset) { */ public static long parseStringOffset(String offset) { if (offset.charAt(0) == '-') { - return -TimeValue - .parseTimeValue(offset.substring(1), null, DateHistogramAggregationBuilder.class.getSimpleName() + ".parseOffset") - .millis(); + return -TimeValue.parseTimeValue( + offset.substring(1), + null, + DateHistogramAggregationBuilder.class.getSimpleName() + ".parseOffset" + ).millis(); } int beginIndex = offset.charAt(0) == '+' ? 1 : 0; - return TimeValue - .parseTimeValue(offset.substring(beginIndex), null, DateHistogramAggregationBuilder.class.getSimpleName() + ".parseOffset") - .millis(); + return TimeValue.parseTimeValue( + offset.substring(beginIndex), + null, + DateHistogramAggregationBuilder.class.getSimpleName() + ".parseOffset" + ).millis(); } /** Return extended bounds for this histogram, or {@code null} if none are set. */ @@ -260,7 +281,6 @@ public DateHistogramAggregationBuilder extendedBounds(LongBounds extendedBounds) return this; } - /** Return hard bounds for this histogram, or {@code null} if none are set. */ public LongBounds hardBounds() { return hardBounds; @@ -286,7 +306,7 @@ public DateHistogramAggregationBuilder order(BucketOrder order) { if (order == null) { throw new IllegalArgumentException("[order] must not be null: [" + name + "]"); } - if(order instanceof CompoundOrder || InternalOrder.isKeyOrder(order)) { + if (order instanceof CompoundOrder || InternalOrder.isKeyOrder(order)) { this.order = order; // if order already contains a tie-breaker we are good to go } else { // otherwise add a tie-breaker by using a compound order this.order = BucketOrder.compound(order); @@ -331,7 +351,8 @@ public long minDocCount() { public DateHistogramAggregationBuilder minDocCount(long minDocCount) { if (minDocCount < 0) { throw new IllegalArgumentException( - "[minDocCount] must be greater than or equal to 0. Found [" + minDocCount + "] in [" + name + "]"); + "[minDocCount] must be greater than or equal to 0. 
Found [" + minDocCount + "] in [" + name + "]" + ); } this.minDocCount = minDocCount; return this; @@ -382,12 +403,13 @@ protected ValuesSourceRegistry.RegistryKey getRegistryKey() { } @Override - protected ValuesSourceAggregatorFactory innerBuild(AggregationContext context, - ValuesSourceConfig config, - AggregatorFactory parent, - AggregatorFactories.Builder subFactoriesBuilder) throws IOException { - DateHistogramAggregationSupplier aggregatorSupplier = - context.getValuesSourceRegistry().getAggregator(REGISTRY_KEY, config); + protected ValuesSourceAggregatorFactory innerBuild( + AggregationContext context, + ValuesSourceConfig config, + AggregatorFactory parent, + AggregatorFactories.Builder subFactoriesBuilder + ) throws IOException { + DateHistogramAggregationSupplier aggregatorSupplier = context.getValuesSourceRegistry().getAggregator(REGISTRY_KEY, config); final ZoneId tz = timeZone(); final Rounding rounding = dateHistogramInterval.createRounding(tz, offset); @@ -395,27 +417,39 @@ protected ValuesSourceAggregatorFactory innerBuild(AggregationContext context, LongBounds roundedBounds = null; if (this.extendedBounds != null) { // parse any string bounds to longs and round - roundedBounds = this.extendedBounds.parseAndValidate(name, "extended_bounds" , context::nowInMillis, config.format()) + roundedBounds = this.extendedBounds.parseAndValidate(name, "extended_bounds", context::nowInMillis, config.format()) .round(rounding); } LongBounds roundedHardBounds = null; if (this.hardBounds != null) { // parse any string bounds to longs and round - roundedHardBounds = this.hardBounds.parseAndValidate(name, "hard_bounds" , context::nowInMillis, config.format()) + roundedHardBounds = this.hardBounds.parseAndValidate(name, "hard_bounds", context::nowInMillis, config.format()) .round(rounding); } if (roundedBounds != null && roundedHardBounds != null) { - if (roundedBounds.getMax() != null && - roundedHardBounds.getMax() != null && roundedBounds.getMax() > roundedHardBounds.getMax()) { - throw new IllegalArgumentException("Extended bounds have to be inside hard bounds, hard bounds: [" + - hardBounds + "], extended bounds: [" + extendedBounds + "]"); + if (roundedBounds.getMax() != null + && roundedHardBounds.getMax() != null + && roundedBounds.getMax() > roundedHardBounds.getMax()) { + throw new IllegalArgumentException( + "Extended bounds have to be inside hard bounds, hard bounds: [" + + hardBounds + + "], extended bounds: [" + + extendedBounds + + "]" + ); } - if (roundedBounds.getMin() != null && - roundedHardBounds.getMin() != null && roundedBounds.getMin() < roundedHardBounds.getMin()) { - throw new IllegalArgumentException("Extended bounds have to be inside hard bounds, hard bounds: [" + - hardBounds + "], extended bounds: [" + extendedBounds + "]"); + if (roundedBounds.getMin() != null + && roundedHardBounds.getMin() != null + && roundedBounds.getMin() < roundedHardBounds.getMin()) { + throw new IllegalArgumentException( + "Extended bounds have to be inside hard bounds, hard bounds: [" + + hardBounds + + "], extended bounds: [" + + extendedBounds + + "]" + ); } } @@ -432,7 +466,8 @@ protected ValuesSourceAggregatorFactory innerBuild(AggregationContext context, parent, subFactoriesBuilder, metadata, - aggregatorSupplier); + aggregatorSupplier + ); } @Override @@ -447,11 +482,11 @@ public boolean equals(Object obj) { if (super.equals(obj) == false) return false; DateHistogramAggregationBuilder other = (DateHistogramAggregationBuilder) obj; return Objects.equals(order, other.order) - && 
Objects.equals(keyed, other.keyed) - && Objects.equals(minDocCount, other.minDocCount) - && Objects.equals(dateHistogramInterval, other.dateHistogramInterval) - && Objects.equals(offset, other.offset) - && Objects.equals(extendedBounds, other.extendedBounds) - && Objects.equals(hardBounds, other.hardBounds); + && Objects.equals(keyed, other.keyed) + && Objects.equals(minDocCount, other.minDocCount) + && Objects.equals(dateHistogramInterval, other.dateHistogramInterval) + && Objects.equals(offset, other.offset) + && Objects.equals(extendedBounds, other.extendedBounds) + && Objects.equals(hardBounds, other.hardBounds); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregationSupplier.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregationSupplier.java index 51c7b7511a3d2..1529d0fab6cc9 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregationSupplier.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregationSupplier.java @@ -8,8 +8,8 @@ package org.elasticsearch.search.aggregations.bucket.histogram; -import org.elasticsearch.core.Nullable; import org.elasticsearch.common.Rounding; +import org.elasticsearch.core.Nullable; import org.elasticsearch.search.aggregations.Aggregator; import org.elasticsearch.search.aggregations.AggregatorFactories; import org.elasticsearch.search.aggregations.BucketOrder; @@ -22,17 +22,19 @@ @FunctionalInterface public interface DateHistogramAggregationSupplier { - Aggregator build(String name, - AggregatorFactories factories, - Rounding rounding, - BucketOrder order, - boolean keyed, - long minDocCount, - @Nullable LongBounds extendedBounds, - @Nullable LongBounds hardBounds, - ValuesSourceConfig valuesSourceConfig, - AggregationContext context, - Aggregator parent, - CardinalityUpperBound cardinality, - Map metadata) throws IOException; + Aggregator build( + String name, + AggregatorFactories factories, + Rounding rounding, + BucketOrder order, + boolean keyed, + long minDocCount, + @Nullable LongBounds extendedBounds, + @Nullable LongBounds hardBounds, + ValuesSourceConfig valuesSourceConfig, + AggregationContext context, + Aggregator parent, + CardinalityUpperBound cardinality, + Map metadata + ) throws IOException; } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregator.java index cbeaf2a484903..0085fef9754ea 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregator.java @@ -11,10 +11,10 @@ import org.apache.lucene.index.SortedNumericDocValues; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.util.CollectionUtil; -import org.elasticsearch.core.CheckedFunction; -import org.elasticsearch.core.Nullable; import org.elasticsearch.common.Rounding; import org.elasticsearch.common.Rounding.DateTimeUnit; +import org.elasticsearch.core.CheckedFunction; +import org.elasticsearch.core.Nullable; import org.elasticsearch.core.Releasables; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.AdaptingAggregator; @@ -297,28 +297,50 @@ public void collect(int doc, long 
owningBucketOrd) throws IOException { @Override public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException { - return buildAggregationsForVariableBuckets(owningBucketOrds, bucketOrds, + return buildAggregationsForVariableBuckets( + owningBucketOrds, + bucketOrds, (bucketValue, docCount, subAggregationResults) -> { return new InternalDateHistogram.Bucket(bucketValue, docCount, keyed, formatter, subAggregationResults); - }, (owningBucketOrd, buckets) -> { + }, + (owningBucketOrd, buckets) -> { // the contract of the histogram aggregation is that shards must return buckets ordered by key in ascending order CollectionUtil.introSort(buckets, BucketOrder.key(true).comparator()); InternalDateHistogram.EmptyBucketInfo emptyBucketInfo = minDocCount == 0 - ? new InternalDateHistogram.EmptyBucketInfo(rounding.withoutOffset(), buildEmptySubAggregations(), extendedBounds) - : null; - return new InternalDateHistogram(name, buckets, order, minDocCount, rounding.offset(), emptyBucketInfo, formatter, - keyed, metadata()); - }); + ? new InternalDateHistogram.EmptyBucketInfo(rounding.withoutOffset(), buildEmptySubAggregations(), extendedBounds) + : null; + return new InternalDateHistogram( + name, + buckets, + order, + minDocCount, + rounding.offset(), + emptyBucketInfo, + formatter, + keyed, + metadata() + ); + } + ); } @Override public InternalAggregation buildEmptyAggregation() { InternalDateHistogram.EmptyBucketInfo emptyBucketInfo = minDocCount == 0 - ? new InternalDateHistogram.EmptyBucketInfo(rounding.withoutOffset(), buildEmptySubAggregations(), extendedBounds) - : null; - return new InternalDateHistogram(name, Collections.emptyList(), order, minDocCount, rounding.offset(), emptyBucketInfo, formatter, - keyed, metadata()); + ? new InternalDateHistogram.EmptyBucketInfo(rounding.withoutOffset(), buildEmptySubAggregations(), extendedBounds) + : null; + return new InternalDateHistogram( + name, + Collections.emptyList(), + order, + minDocCount, + rounding.offset(), + emptyBucketInfo, + formatter, + keyed, + metadata() + ); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregatorFactory.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregatorFactory.java index 054e7adb7c668..2f996a1fe0a9b 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregatorFactory.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregatorFactory.java @@ -34,7 +34,8 @@ public static void registerAggregators(ValuesSourceRegistry.Builder builder) { DateHistogramAggregationBuilder.REGISTRY_KEY, List.of(CoreValuesSourceType.DATE, CoreValuesSourceType.NUMERIC), DateHistogramAggregator::build, - true); + true + ); builder.register(DateHistogramAggregationBuilder.REGISTRY_KEY, CoreValuesSourceType.RANGE, DateRangeHistogramAggregator::new, true); @@ -118,11 +119,8 @@ public long minDocCount() { } @Override - protected Aggregator doCreateInternal( - Aggregator parent, - CardinalityUpperBound cardinality, - Map metadata - ) throws IOException { + protected Aggregator doCreateInternal(Aggregator parent, CardinalityUpperBound cardinality, Map metadata) + throws IOException { return aggregatorSupplier.build( name, factories, @@ -142,7 +140,21 @@ protected Aggregator doCreateInternal( @Override protected Aggregator createUnmapped(Aggregator parent, Map metadata) throws IOException { - return new 
DateHistogramAggregator(name, factories, rounding, null, order, keyed, minDocCount, extendedBounds, hardBounds, - config, context, parent, CardinalityUpperBound.NONE, metadata); + return new DateHistogramAggregator( + name, + factories, + rounding, + null, + order, + keyed, + minDocCount, + extendedBounds, + hardBounds, + config, + context, + parent, + CardinalityUpperBound.NONE, + metadata + ); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramInterval.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramInterval.java index d0147d7ac30b0..1b1c8e8eb5285 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramInterval.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramInterval.java @@ -13,9 +13,9 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.core.TimeValue; import org.elasticsearch.common.xcontent.ToXContentFragment; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.core.TimeValue; import java.io.IOException; import java.util.Objects; @@ -72,7 +72,6 @@ public void writeTo(StreamOutput out) throws IOException { out.writeString(expression); } - @Override public String toString() { return expression; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateIntervalConsumer.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateIntervalConsumer.java index adb0e873c021c..6da720fd46fc7 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateIntervalConsumer.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateIntervalConsumer.java @@ -16,5 +16,6 @@ */ public interface DateIntervalConsumer { T calendarInterval(DateHistogramInterval interval); + T fixedInterval(DateHistogramInterval interval); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateIntervalWrapper.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateIntervalWrapper.java index 824ca8390fb63..5c5c97a7f4af8 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateIntervalWrapper.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateIntervalWrapper.java @@ -40,8 +40,8 @@ */ public class DateIntervalWrapper implements ToXContentFragment, Writeable { private static final DeprecationLogger DEPRECATION_LOGGER = DeprecationLogger.getLogger(DateHistogramAggregationBuilder.class); - private static final String DEPRECATION_TEXT = "[interval] on [date_histogram] is deprecated, use [fixed_interval] or " + - "[calendar_interval] in the future."; + private static final String DEPRECATION_TEXT = "[interval] on [date_histogram] is deprecated, use [fixed_interval] or " + + "[calendar_interval] in the future."; private static final ParseField FIXED_INTERVAL_FIELD = new ParseField("fixed_interval"); private static final ParseField CALENDAR_INTERVAL_FIELD = new ParseField("calendar_interval"); @@ -89,6 +89,7 @@ public String getPreferredName() { } private String preferredName; + IntervalTypeEnum(String preferredName) { this.preferredName = preferredName; } @@ -121,11 +122,19 @@ public static > void 
declareIntervalFields(Obj } }, Histogram.INTERVAL_FIELD.forRestApiVersion(equalTo(RestApiVersion.V_7)), ObjectParser.ValueType.LONG); - parser.declareField(DateIntervalConsumer::calendarInterval, - p -> new DateHistogramInterval(p.text()), CALENDAR_INTERVAL_FIELD, ObjectParser.ValueType.STRING); - - parser.declareField(DateIntervalConsumer::fixedInterval, - p -> new DateHistogramInterval(p.text()), FIXED_INTERVAL_FIELD, ObjectParser.ValueType.STRING); + parser.declareField( + DateIntervalConsumer::calendarInterval, + p -> new DateHistogramInterval(p.text()), + CALENDAR_INTERVAL_FIELD, + ObjectParser.ValueType.STRING + ); + + parser.declareField( + DateIntervalConsumer::fixedInterval, + p -> new DateHistogramInterval(p.text()), + FIXED_INTERVAL_FIELD, + ObjectParser.ValueType.STRING + ); } public DateIntervalWrapper() {} @@ -163,8 +172,7 @@ public void calendarInterval(DateHistogramInterval interval) { throw new IllegalArgumentException("[interval] must not be null: [date_histogram]"); } if (DateHistogramAggregationBuilder.DATE_FIELD_UNITS.get(interval.toString()) == null) { - throw new IllegalArgumentException("The supplied interval [" + interval +"] could not be parsed " + - "as a calendar interval."); + throw new IllegalArgumentException("The supplied interval [" + interval + "] could not be parsed " + "as a calendar interval."); } setIntervalType(IntervalTypeEnum.CALENDAR); this.dateHistogramInterval = interval; @@ -254,8 +262,9 @@ private void setIntervalType(IntervalTypeEnum type) { if (type.isValid() == false || intervalType.isValid() == false) { throw new IllegalArgumentException("Unknown interval type."); } - throw new IllegalArgumentException("Cannot use [" + type.getPreferredName() + "] with [" + intervalType.getPreferredName() + - "] configuration option."); + throw new IllegalArgumentException( + "Cannot use [" + type.getPreferredName() + "] with [" + intervalType.getPreferredName() + "] configuration option." 
+ ); } public boolean isEmpty() { @@ -273,7 +282,7 @@ public void writeTo(StreamOutput out) throws IOException { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - if (intervalType.equals(IntervalTypeEnum.FIXED)){ + if (intervalType.equals(IntervalTypeEnum.FIXED)) { builder.field(FIXED_INTERVAL_FIELD.getPreferredName(), dateHistogramInterval.toString()); } else if (intervalType.equals(IntervalTypeEnum.CALENDAR)) { builder.field(CALENDAR_INTERVAL_FIELD.getPreferredName(), dateHistogramInterval.toString()); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateRangeHistogramAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateRangeHistogramAggregator.java index 86b4574e8f145..1a34a01af0f19 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateRangeHistogramAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateRangeHistogramAggregator.java @@ -11,8 +11,8 @@ import org.apache.lucene.search.ScoreMode; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.CollectionUtil; -import org.elasticsearch.core.Nullable; import org.elasticsearch.common.Rounding; +import org.elasticsearch.core.Nullable; import org.elasticsearch.core.Releasables; import org.elasticsearch.index.fielddata.SortedBinaryDocValues; import org.elasticsearch.index.mapper.RangeFieldMapper; @@ -93,8 +93,9 @@ class DateRangeHistogramAggregator extends BucketsAggregator { this.valuesSource = valuesSourceConfig.hasValues() ? (ValuesSource.Range) valuesSourceConfig.getValuesSource() : null; this.formatter = valuesSourceConfig.format(); if (this.valuesSource.rangeType() != RangeType.DATE) { - throw new IllegalArgumentException("Expected date range type but found range type [" + this.valuesSource.rangeType().name - + "]"); + throw new IllegalArgumentException( + "Expected date range type but found range type [" + this.valuesSource.rangeType().name + "]" + ); } bucketOrds = LongKeyedBucketOrds.build(bigArrays(), cardinality); @@ -134,14 +135,15 @@ public void collect(int doc, long owningBucketOrd) throws IOException { // The encoding should ensure that this assert is always true. assert from >= previousFrom : "Start of range not >= previous start"; final Long to = (Long) range.getTo(); - final long effectiveFrom = (hardBounds != null && hardBounds.getMin() != null) ? - max(from, hardBounds.getMin()) : from; - final long effectiveTo = (hardBounds != null && hardBounds.getMax() != null) ? - min(to, hardBounds.getMax()) : to; + final long effectiveFrom = (hardBounds != null && hardBounds.getMin() != null) + ? max(from, hardBounds.getMin()) + : from; + final long effectiveTo = (hardBounds != null && hardBounds.getMax() != null) + ? 
min(to, hardBounds.getMax()) + : to; final long startKey = preparedRounding.round(effectiveFrom); - final long endKey = preparedRounding.round(effectiveTo); - for (long key = max(startKey, previousKey); key <= endKey; - key = preparedRounding.nextRoundingValue(key)) { + final long endKey = preparedRounding.round(effectiveTo); + for (long key = max(startKey, previousKey); key <= endKey; key = preparedRounding.nextRoundingValue(key)) { if (key == previousKey) { continue; } @@ -165,13 +167,18 @@ public void collect(int doc, long owningBucketOrd) throws IOException { }; } - - @Override public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException { - return buildAggregationsForVariableBuckets(owningBucketOrds, bucketOrds, - (bucketValue, docCount, subAggregationResults) -> - new InternalDateHistogram.Bucket(bucketValue, docCount, keyed, formatter, subAggregationResults), + return buildAggregationsForVariableBuckets( + owningBucketOrds, + bucketOrds, + (bucketValue, docCount, subAggregationResults) -> new InternalDateHistogram.Bucket( + bucketValue, + docCount, + keyed, + formatter, + subAggregationResults + ), (owningBucketOrd, buckets) -> { // the contract of the histogram aggregation is that shards must return buckets ordered by key in ascending order CollectionUtil.introSort(buckets, BucketOrder.key(true).comparator()); @@ -179,20 +186,39 @@ public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws I // value source will be null for unmapped fields // Important: use `rounding` here, not `shardRounding` InternalDateHistogram.EmptyBucketInfo emptyBucketInfo = minDocCount == 0 - ? new InternalDateHistogram.EmptyBucketInfo(rounding.withoutOffset(), buildEmptySubAggregations(), extendedBounds) - : null; - return new InternalDateHistogram(name, buckets, order, minDocCount, rounding.offset(), emptyBucketInfo, formatter, - keyed, metadata()); - }); + ? new InternalDateHistogram.EmptyBucketInfo(rounding.withoutOffset(), buildEmptySubAggregations(), extendedBounds) + : null; + return new InternalDateHistogram( + name, + buckets, + order, + minDocCount, + rounding.offset(), + emptyBucketInfo, + formatter, + keyed, + metadata() + ); + } + ); } @Override public InternalAggregation buildEmptyAggregation() { InternalDateHistogram.EmptyBucketInfo emptyBucketInfo = minDocCount == 0 - ? new InternalDateHistogram.EmptyBucketInfo(rounding, buildEmptySubAggregations(), extendedBounds) - : null; - return new InternalDateHistogram(name, Collections.emptyList(), order, minDocCount, rounding.offset(), emptyBucketInfo, formatter, - keyed, metadata()); + ? 
new InternalDateHistogram.EmptyBucketInfo(rounding, buildEmptySubAggregations(), extendedBounds) + : null; + return new InternalDateHistogram( + name, + Collections.emptyList(), + order, + minDocCount, + rounding.offset(), + emptyBucketInfo, + formatter, + keyed, + metadata() + ); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DoubleBounds.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DoubleBounds.java index acc571dab6c18..9984776facd2a 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DoubleBounds.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DoubleBounds.java @@ -8,12 +8,12 @@ package org.elasticsearch.search.aggregations.bucket.histogram; -import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.xcontent.InstantiatingObjectParser; import org.elasticsearch.common.xcontent.ObjectParser; +import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.xcontent.ToXContentFragment; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; @@ -36,12 +36,23 @@ public class DoubleBounds implements ToXContentFragment, Writeable { static final InstantiatingObjectParser PARSER; static { - InstantiatingObjectParser.Builder parser = - InstantiatingObjectParser.builder("double_bounds", false, DoubleBounds.class); - parser.declareField(optionalConstructorArg(), p -> p.currentToken() == XContentParser.Token.VALUE_NULL ? null : p.doubleValue(), - MIN_FIELD, ObjectParser.ValueType.DOUBLE_OR_NULL); - parser.declareField(optionalConstructorArg(), p -> p.currentToken() == XContentParser.Token.VALUE_NULL ? null : p.doubleValue(), - MAX_FIELD, ObjectParser.ValueType.DOUBLE_OR_NULL); + InstantiatingObjectParser.Builder parser = InstantiatingObjectParser.builder( + "double_bounds", + false, + DoubleBounds.class + ); + parser.declareField( + optionalConstructorArg(), + p -> p.currentToken() == XContentParser.Token.VALUE_NULL ? null : p.doubleValue(), + MIN_FIELD, + ObjectParser.ValueType.DOUBLE_OR_NULL + ); + parser.declareField( + optionalConstructorArg(), + p -> p.currentToken() == XContentParser.Token.VALUE_NULL ? 
null : p.doubleValue(), + MAX_FIELD, + ObjectParser.ValueType.DOUBLE_OR_NULL + ); PARSER = parser.build(); } @@ -111,8 +122,7 @@ public boolean equals(Object obj) { return false; } DoubleBounds other = (DoubleBounds) obj; - return Objects.equals(min, other.min) - && Objects.equals(max, other.max); + return Objects.equals(min, other.min) && Objects.equals(max, other.max); } public Double getMin() { diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/HistogramAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/HistogramAggregationBuilder.java index 9df26f29d428f..881fd9916d9c7 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/HistogramAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/HistogramAggregationBuilder.java @@ -9,10 +9,10 @@ package org.elasticsearch.search.aggregations.bucket.histogram; import org.elasticsearch.Version; -import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.ObjectParser; +import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregatorFactories; @@ -39,19 +39,24 @@ */ public class HistogramAggregationBuilder extends ValuesSourceAggregationBuilder { public static final String NAME = "histogram"; - public static final ValuesSourceRegistry.RegistryKey REGISTRY_KEY = - new ValuesSourceRegistry.RegistryKey<>(NAME, HistogramAggregatorSupplier.class); + public static final ValuesSourceRegistry.RegistryKey REGISTRY_KEY = new ValuesSourceRegistry.RegistryKey<>( + NAME, + HistogramAggregatorSupplier.class + ); private static final ObjectParser EXTENDED_BOUNDS_PARSER = new ObjectParser<>( - Histogram.EXTENDED_BOUNDS_FIELD.getPreferredName(), - () -> new double[]{ Double.POSITIVE_INFINITY, Double.NEGATIVE_INFINITY }); + Histogram.EXTENDED_BOUNDS_FIELD.getPreferredName(), + () -> new double[] { Double.POSITIVE_INFINITY, Double.NEGATIVE_INFINITY } + ); static { EXTENDED_BOUNDS_PARSER.declareDouble((bounds, d) -> bounds[0] = d, new ParseField("min")); EXTENDED_BOUNDS_PARSER.declareDouble((bounds, d) -> bounds[1] = d, new ParseField("max")); } - public static final ObjectParser PARSER = - ObjectParser.fromBuilder(NAME, HistogramAggregationBuilder::new); + public static final ObjectParser PARSER = ObjectParser.fromBuilder( + NAME, + HistogramAggregationBuilder::new + ); static { ValuesSourceAggregationBuilder.declareFields(PARSER, true, true, false); @@ -63,14 +68,25 @@ public class HistogramAggregationBuilder extends ValuesSourceAggregationBuilder< PARSER.declareLong(HistogramAggregationBuilder::minDocCount, Histogram.MIN_DOC_COUNT_FIELD); - PARSER.declareField(HistogramAggregationBuilder::extendedBounds, parser -> DoubleBounds.PARSER.apply(parser, null), - Histogram.EXTENDED_BOUNDS_FIELD, ObjectParser.ValueType.OBJECT); + PARSER.declareField( + HistogramAggregationBuilder::extendedBounds, + parser -> DoubleBounds.PARSER.apply(parser, null), + Histogram.EXTENDED_BOUNDS_FIELD, + ObjectParser.ValueType.OBJECT + ); - PARSER.declareField(HistogramAggregationBuilder::hardBounds, parser -> DoubleBounds.PARSER.apply(parser, null), - Histogram.HARD_BOUNDS_FIELD, ObjectParser.ValueType.OBJECT); + 
PARSER.declareField( + HistogramAggregationBuilder::hardBounds, + parser -> DoubleBounds.PARSER.apply(parser, null), + Histogram.HARD_BOUNDS_FIELD, + ObjectParser.ValueType.OBJECT + ); - PARSER.declareObjectArray(HistogramAggregationBuilder::order, (p, c) -> InternalOrder.Parser.parseOrderParam(p), - Histogram.ORDER_FIELD); + PARSER.declareObjectArray( + HistogramAggregationBuilder::order, + (p, c) -> InternalOrder.Parser.parseOrderParam(p), + Histogram.ORDER_FIELD + ); } public static void registerAggregators(ValuesSourceRegistry.Builder builder) { @@ -95,9 +111,11 @@ public HistogramAggregationBuilder(String name) { super(name); } - protected HistogramAggregationBuilder(HistogramAggregationBuilder clone, - AggregatorFactories.Builder factoriesBuilder, - Map metadata) { + protected HistogramAggregationBuilder( + HistogramAggregationBuilder clone, + AggregatorFactories.Builder factoriesBuilder, + Map metadata + ) { super(clone, factoriesBuilder, metadata); this.interval = clone.interval; this.offset = clone.offset; @@ -127,7 +145,7 @@ public HistogramAggregationBuilder(StreamInput in) throws IOException { } else { double minBound = in.readDouble(); double maxBound = in.readDouble(); - if (minBound == Double.POSITIVE_INFINITY && maxBound == Double.NEGATIVE_INFINITY) { + if (minBound == Double.POSITIVE_INFINITY && maxBound == Double.NEGATIVE_INFINITY) { extendedBounds = null; } else { extendedBounds = new DoubleBounds(minBound, maxBound); @@ -247,7 +265,7 @@ public HistogramAggregationBuilder order(BucketOrder order) { if (order == null) { throw new IllegalArgumentException("[order] must not be null: [" + name + "]"); } - if(order instanceof CompoundOrder || InternalOrder.isKeyOrder(order)) { + if (order instanceof CompoundOrder || InternalOrder.isKeyOrder(order)) { this.order = order; // if order already contains a tie-breaker we are good to go } else { // otherwise add a tie-breaker by using a compound order this.order = BucketOrder.compound(order); @@ -292,7 +310,8 @@ public long minDocCount() { public HistogramAggregationBuilder minDocCount(long minDocCount) { if (minDocCount < 0) { throw new IllegalArgumentException( - "[minDocCount] must be greater than or equal to 0. Found [" + minDocCount + "] in [" + name + "]"); + "[minDocCount] must be greater than or equal to 0. 
Found [" + minDocCount + "] in [" + name + "]" + ); } this.minDocCount = minDocCount; return this; @@ -344,26 +363,55 @@ protected ValuesSourceRegistry.RegistryKey getRegistryKey() { } @Override - protected ValuesSourceAggregatorFactory innerBuild(AggregationContext context, - ValuesSourceConfig config, - AggregatorFactory parent, - AggregatorFactories.Builder subFactoriesBuilder) throws IOException { - HistogramAggregatorSupplier aggregatorSupplier = - context.getValuesSourceRegistry().getAggregator(REGISTRY_KEY, config); + protected ValuesSourceAggregatorFactory innerBuild( + AggregationContext context, + ValuesSourceConfig config, + AggregatorFactory parent, + AggregatorFactories.Builder subFactoriesBuilder + ) throws IOException { + HistogramAggregatorSupplier aggregatorSupplier = context.getValuesSourceRegistry().getAggregator(REGISTRY_KEY, config); if (hardBounds != null && extendedBounds != null) { if (hardBounds.getMax() != null && extendedBounds.getMax() != null && hardBounds.getMax() < extendedBounds.getMax()) { - throw new IllegalArgumentException("Extended bounds have to be inside hard bounds, hard bounds: [" + - hardBounds + "], extended bounds: [" + extendedBounds.getMin() + "--" + extendedBounds.getMax() + "]"); + throw new IllegalArgumentException( + "Extended bounds have to be inside hard bounds, hard bounds: [" + + hardBounds + + "], extended bounds: [" + + extendedBounds.getMin() + + "--" + + extendedBounds.getMax() + + "]" + ); } if (hardBounds.getMin() != null && extendedBounds.getMin() != null && hardBounds.getMin() > extendedBounds.getMin()) { - throw new IllegalArgumentException("Extended bounds have to be inside hard bounds, hard bounds: [" + - hardBounds + "], extended bounds: [" + extendedBounds.getMin() + "--" + extendedBounds.getMax() + "]"); + throw new IllegalArgumentException( + "Extended bounds have to be inside hard bounds, hard bounds: [" + + hardBounds + + "], extended bounds: [" + + extendedBounds.getMin() + + "--" + + extendedBounds.getMax() + + "]" + ); } } - return new HistogramAggregatorFactory(name, config, interval, offset, order, keyed, minDocCount, extendedBounds, - hardBounds, context, parent, subFactoriesBuilder, metadata, aggregatorSupplier); + return new HistogramAggregatorFactory( + name, + config, + interval, + offset, + order, + keyed, + minDocCount, + extendedBounds, + hardBounds, + context, + parent, + subFactoriesBuilder, + metadata, + aggregatorSupplier + ); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/HistogramAggregatorFactory.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/HistogramAggregatorFactory.java index 918537d7cbf51..873cfa33648dc 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/HistogramAggregatorFactory.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/HistogramAggregatorFactory.java @@ -48,20 +48,22 @@ static void registerAggregators(ValuesSourceRegistry.Builder builder) { ); } - public HistogramAggregatorFactory(String name, - ValuesSourceConfig config, - double interval, - double offset, - BucketOrder order, - boolean keyed, - long minDocCount, - DoubleBounds extendedBounds, - DoubleBounds hardBounds, - AggregationContext context, - AggregatorFactory parent, - AggregatorFactories.Builder subFactoriesBuilder, - Map metadata, - HistogramAggregatorSupplier aggregatorSupplier) throws IOException { + public HistogramAggregatorFactory( + String name, + 
ValuesSourceConfig config, + double interval, + double offset, + BucketOrder order, + boolean keyed, + long minDocCount, + DoubleBounds extendedBounds, + DoubleBounds hardBounds, + AggregationContext context, + AggregatorFactory parent, + AggregatorFactories.Builder subFactoriesBuilder, + Map metadata, + HistogramAggregatorSupplier aggregatorSupplier + ) throws IOException { super(name, config, context, parent, subFactoriesBuilder, metadata); this.aggregatorSupplier = aggregatorSupplier; this.interval = interval; @@ -78,31 +80,43 @@ public long minDocCount() { } @Override - protected Aggregator doCreateInternal(Aggregator parent, - CardinalityUpperBound cardinality, - Map metadata) throws IOException { - return aggregatorSupplier - .build( - name, - factories, - interval, - offset, - order, - keyed, - minDocCount, - extendedBounds, - hardBounds, - config, - context, - parent, - cardinality, - metadata - ); + protected Aggregator doCreateInternal(Aggregator parent, CardinalityUpperBound cardinality, Map metadata) + throws IOException { + return aggregatorSupplier.build( + name, + factories, + interval, + offset, + order, + keyed, + minDocCount, + extendedBounds, + hardBounds, + config, + context, + parent, + cardinality, + metadata + ); } @Override protected Aggregator createUnmapped(Aggregator parent, Map metadata) throws IOException { - return new NumericHistogramAggregator(name, factories, interval, offset, order, keyed, minDocCount, extendedBounds, - hardBounds, config, context, parent, CardinalityUpperBound.NONE, metadata); + return new NumericHistogramAggregator( + name, + factories, + interval, + offset, + order, + keyed, + minDocCount, + extendedBounds, + hardBounds, + config, + context, + parent, + CardinalityUpperBound.NONE, + metadata + ); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalAutoDateHistogram.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalAutoDateHistogram.java index 49718d11e9225..20602d33e51e2 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalAutoDateHistogram.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalAutoDateHistogram.java @@ -36,8 +36,9 @@ /** * Implementation of {@link Histogram}. 
*/ -public final class InternalAutoDateHistogram extends - InternalMultiBucketAggregation implements Histogram, HistogramFactory { +public final class InternalAutoDateHistogram extends InternalMultiBucketAggregation< + InternalAutoDateHistogram, + InternalAutoDateHistogram.Bucket> implements Histogram, HistogramFactory { public static class Bucket extends InternalMultiBucketAggregation.InternalBucket implements Histogram.Bucket, KeyComparable { @@ -46,8 +47,7 @@ public static class Bucket extends InternalMultiBucketAggregation.InternalBucket final InternalAggregations aggregations; protected final transient DocValueFormat format; - public Bucket(long key, long docCount, DocValueFormat format, - InternalAggregations aggregations) { + public Bucket(long key, long docCount, DocValueFormat format, InternalAggregations aggregations) { this.format = format; this.key = key; this.docCount = docCount; @@ -72,9 +72,7 @@ public boolean equals(Object obj) { InternalAutoDateHistogram.Bucket that = (InternalAutoDateHistogram.Bucket) obj; // No need to take the keyed and format parameters into account, // they are already stored and tested on the InternalDateHistogram object - return key == that.key - && docCount == that.docCount - && Objects.equals(aggregations, that.aggregations); + return key == that.key && docCount == that.docCount && Objects.equals(aggregations, that.aggregations); } @Override @@ -171,8 +169,8 @@ public boolean equals(Object obj) { } BucketInfo that = (BucketInfo) obj; return Objects.deepEquals(roundingInfos, that.roundingInfos) - && Objects.equals(roundingIdx, that.roundingIdx) - && Objects.equals(emptySubAggregations, that.emptySubAggregations); + && Objects.equals(roundingIdx, that.roundingIdx) + && Objects.equals(emptySubAggregations, that.emptySubAggregations); } @Override @@ -190,8 +188,15 @@ public int hashCode() { */ private final long bucketInnerInterval; - InternalAutoDateHistogram(String name, List buckets, int targetBuckets, BucketInfo emptyBucketInfo, DocValueFormat formatter, - Map metadata, long bucketInnerInterval) { + InternalAutoDateHistogram( + String name, + List buckets, + int targetBuckets, + BucketInfo emptyBucketInfo, + DocValueFormat formatter, + Map metadata, + long bucketInnerInterval + ) { super(name, metadata); this.buckets = buckets; this.bucketInfo = emptyBucketInfo; @@ -317,7 +322,7 @@ protected boolean lessThan(IteratorAndCurrent a, IteratorAndCurrent key: "shards must return data sorted by key"; + assert top.current().key > key : "shards must return data sorted by key"; pq.updateTop(); } else { pq.pop(); @@ -330,10 +335,7 @@ protected boolean lessThan(IteratorAndCurrent a, IteratorAndCurrent buckets = firstPassResult.buckets; Rounding.Prepared prepared = firstPassResult.preparedRounding; - while (buckets.size() > (targetBuckets * info.getMaximumInnerInterval()) - && idx < bucketInfo.roundingInfos.length - 1) { + while (buckets.size() > (targetBuckets * info.getMaximumInnerInterval()) && idx < bucketInfo.roundingInfos.length - 1) { idx++; info = bucketInfo.roundingInfos[idx]; prepared = prepare(idx, firstPassResult.min, firstPassResult.max); @@ -459,8 +460,7 @@ private BucketReduceResult addEmptyBuckets(BucketReduceResult current, ReduceCon return new BucketReduceResult(list, roundingIdx, 1, rounding, current.min, current.max); } - static int getAppropriateRounding(long minKey, long maxKey, int roundingIdx, - RoundingInfo[] roundings, int targetBuckets) { + static int getAppropriateRounding(long minKey, long maxKey, int roundingIdx, RoundingInfo[] 
roundings, int targetBuckets) { if (roundingIdx == roundings.length - 1) { return roundingIdx; } @@ -493,7 +493,7 @@ static int getAppropriateRounding(long minKey, long maxKey, int roundingIdx, } currentRoundingIdx++; } while (requiredBuckets > (targetBuckets * roundings[currentRoundingIdx - 1].getMaximumInnerInterval()) - && currentRoundingIdx < roundings.length); + && currentRoundingIdx < roundings.length); // The loop will increase past the correct rounding index here so we // need to subtract one to get the rounding index we need return currentRoundingIdx - 1; @@ -514,11 +514,21 @@ public InternalAggregation reduce(List aggregations, Reduce reducedBucketsResult = maybeMergeConsecutiveBuckets(reducedBucketsResult, reduceContext); } reduceContext.consumeBucketsAndMaybeBreak(reducedBucketsResult.buckets.size()); - BucketInfo bucketInfo = new BucketInfo(this.bucketInfo.roundingInfos, reducedBucketsResult.roundingIdx, - this.bucketInfo.emptySubAggregations); + BucketInfo bucketInfo = new BucketInfo( + this.bucketInfo.roundingInfos, + reducedBucketsResult.roundingIdx, + this.bucketInfo.emptySubAggregations + ); - return new InternalAutoDateHistogram(getName(), reducedBucketsResult.buckets, targetBuckets, bucketInfo, format, - getMetadata(), reducedBucketsResult.innerInterval); + return new InternalAutoDateHistogram( + getName(), + reducedBucketsResult.buckets, + targetBuckets, + bucketInfo, + format, + getMetadata(), + reducedBucketsResult.innerInterval + ); } private BucketReduceResult maybeMergeConsecutiveBuckets(BucketReduceResult current, ReduceContext reduceContext) { @@ -611,9 +621,7 @@ public boolean equals(Object obj) { if (super.equals(obj) == false) return false; InternalAutoDateHistogram that = (InternalAutoDateHistogram) obj; - return Objects.equals(buckets, that.buckets) - && Objects.equals(format, that.format) - && Objects.equals(bucketInfo, that.bucketInfo); + return Objects.equals(buckets, that.buckets) && Objects.equals(format, that.format) && Objects.equals(bucketInfo, that.bucketInfo); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogram.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogram.java index afc1be94db5a5..10dfa5cc9f687 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogram.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogram.java @@ -39,7 +39,9 @@ * Implementation of {@link Histogram}. 
*/ public final class InternalDateHistogram extends InternalMultiBucketAggregation - implements Histogram, HistogramFactory { + implements + Histogram, + HistogramFactory { public static class Bucket extends InternalMultiBucketAggregation.InternalBucket implements Histogram.Bucket, KeyComparable { @@ -49,8 +51,7 @@ public static class Bucket extends InternalMultiBucketAggregation.InternalBucket private final transient boolean keyed; protected final transient DocValueFormat format; - public Bucket(long key, long docCount, boolean keyed, DocValueFormat format, - InternalAggregations aggregations) { + public Bucket(long key, long docCount, boolean keyed, DocValueFormat format, InternalAggregations aggregations) { this.format = format; this.keyed = keyed; this.key = key; @@ -77,9 +78,7 @@ public boolean equals(Object obj) { InternalDateHistogram.Bucket that = (InternalDateHistogram.Bucket) obj; // No need to take the keyed and format parameters into account, // they are already stored and tested on the InternalDateHistogram object - return key == that.key - && docCount == that.docCount - && Objects.equals(aggregations, that.aggregations); + return key == that.key && docCount == that.docCount && Objects.equals(aggregations, that.aggregations); } @Override @@ -181,8 +180,8 @@ public boolean equals(Object obj) { } EmptyBucketInfo that = (EmptyBucketInfo) obj; return Objects.equals(rounding, that.rounding) - && Objects.equals(bounds, that.bounds) - && Objects.equals(subAggregations, that.subAggregations); + && Objects.equals(bounds, that.bounds) + && Objects.equals(subAggregations, that.subAggregations); } @Override @@ -199,8 +198,17 @@ public int hashCode() { private final long offset; final EmptyBucketInfo emptyBucketInfo; - InternalDateHistogram(String name, List buckets, BucketOrder order, long minDocCount, long offset, - EmptyBucketInfo emptyBucketInfo, DocValueFormat formatter, boolean keyed, Map metadata) { + InternalDateHistogram( + String name, + List buckets, + BucketOrder order, + long minDocCount, + long offset, + EmptyBucketInfo emptyBucketInfo, + DocValueFormat formatter, + boolean keyed, + Map metadata + ) { super(name, metadata); this.buckets = buckets; this.order = order; @@ -461,7 +469,7 @@ public InternalAggregation reduce(List aggregations, Reduce List reverse = new ArrayList<>(reducedBuckets); Collections.reverse(reverse); reducedBuckets = reverse; - } else if (InternalOrder.isKeyAsc(order) == false){ + } else if (InternalOrder.isKeyAsc(order) == false) { // nothing to do when sorting by key ascending, as data is already sorted since shards return // sorted buckets and the merge-sort performed by reduceBuckets maintains order. 
// otherwise, sorted by compound order or sub-aggregation, we need to fall back to a costly n*log(n) sort @@ -471,8 +479,17 @@ public InternalAggregation reduce(List aggregations, Reduce if (false == alreadyAccountedForBuckets) { reduceContext.consumeBucketsAndMaybeBreak(reducedBuckets.size()); } - return new InternalDateHistogram(getName(), reducedBuckets, order, minDocCount, offset, emptyBucketInfo, - format, keyed, getMetadata()); + return new InternalDateHistogram( + getName(), + reducedBuckets, + order, + minDocCount, + offset, + emptyBucketInfo, + format, + keyed, + getMetadata() + ); } @Override @@ -529,12 +546,12 @@ public boolean equals(Object obj) { InternalDateHistogram that = (InternalDateHistogram) obj; return Objects.equals(buckets, that.buckets) - && Objects.equals(order, that.order) - && Objects.equals(format, that.format) - && Objects.equals(keyed, that.keyed) - && Objects.equals(minDocCount, that.minDocCount) - && Objects.equals(offset, that.offset) - && Objects.equals(emptyBucketInfo, that.emptyBucketInfo); + && Objects.equals(order, that.order) + && Objects.equals(format, that.format) + && Objects.equals(keyed, that.keyed) + && Objects.equals(minDocCount, that.minDocCount) + && Objects.equals(offset, that.offset) + && Objects.equals(emptyBucketInfo, that.emptyBucketInfo); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalHistogram.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalHistogram.java index 58e1f5f602432..81c631bf5c0da 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalHistogram.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalHistogram.java @@ -36,7 +36,9 @@ * Implementation of {@link Histogram}. 
*/ public final class InternalHistogram extends InternalMultiBucketAggregation - implements Histogram, HistogramFactory { + implements + Histogram, + HistogramFactory { public static class Bucket extends InternalMultiBucketAggregation.InternalBucket implements Histogram.Bucket, KeyComparable { final double key; @@ -45,8 +47,7 @@ public static class Bucket extends InternalMultiBucketAggregation.InternalBucket private final transient boolean keyed; protected final transient DocValueFormat format; - public Bucket(double key, long docCount, boolean keyed, DocValueFormat format, - InternalAggregations aggregations) { + public Bucket(double key, long docCount, boolean keyed, DocValueFormat format, InternalAggregations aggregations) { this.format = format; this.keyed = keyed; this.key = key; @@ -73,9 +74,7 @@ public boolean equals(Object obj) { Bucket that = (Bucket) obj; // No need to take the keyed and format parameters into account, // they are already stored and tested on the InternalHistogram object - return key == that.key - && docCount == that.docCount - && Objects.equals(aggregations, that.aggregations); + return key == that.key && docCount == that.docCount && Objects.equals(aggregations, that.aggregations); } @Override @@ -174,10 +173,10 @@ public boolean equals(Object obj) { } EmptyBucketInfo that = (EmptyBucketInfo) obj; return interval == that.interval - && offset == that.offset - && minBound == that.minBound - && maxBound == that.maxBound - && Objects.equals(subAggregations, that.subAggregations); + && offset == that.offset + && minBound == that.minBound + && maxBound == that.maxBound + && Objects.equals(subAggregations, that.subAggregations); } @Override @@ -201,7 +200,8 @@ public InternalHistogram( EmptyBucketInfo emptyBucketInfo, DocValueFormat formatter, boolean keyed, - Map metadata) { + Map metadata + ) { super(name, metadata); this.buckets = buckets; this.order = order; @@ -444,7 +444,7 @@ public InternalAggregation reduce(List aggregations, Reduce List reverse = new ArrayList<>(reducedBuckets); Collections.reverse(reverse); reducedBuckets = reverse; - } else if (InternalOrder.isKeyAsc(order) == false){ + } else if (InternalOrder.isKeyAsc(order) == false) { // nothing to do when sorting by key ascending, as data is already sorted since shards return // sorted buckets and the merge-sort performed by reduceBuckets maintains order. 
// otherwise, sorted by compound order or sub-aggregation, we need to fall back to a costly n*log(n) sort @@ -511,11 +511,11 @@ public boolean equals(Object obj) { InternalHistogram that = (InternalHistogram) obj; return Objects.equals(buckets, that.buckets) - && Objects.equals(emptyBucketInfo, that.emptyBucketInfo) - && Objects.equals(format, that.format) - && Objects.equals(keyed, that.keyed) - && Objects.equals(minDocCount, that.minDocCount) - && Objects.equals(order, that.order); + && Objects.equals(emptyBucketInfo, that.emptyBucketInfo) + && Objects.equals(format, that.format) + && Objects.equals(keyed, that.keyed) + && Objects.equals(minDocCount, that.minDocCount) + && Objects.equals(order, that.order); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalVariableWidthHistogram.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalVariableWidthHistogram.java index 1cf09dfde1fce..f35354abdbdb4 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalVariableWidthHistogram.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalVariableWidthHistogram.java @@ -29,12 +29,11 @@ import java.util.Map; import java.util.Objects; -public class InternalVariableWidthHistogram - extends InternalMultiBucketAggregation - implements Histogram, HistogramFactory{ +public class InternalVariableWidthHistogram extends InternalMultiBucketAggregation< + InternalVariableWidthHistogram, + InternalVariableWidthHistogram.Bucket> implements Histogram, HistogramFactory { - public static class Bucket extends InternalMultiBucketAggregation.InternalBucket - implements Histogram.Bucket, KeyComparable { + public static class Bucket extends InternalMultiBucketAggregation.InternalBucket implements Histogram.Bucket, KeyComparable { public static class BucketBounds { public double min; @@ -55,7 +54,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeDouble(max); } - public boolean equals(Object obj){ + public boolean equals(Object obj) { if (this == obj) return true; if (obj == null || getClass() != obj.getClass()) return false; BucketBounds that = (BucketBounds) obj; @@ -74,11 +73,7 @@ public int hashCode() { protected final transient DocValueFormat format; private double centroid; - public Bucket(double centroid, - BucketBounds bounds, - long docCount, - DocValueFormat format, - InternalAggregations aggregations) { + public Bucket(double centroid, BucketBounds bounds, long docCount, DocValueFormat format, InternalAggregations aggregations) { this.format = format; this.centroid = centroid; this.bounds = bounds; @@ -133,13 +128,21 @@ public String getKeyAsString() { * are buckets, which is incorrect. 
*/ @Override - public Object getKey() { return centroid; } + public Object getKey() { + return centroid; + } - public double min() { return bounds.min; } + public double min() { + return bounds.min; + } - public double max() { return bounds.max; } + public double max() { + return bounds.max; + } - public double centroid() { return centroid; } + public double centroid() { + return centroid; + } @Override public long getDocCount() { @@ -223,8 +226,14 @@ public int hashCode() { private final int targetNumBuckets; final EmptyBucketInfo emptyBucketInfo; - InternalVariableWidthHistogram(String name, List buckets, EmptyBucketInfo emptyBucketInfo, int targetNumBuckets, - DocValueFormat formatter, Map metaData){ + InternalVariableWidthHistogram( + String name, + List buckets, + EmptyBucketInfo emptyBucketInfo, + int targetNumBuckets, + DocValueFormat formatter, + Map metaData + ) { super(name, metaData); this.buckets = buckets; this.emptyBucketInfo = emptyBucketInfo; @@ -235,7 +244,7 @@ public int hashCode() { /** * Stream from a stream. */ - public InternalVariableWidthHistogram(StreamInput in) throws IOException{ + public InternalVariableWidthHistogram(StreamInput in) throws IOException { super(in); emptyBucketInfo = new EmptyBucketInfo(in); format = in.readNamedWriteable(DocValueFormat.class); @@ -275,8 +284,7 @@ public EmptyBucketInfo getEmptyBucketInfo() { @Override public InternalVariableWidthHistogram create(List buckets) { - return new InternalVariableWidthHistogram(name, buckets, emptyBucketInfo, targetNumBuckets, - format, metadata); + return new InternalVariableWidthHistogram(name, buckets, emptyBucketInfo, targetNumBuckets, format, metadata); } @Override @@ -286,8 +294,7 @@ public Bucket createBucket(InternalAggregations aggregations, Bucket prototype) @Override public Bucket createBucket(Number key, long docCount, InternalAggregations aggregations) { - return new Bucket(key.doubleValue(), new Bucket.BucketBounds(key.doubleValue(), key.doubleValue()), - docCount, format, aggregations); + return new Bucket(key.doubleValue(), new Bucket.BucketBounds(key.doubleValue(), key.doubleValue()), docCount, format, aggregations); } @Override @@ -303,8 +310,10 @@ public Number nextKey(Number key) { /** * This method should not be called for this specific subclass of InternalHistogram, since there should not be * empty buckets when clustering. -= */ - private double nextKey(double key){ return key + 1; } + = */ + private double nextKey(double key) { + return key + 1; + } @Override protected Bucket reduceBucket(List buckets, ReduceContext context) { @@ -341,7 +350,7 @@ protected boolean lessThan(IteratorAndCurrent a, IteratorAndCurrent reducedBuckets = new ArrayList<>(); - if(pq.size() > 0) { + if (pq.size() > 0) { double key = pq.top().current().centroid(); // list of buckets coming from different shards that have the same key List currentBuckets = new ArrayList<>(); @@ -367,7 +376,7 @@ protected boolean lessThan(IteratorAndCurrent a, IteratorAndCurrent 0); + } while (pq.size() > 0); if (currentBuckets.isEmpty() == false) { final Bucket reduced = reduceBucket(currentBuckets, reduceContext); @@ -380,8 +389,7 @@ protected boolean lessThan(IteratorAndCurrent a, IteratorAndCurrent 0) { + if (docCount + other.docCount > 0) { // Avoids div by 0 error. 
This condition could be false if the optional docCount field was not set centroid = ((centroid * docCount) + (other.centroid * other.docCount)) / (docCount + other.docCount); docCount += other.docCount; @@ -412,22 +420,22 @@ public void mergeWith(BucketRange other){ * For each range {startIdx, endIdx} in ranges, all the buckets in that index range * from buckets are merged, and this merged bucket replaces the entire range. */ - private void mergeBucketsWithPlan(List buckets, List plan, ReduceContext reduceContext){ - for(int i = plan.size() - 1; i >= 0; i--) { + private void mergeBucketsWithPlan(List buckets, List plan, ReduceContext reduceContext) { + for (int i = plan.size() - 1; i >= 0; i--) { BucketRange range = plan.get(i); int endIdx = range.endIdx; int startIdx = range.startIdx; - if(startIdx == endIdx) continue; + if (startIdx == endIdx) continue; List toMerge = new ArrayList<>(); - for(int idx = endIdx; idx > startIdx; idx--){ + for (int idx = endIdx; idx > startIdx; idx--) { toMerge.add(buckets.get(idx)); buckets.remove(idx); } toMerge.add(buckets.get(startIdx)); // Don't remove the startIdx bucket because it will be replaced by the merged bucket - int toRemove = toMerge.stream().mapToInt(b -> countInnerBucket(b)+1).sum(); + int toRemove = toMerge.stream().mapToInt(b -> countInnerBucket(b) + 1).sum(); reduceContext.consumeBucketsAndMaybeBreak(-toRemove + 1); Bucket merged_bucket = reduceBucket(toMerge, reduceContext); @@ -481,7 +489,7 @@ private void mergeBucketsIfNeeded(List buckets, int targetNumBuckets, Re mergeBucketsWithPlan(buckets, ranges, reduceContext); } - private void mergeBucketsWithSameMin(List buckets, ReduceContext reduceContext){ + private void mergeBucketsWithSameMin(List buckets, ReduceContext reduceContext) { // Create a merge plan List ranges = new ArrayList<>(); @@ -496,14 +504,14 @@ private void mergeBucketsWithSameMin(List buckets, ReduceContext reduceC // Merge ranges with same min value int i = 0; - while(i < ranges.size() - 1){ + while (i < ranges.size() - 1) { BucketRange range = ranges.get(i); - BucketRange nextRange = ranges.get(i+1); + BucketRange nextRange = ranges.get(i + 1); - if(range.min == nextRange.min){ + if (range.min == nextRange.min) { range.mergeWith(nextRange); - ranges.remove(i+1); - } else{ + ranges.remove(i + 1); + } else { i++; } } @@ -518,11 +526,11 @@ private void mergeBucketsWithSameMin(List buckets, ReduceContext reduceC * * After this adjustment, A will contain more values than indicated and B will have less. */ - private void adjustBoundsForOverlappingBuckets(List buckets, ReduceContext reduceContext){ - for(int i = 1; i < buckets.size(); i++){ + private void adjustBoundsForOverlappingBuckets(List buckets, ReduceContext reduceContext) { + for (int i = 1; i < buckets.size(); i++) { Bucket curBucket = buckets.get(i); - Bucket prevBucket = buckets.get(i-1); - if(curBucket.bounds.min < prevBucket.bounds.max){ + Bucket prevBucket = buckets.get(i - 1); + if (curBucket.bounds.min < prevBucket.bounds.max) { // We don't want overlapping buckets --> Adjust their bounds // TODO: Think of a fairer way to do this. Should prev.max = cur.min? 
curBucket.bounds.min = (prevBucket.bounds.max + curBucket.bounds.min) / 2; @@ -535,13 +543,12 @@ private void adjustBoundsForOverlappingBuckets(List buckets, ReduceConte public InternalAggregation reduce(List aggregations, ReduceContext reduceContext) { List reducedBuckets = reduceBuckets(aggregations, reduceContext); - if(reduceContext.isFinalReduce()) { + if (reduceContext.isFinalReduce()) { buckets.sort(Comparator.comparing(Bucket::min)); mergeBucketsWithSameMin(reducedBuckets, reduceContext); adjustBoundsForOverlappingBuckets(reducedBuckets, reduceContext); } - return new InternalVariableWidthHistogram(getName(), reducedBuckets, emptyBucketInfo, targetNumBuckets, - format, metadata); + return new InternalVariableWidthHistogram(getName(), reducedBuckets, emptyBucketInfo, targetNumBuckets, format, metadata); } @Override @@ -562,8 +569,7 @@ public InternalAggregation createAggregation(List PARSER = new ConstructingObjectParser<>( - "bounds", a -> { + public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>("bounds", a -> { assert a.length == 2; Long min = null; Long max = null; @@ -153,8 +152,20 @@ LongBounds parseAndValidate(String aggName, String boundsName, LongSupplier nowI max = format.parseLong(maxAsStr, false, nowInMillis); } if (min != null && max != null && min.compareTo(max) > 0) { - throw new IllegalArgumentException("[" + boundsName + ".min][" + min + "] cannot be greater than " + - "[" + boundsName + ".max][" + max + "] for histogram aggregation [" + aggName + "]"); + throw new IllegalArgumentException( + "[" + + boundsName + + ".min][" + + min + + "] cannot be greater than " + + "[" + + boundsName + + ".max][" + + max + + "] for histogram aggregation [" + + aggName + + "]" + ); } return new LongBounds(min, max, minAsStr, maxAsStr); } @@ -162,9 +173,7 @@ LongBounds parseAndValidate(String aggName, String boundsName, LongSupplier nowI LongBounds round(Rounding rounding) { // Extended bounds shouldn't be effected by the offset Rounding effectiveRounding = rounding.withoutOffset(); - return new LongBounds( - min != null ? effectiveRounding.round(min) : null, - max != null ? effectiveRounding.round(max) : null); + return new LongBounds(min != null ? effectiveRounding.round(min) : null, max != null ? 
effectiveRounding.round(max) : null); } @Override @@ -197,9 +206,9 @@ public boolean equals(Object obj) { } LongBounds other = (LongBounds) obj; return Objects.equals(min, other.min) - && Objects.equals(max, other.max) - && Objects.equals(minAsStr, other.minAsStr) - && Objects.equals(maxAsStr, other.maxAsStr); + && Objects.equals(max, other.max) + && Objects.equals(minAsStr, other.minAsStr) + && Objects.equals(maxAsStr, other.maxAsStr); } public Long getMin() { diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/NumericHistogramAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/NumericHistogramAggregator.java index ba93e7a144442..1045c2e8a9fc7 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/NumericHistogramAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/NumericHistogramAggregator.java @@ -78,8 +78,7 @@ public ScoreMode scoreMode() { } @Override - public LeafBucketCollector getLeafCollector(LeafReaderContext ctx, - final LeafBucketCollector sub) throws IOException { + public LeafBucketCollector getLeafCollector(LeafReaderContext ctx, final LeafBucketCollector sub) throws IOException { if (valuesSource == null) { return LeafBucketCollector.NO_OP_COLLECTOR; } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/ParsedAutoDateHistogram.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/ParsedAutoDateHistogram.java index 23941fc2083d4..0a672488c83c4 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/ParsedAutoDateHistogram.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/ParsedAutoDateHistogram.java @@ -8,8 +8,8 @@ package org.elasticsearch.search.aggregations.bucket.histogram; -import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.xcontent.ObjectParser; +import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.search.aggregations.ParsedMultiBucketAggregation; @@ -41,14 +41,18 @@ public List getBuckets() { return buckets; } - private static final ObjectParser PARSER = - new ObjectParser<>(ParsedAutoDateHistogram.class.getSimpleName(), true, ParsedAutoDateHistogram::new); + private static final ObjectParser PARSER = new ObjectParser<>( + ParsedAutoDateHistogram.class.getSimpleName(), + true, + ParsedAutoDateHistogram::new + ); static { - declareMultiBucketAggregationFields(PARSER, - parser -> ParsedBucket.fromXContent(parser, false), - parser -> ParsedBucket.fromXContent(parser, true)); - PARSER.declareString((parsed, value) -> parsed.interval = value, - new ParseField("interval")); + declareMultiBucketAggregationFields( + PARSER, + parser -> ParsedBucket.fromXContent(parser, false), + parser -> ParsedBucket.fromXContent(parser, true) + ); + PARSER.declareString((parsed, value) -> parsed.interval = value, new ParseField("interval")); } public static ParsedAutoDateHistogram fromXContent(XContentParser parser, String name) throws IOException { @@ -64,7 +68,6 @@ protected XContentBuilder doXContentBody(XContentBuilder builder, Params params) return builder; } - public static class ParsedBucket extends ParsedMultiBucketAggregation.ParsedBucket implements Histogram.Bucket { private Long key; diff --git 
a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/ParsedDateHistogram.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/ParsedDateHistogram.java index 1492629b7400c..64f81918c3bf7 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/ParsedDateHistogram.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/ParsedDateHistogram.java @@ -30,12 +30,17 @@ public List getBuckets() { return buckets; } - private static final ObjectParser PARSER = - new ObjectParser<>(ParsedDateHistogram.class.getSimpleName(), true, ParsedDateHistogram::new); + private static final ObjectParser PARSER = new ObjectParser<>( + ParsedDateHistogram.class.getSimpleName(), + true, + ParsedDateHistogram::new + ); static { - declareMultiBucketAggregationFields(PARSER, - parser -> ParsedBucket.fromXContent(parser, false), - parser -> ParsedBucket.fromXContent(parser, true)); + declareMultiBucketAggregationFields( + PARSER, + parser -> ParsedBucket.fromXContent(parser, false), + parser -> ParsedBucket.fromXContent(parser, true) + ); } public static ParsedDateHistogram fromXContent(XContentParser parser, String name) throws IOException { diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/ParsedHistogram.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/ParsedHistogram.java index 77cb0dc4a1fd9..c628b624c0930 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/ParsedHistogram.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/ParsedHistogram.java @@ -27,12 +27,17 @@ public List getBuckets() { return buckets; } - private static final ObjectParser PARSER = - new ObjectParser<>(ParsedHistogram.class.getSimpleName(), true, ParsedHistogram::new); + private static final ObjectParser PARSER = new ObjectParser<>( + ParsedHistogram.class.getSimpleName(), + true, + ParsedHistogram::new + ); static { - declareMultiBucketAggregationFields(PARSER, - parser -> ParsedBucket.fromXContent(parser, false), - parser -> ParsedBucket.fromXContent(parser, true)); + declareMultiBucketAggregationFields( + PARSER, + parser -> ParsedBucket.fromXContent(parser, false), + parser -> ParsedBucket.fromXContent(parser, true) + ); } public static ParsedHistogram fromXContent(XContentParser parser, String name) throws IOException { diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/ParsedVariableWidthHistogram.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/ParsedVariableWidthHistogram.java index 383f4f8b1c948..c1ec1b4699542 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/ParsedVariableWidthHistogram.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/ParsedVariableWidthHistogram.java @@ -23,24 +23,30 @@ import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; public class ParsedVariableWidthHistogram extends ParsedMultiBucketAggregation - implements Histogram{ + implements + Histogram { @Override - public String getType() { return VariableWidthHistogramAggregationBuilder.NAME; } + public String getType() { + return VariableWidthHistogramAggregationBuilder.NAME; + } @Override - public List getBuckets() { return buckets; } - - private static ObjectParser PARSER = - new ObjectParser<>( - 
ParsedVariableWidthHistogram.class.getSimpleName(), - true, - ParsedVariableWidthHistogram::new - ) ; + public List getBuckets() { + return buckets; + } + + private static ObjectParser PARSER = new ObjectParser<>( + ParsedVariableWidthHistogram.class.getSimpleName(), + true, + ParsedVariableWidthHistogram::new + ); static { - declareMultiBucketAggregationFields(PARSER, + declareMultiBucketAggregationFields( + PARSER, parser -> ParsedBucket.fromXContent(parser, false), - parser -> ParsedBucket.fromXContent(parser, true)); + parser -> ParsedBucket.fromXContent(parser, true) + ); } public static ParsedVariableWidthHistogram fromXContent(XContentParser parser, String name) throws IOException { @@ -49,8 +55,7 @@ public static ParsedVariableWidthHistogram fromXContent(XContentParser parser, S return aggregation; } - - public static class ParsedBucket extends ParsedMultiBucketAggregation.ParsedBucket implements Histogram.Bucket{ + public static class ParsedBucket extends ParsedMultiBucketAggregation.ParsedBucket implements Histogram.Bucket { private Double key; private Double min; @@ -80,7 +85,7 @@ public void setMin(Double min) { this.min = min; } - public void setMinAsString(String minAsString){ + public void setMinAsString(String minAsString) { this.minAsString = minAsString; } @@ -98,11 +103,11 @@ public String getMinAsString() { return null; } - public void setMax(Double max){ + public void setMax(Double max) { this.max = max; } - public void setMaxAsString(String maxAsString){ + public void setMaxAsString(String maxAsString) { this.maxAsString = maxAsString; } @@ -154,8 +159,12 @@ static ParsedBucket fromXContent(XContentParser parser, boolean keyed) throws IO if (CommonFields.KEY.getPreferredName().equals(currentFieldName)) { bucket.key = parser.doubleValue(); } else { - XContentParserUtils.parseTypedKeysObject(parser, Aggregation.TYPED_KEYS_DELIMITER, Aggregation.class, - aggregations::add); + XContentParserUtils.parseTypedKeysObject( + parser, + Aggregation.TYPED_KEYS_DELIMITER, + Aggregation.class, + aggregations::add + ); } } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/RangeHistogramAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/RangeHistogramAggregator.java index 5c92408d814ec..5ca61ffd30bcb 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/RangeHistogramAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/RangeHistogramAggregator.java @@ -97,10 +97,12 @@ public void collect(int doc, long owningBucketOrd) throws IOException { // The encoding should ensure that this assert is always true. assert from >= previousFrom : "Start of range not >= previous start"; final Double to = rangeType.doubleValue(range.getTo()); - final double effectiveFrom = (hardBounds != null && hardBounds.getMin() != null) ? - Double.max(from, hardBounds.getMin()) : from; - final double effectiveTo = (hardBounds != null && hardBounds.getMax() != null) ? - Double.min(to, hardBounds.getMax()) : to; + final double effectiveFrom = (hardBounds != null && hardBounds.getMin() != null) + ? Double.max(from, hardBounds.getMin()) + : from; + final double effectiveTo = (hardBounds != null && hardBounds.getMax() != null) + ? 
Double.min(to, hardBounds.getMax()) + : to; final double startKey = Math.floor((effectiveFrom - offset) / interval); final double endKey = Math.floor((effectiveTo - offset) / interval); for (double key = Math.max(startKey, previousKey); key <= endKey; key++) { diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/VariableWidthHistogramAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/VariableWidthHistogramAggregationBuilder.java index 2c8f489179ab9..002bb1c3b68ef 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/VariableWidthHistogramAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/VariableWidthHistogramAggregationBuilder.java @@ -8,11 +8,11 @@ package org.elasticsearch.search.aggregations.bucket.histogram; -import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.ObjectParser; +import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregatorFactories; @@ -43,9 +43,11 @@ public class VariableWidthHistogramAggregationBuilder extends ValuesSourceAggreg private static final ParseField SHARD_SIZE_FIELD = new ParseField("shard_size"); - public static final ObjectParser PARSER = - ObjectParser.fromBuilder(NAME, VariableWidthHistogramAggregationBuilder::new); - static{ + public static final ObjectParser PARSER = ObjectParser.fromBuilder( + NAME, + VariableWidthHistogramAggregationBuilder::new + ); + static { ValuesSourceAggregationBuilder.declareFields(PARSER, true, true, true); PARSER.declareInt(VariableWidthHistogramAggregationBuilder::setNumBuckets, NUM_BUCKETS_FIELD); PARSER.declareInt(VariableWidthHistogramAggregationBuilder::setShardSize, SHARD_SIZE_FIELD); @@ -71,9 +73,11 @@ public VariableWidthHistogramAggregationBuilder(StreamInput in) throws IOExcepti numBuckets = in.readVInt(); } - protected VariableWidthHistogramAggregationBuilder(VariableWidthHistogramAggregationBuilder clone, - AggregatorFactories.Builder factoriesBuilder, - Map metaData) { + protected VariableWidthHistogramAggregationBuilder( + VariableWidthHistogramAggregationBuilder clone, + AggregatorFactories.Builder factoriesBuilder, + Map metaData + ) { super(clone, factoriesBuilder, metaData); this.numBuckets = clone.numBuckets; } @@ -83,10 +87,9 @@ protected ValuesSourceType defaultValueSourceType() { return CoreValuesSourceType.NUMERIC; } - public VariableWidthHistogramAggregationBuilder setNumBuckets(int numBuckets){ + public VariableWidthHistogramAggregationBuilder setNumBuckets(int numBuckets) { if (numBuckets <= 0) { - throw new IllegalArgumentException(NUM_BUCKETS_FIELD.getPreferredName() + " must be greater than [0] for [" - + name + "]"); + throw new IllegalArgumentException(NUM_BUCKETS_FIELD.getPreferredName() + " must be greater than [0] for [" + name + "]"); } this.numBuckets = numBuckets; return this; @@ -103,8 +106,7 @@ public VariableWidthHistogramAggregationBuilder setShardSize(int shardSize) { public VariableWidthHistogramAggregationBuilder setInitialBuffer(int initialBuffer) { if (initialBuffer <= 0) { - throw new 
IllegalArgumentException(INITIAL_BUFFER_FIELD.getPreferredName() + " must be greater than [0] for [" - + name + "]"); + throw new IllegalArgumentException(INITIAL_BUFFER_FIELD.getPreferredName() + " must be greater than [0] for [" + name + "]"); } this.initialBuffer = initialBuffer; return this; @@ -144,10 +146,12 @@ protected void innerWriteTo(StreamOutput out) throws IOException { } @Override - protected ValuesSourceAggregatorFactory innerBuild(AggregationContext context, - ValuesSourceConfig config, - AggregatorFactory parent, - AggregatorFactories.Builder subFactoriesBuilder) throws IOException { + protected ValuesSourceAggregatorFactory innerBuild( + AggregationContext context, + ValuesSourceConfig config, + AggregatorFactory parent, + AggregatorFactories.Builder subFactoriesBuilder + ) throws IOException { Settings settings = context.getIndexSettings().getNodeSettings(); int maxBuckets = MultiBucketConsumerService.MAX_BUCKET_SETTING.get(settings); if (numBuckets > maxBuckets) { @@ -172,16 +176,35 @@ protected ValuesSourceAggregatorFactory innerBuild(AggregationContext context, int mergePhaseInit = VariableWidthHistogramAggregator.mergePhaseInitialBucketCount(shardSize); if (mergePhaseInit < numBuckets) { // If the initial buckets from the merge phase is super low we will consistently return too few buckets - throw new IllegalArgumentException("3/4 of " + SHARD_SIZE_FIELD.getPreferredName() + " must be at least " - + NUM_BUCKETS_FIELD.getPreferredName() + " but was [" + mergePhaseInit + "<" + numBuckets + "] for [" + name + "]"); + throw new IllegalArgumentException( + "3/4 of " + + SHARD_SIZE_FIELD.getPreferredName() + + " must be at least " + + NUM_BUCKETS_FIELD.getPreferredName() + + " but was [" + + mergePhaseInit + + "<" + + numBuckets + + "] for [" + + name + + "]" + ); } - VariableWidthHistogramAggregatorSupplier aggregatorSupplier = - context.getValuesSourceRegistry().getAggregator(REGISTRY_KEY, config); + VariableWidthHistogramAggregatorSupplier aggregatorSupplier = context.getValuesSourceRegistry().getAggregator(REGISTRY_KEY, config); - return new VariableWidthHistogramAggregatorFactory(name, config, numBuckets, shardSize, initialBuffer, - context, parent, subFactoriesBuilder, metadata, - aggregatorSupplier); + return new VariableWidthHistogramAggregatorFactory( + name, + config, + numBuckets, + shardSize, + initialBuffer, + context, + parent, + subFactoriesBuilder, + metadata, + aggregatorSupplier + ); } @Override @@ -207,7 +230,9 @@ public boolean equals(Object obj) { } @Override - public String getType() { return NAME; } + public String getType() { + return NAME; + } @Override protected ValuesSourceRegistry.RegistryKey getRegistryKey() { diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/VariableWidthHistogramAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/VariableWidthHistogramAggregator.java index 5dfbe6db1eb4e..6802fcd8ea2bc 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/VariableWidthHistogramAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/VariableWidthHistogramAggregator.java @@ -12,11 +12,11 @@ import org.apache.lucene.search.ScoreMode; import org.apache.lucene.util.CollectionUtil; import org.apache.lucene.util.InPlaceMergeSorter; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.common.util.DoubleArray; import org.elasticsearch.core.Nullable; import 
org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; -import org.elasticsearch.common.util.BigArrays; -import org.elasticsearch.common.util.DoubleArray; import org.elasticsearch.index.fielddata.SortedNumericDoubleValues; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.Aggregator; @@ -59,7 +59,6 @@ private abstract class CollectionPhase implements Releasable { */ abstract CollectionPhase collectValue(LeafBucketCollector sub, int doc, double val) throws IOException; - /** * @return the final number of buckets that will be used * If this is not the final phase, then an instance of the next phase is created and it is asked for this answer. @@ -79,14 +78,14 @@ private abstract class CollectionPhase implements Releasable { * Phase 1: Build up a buffer of docs (i.e. give each new doc its own bucket). No clustering decisions are made here. * Building this buffer lets us analyze the distribution of the data before we begin clustering. */ - private class BufferValuesPhase extends CollectionPhase{ + private class BufferValuesPhase extends CollectionPhase { private DoubleArray buffer; private int bufferSize; private int bufferLimit; private MergeBucketsPhase mergeBucketsPhase; - BufferValuesPhase(int bufferLimit){ + BufferValuesPhase(int bufferLimit) { this.buffer = bigArrays().newDoubleArray(1); this.bufferSize = 0; this.bufferLimit = bufferLimit; @@ -94,7 +93,7 @@ private class BufferValuesPhase extends CollectionPhase{ } @Override - public CollectionPhase collectValue(LeafBucketCollector sub, int doc, double val) throws IOException{ + public CollectionPhase collectValue(LeafBucketCollector sub, int doc, double val) throws IOException { if (bufferSize < bufferLimit) { // Add to the buffer i.e store the doc in a new bucket buffer = bigArrays().grow(buffer, bufferSize + 1); @@ -103,7 +102,7 @@ public CollectionPhase collectValue(LeafBucketCollector sub, int doc, double val bufferSize += 1; } - if(bufferSize == bufferLimit) { + if (bufferSize == bufferLimit) { // We have hit the buffer limit. Switch to merge mode CollectionPhase mergeBuckets = new MergeBucketsPhase(buffer, bufferSize); Releasables.close(this); @@ -114,18 +113,18 @@ public CollectionPhase collectValue(LeafBucketCollector sub, int doc, double val } } - int finalNumBuckets(){ + int finalNumBuckets() { return getMergeBucketPhase().finalNumBuckets(); } @Override - InternalVariableWidthHistogram.Bucket buildBucket(int bucketOrd, InternalAggregations subAggregations) throws IOException{ + InternalVariableWidthHistogram.Bucket buildBucket(int bucketOrd, InternalAggregations subAggregations) throws IOException { InternalVariableWidthHistogram.Bucket bucket = getMergeBucketPhase().buildBucket(bucketOrd, subAggregations); return bucket; } - MergeBucketsPhase getMergeBucketPhase(){ - if(mergeBucketsPhase == null){ + MergeBucketsPhase getMergeBucketPhase() { + if (mergeBucketsPhase == null) { mergeBucketsPhase = new MergeBucketsPhase(buffer, bufferSize); } return mergeBucketsPhase; @@ -133,7 +132,7 @@ MergeBucketsPhase getMergeBucketPhase(){ @Override public void close() { - if(mergeBucketsPhase != null){ + if (mergeBucketsPhase != null) { Releasables.close(mergeBucketsPhase); } Releasables.close(buffer); @@ -145,7 +144,7 @@ public void close() { * It is responsible for merging the buffered docs into a smaller number of buckets and then determining which existing * bucket all subsequent docs belong to. 
New buckets will be created for docs that are distant from all existing ones */ - private class MergeBucketsPhase extends CollectionPhase{ + private class MergeBucketsPhase extends CollectionPhase { /** * "Cluster" refers to intermediate buckets during collection * They are kept sorted by centroid. The i'th index in all these arrays always refers to the i'th cluster @@ -162,7 +161,7 @@ private class MergeBucketsPhase extends CollectionPhase{ // Cluster the documents to reduce the number of buckets bucketBufferedDocs(buffer, bufferSize, mergePhaseInitialBucketCount(shardSize)); - if(bufferSize > 1) { + if (bufferSize > 1) { updateAvgBucketDistance(); } } @@ -176,11 +175,11 @@ private class ClusterSorter extends InPlaceMergeSorter { final DoubleArray values; final long[] indexes; - ClusterSorter(DoubleArray values, int length){ + ClusterSorter(DoubleArray values, int length) { this.values = values; this.indexes = new long[length]; - for(int i = 0; i < indexes.length; i++){ + for (int i = 0; i < indexes.length; i++) { indexes[i] = i; } } @@ -206,7 +205,7 @@ protected void swap(int i, int j) { * * See BucketsAggregator::mergeBuckets to learn more about the merge map */ - public long[] generateMergeMap(){ + public long[] generateMergeMap() { sort(0, indexes.length); return indexes; } @@ -236,19 +235,19 @@ private void bucketBufferedDocs(final DoubleArray buffer, final int bufferSize, // This will require modifying the merge map, which currently represents a sorted list of buckets with 1 doc / bucket int docsPerBucket = (int) Math.ceil((double) bufferSize / (double) numBuckets); int bucketOrd = 0; - for(int i = 0; i < mergeMap.length; i++){ + for (int i = 0; i < mergeMap.length; i++) { // mergeMap[i] is the index of the i'th smallest doc double val = buffer.get(mergeMap[i]); // Put the i'th smallest doc into the bucket at bucketOrd - mergeMap[i] = (int)(mergeMap[i]/docsPerBucket); - if(bucketOrd == numClusters){ + mergeMap[i] = (int) (mergeMap[i] / docsPerBucket); + if (bucketOrd == numClusters) { createAndAppendNewCluster(val); } else { addToCluster(bucketOrd, val); } - if((i + 1) % docsPerBucket == 0){ + if ((i + 1) % docsPerBucket == 0) { // This bucket is full. Make a new one bucketOrd += 1; } @@ -262,17 +261,17 @@ private void bucketBufferedDocs(final DoubleArray buffer, final int bufferSize, } @Override - public CollectionPhase collectValue(LeafBucketCollector sub, int doc, double val) throws IOException{ + public CollectionPhase collectValue(LeafBucketCollector sub, int doc, double val) throws IOException { int bucketOrd = getNearestBucket(val); double distance = Math.abs(clusterCentroids.get(bucketOrd) - val); - if(bucketOrd == -1 || distance > (2 * avgBucketDistance) && numClusters < shardSize) { + if (bucketOrd == -1 || distance > (2 * avgBucketDistance) && numClusters < shardSize) { // Make a new bucket since the document is distant from all existing buckets // TODO: (maybe) Create a new bucket for all distant docs and merge down to shardSize buckets at end createAndAppendNewCluster(val); collectBucket(sub, doc, numClusters - 1); - if(val > clusterCentroids.get(bucketOrd)){ + if (val > clusterCentroids.get(bucketOrd)) { /* * If the new value is bigger than the nearest bucket then insert * just ahead of bucketOrd so that the array remains sorted. 
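The collection logic in this hunk decides, for each incoming value, whether to fold it into the nearest existing cluster or to open a new one when it lies more than twice the average centroid spacing away and the shard-level cluster limit has not been reached. Below is a minimal, self-contained sketch of that decision rule under simplifying assumptions: plain Java lists instead of BigArrays, a linear nearest-centroid scan instead of the binary search, and no per-cluster min/max bookkeeping or sub-aggregations. The class SimpleClusterer and all of its members are hypothetical names used only to illustrate the idea, not part of the patch.

    import java.util.ArrayList;
    import java.util.List;

    /**
     * Minimal, simplified sketch of the merge-phase rule: a value joins its nearest cluster
     * unless it is more than twice the average centroid spacing away, in which case a new
     * cluster is opened, up to a maximum number of clusters.
     */
    public class SimpleClusterer {

        private final List<Double> centroids = new ArrayList<>(); // kept sorted ascending
        private final List<Long> sizes = new ArrayList<>();
        private final int maxClusters;
        private double avgDistance;

        /** Seed with the sorted centroids produced by an initial buffering phase. */
        public SimpleClusterer(List<Double> seedCentroids, int maxClusters) {
            for (double c : seedCentroids) {
                centroids.add(c);
                sizes.add(1L);
            }
            this.maxClusters = maxClusters;
            updateAvgDistance();
        }

        /** Assigns the value to a cluster and returns that cluster's index. */
        public int collect(double value) {
            if (centroids.isEmpty()) {
                centroids.add(value);
                sizes.add(1L);
                return 0;
            }
            int nearest = nearestIndex(value);
            double distance = Math.abs(centroids.get(nearest) - value);
            if (distance > 2 * avgDistance && centroids.size() < maxClusters) {
                // Distant from every existing cluster: open a new one, keeping the list sorted.
                int insertAt = value > centroids.get(nearest) ? nearest + 1 : nearest;
                centroids.add(insertAt, value);
                sizes.add(insertAt, 1L);
                updateAvgDistance();
                return insertAt;
            }
            // Close enough: fold the value into the nearest cluster via a weighted centroid update.
            long size = sizes.get(nearest);
            centroids.set(nearest, (centroids.get(nearest) * size + value) / (size + 1));
            sizes.set(nearest, size + 1);
            return nearest;
        }

        private int nearestIndex(double value) {
            int best = 0; // linear scan for clarity; the real aggregator binary-searches the sorted centroids
            for (int i = 1; i < centroids.size(); i++) {
                if (Math.abs(centroids.get(i) - value) < Math.abs(centroids.get(best) - value)) {
                    best = i;
                }
            }
            return best;
        }

        private void updateAvgDistance() {
            double total = 0;
            for (int i = 1; i < centroids.size(); i++) {
                total += centroids.get(i) - centroids.get(i - 1);
            }
            avgDistance = centroids.size() > 1 ? total / (centroids.size() - 1) : 0;
        }
    }

For instance, seeding the sketch with centroids {1, 2, 50} gives an average spacing of 24.5; collecting 1.5 then folds into the first cluster, while collecting 200 is more than 2 x 24.5 away from every centroid and opens a fourth cluster.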
@@ -301,9 +300,9 @@ private void updateAvgBucketDistance() { /** * Creates a new cluster with value and appends it to the cluster arrays */ - private void createAndAppendNewCluster(double value){ + private void createAndAppendNewCluster(double value) { // Ensure there is space for the cluster - clusterMaxes = bigArrays().grow(clusterMaxes, numClusters + 1); // + 1 because indexing starts at 0 + clusterMaxes = bigArrays().grow(clusterMaxes, numClusters + 1); // + 1 because indexing starts at 0 clusterMins = bigArrays().grow(clusterMins, numClusters + 1); clusterCentroids = bigArrays().grow(clusterCentroids, numClusters + 1); clusterSizes = bigArrays().grow(clusterSizes, numClusters + 1); @@ -323,20 +322,20 @@ private void createAndAppendNewCluster(double value){ * * TODO: Make this more efficient */ - private void moveLastCluster(int index){ - if(index != numClusters - 1) { + private void moveLastCluster(int index) { + if (index != numClusters - 1) { // Move the cluster metadata - double holdMax = clusterMaxes.get(numClusters-1); - double holdMin = clusterMins.get(numClusters-1); - double holdCentroid = clusterCentroids.get(numClusters-1); - double holdSize = clusterSizes.get(numClusters-1); + double holdMax = clusterMaxes.get(numClusters - 1); + double holdMin = clusterMins.get(numClusters - 1); + double holdCentroid = clusterCentroids.get(numClusters - 1); + double holdSize = clusterSizes.get(numClusters - 1); for (int i = numClusters - 1; i > index; i--) { // The clusters in range {index ... numClusters - 1} move up 1 index to make room for the new cluster - clusterMaxes.set(i, clusterMaxes.get(i-1)); - clusterMins.set(i, clusterMins.get(i-1)); - clusterCentroids.set(i, clusterCentroids.get(i-1)); - clusterSizes.set(i, clusterSizes.get(i-1)); + clusterMaxes.set(i, clusterMaxes.get(i - 1)); + clusterMins.set(i, clusterMins.get(i - 1)); + clusterCentroids.set(i, clusterCentroids.get(i - 1)); + clusterSizes.set(i, clusterSizes.get(i - 1)); } clusterMaxes.set(index, holdMax); clusterMins.set(index, holdMin); @@ -347,13 +346,13 @@ private void moveLastCluster(int index){ LongUnaryOperator mergeMap = new LongUnaryOperator() { @Override public long applyAsLong(long i) { - if(i < index) { + if (i < index) { // The clusters in range {0 ... idx - 1} don't move return i; } - if(i == numClusters - 1) { + if (i == numClusters - 1) { // The new cluster moves to index - return (long)index; + return (long) index; } // The clusters in range {index ... numClusters - 1} shift forward return i + 1; @@ -371,7 +370,7 @@ public long applyAsLong(long i) { * Adds val to the cluster at index bucketOrd. * The cluster's centroid, min, max, and size are recalculated. */ - private void addToCluster(int bucketOrd, double val){ + private void addToCluster(int bucketOrd, double val) { assert bucketOrd < numClusters; double max = Math.max(clusterMaxes.get(bucketOrd), val); @@ -391,8 +390,8 @@ private void addToCluster(int bucketOrd, double val){ /** * Returns the ordinal of the bucket whose centroid is closest to val, or -1 if there are no buckets. 
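* Centroids are kept sorted by value, so the lookup below can use a binary search over clusterCentroids instead of a linear scan.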
**/ - private int getNearestBucket(double value){ - if (numClusters == 0){ + private int getNearestBucket(double value) { + if (numClusters == 0) { return -1; } BigArrays.DoubleBinarySearcher binarySearcher = new BigArrays.DoubleBinarySearcher(clusterCentroids); @@ -400,18 +399,19 @@ private int getNearestBucket(double value){ } @Override - int finalNumBuckets(){ + int finalNumBuckets() { return numClusters; } @Override - InternalVariableWidthHistogram.Bucket buildBucket(int bucketOrd, InternalAggregations subAggregations){ + InternalVariableWidthHistogram.Bucket buildBucket(int bucketOrd, InternalAggregations subAggregations) { return new InternalVariableWidthHistogram.Bucket( clusterCentroids.get(bucketOrd), new InternalVariableWidthHistogram.Bucket.BucketBounds(clusterMins.get(bucketOrd), clusterMaxes.get(bucketOrd)), bucketDocCount(bucketOrd), formatter, - subAggregations); + subAggregations + ); } @Override @@ -432,10 +432,17 @@ public void close() { private BestBucketsDeferringCollector deferringCollector; - VariableWidthHistogramAggregator(String name, AggregatorFactories factories, int numBuckets, int shardSize, - int initialBuffer, @Nullable ValuesSourceConfig valuesSourceConfig, - AggregationContext context, Aggregator parent, - Map metadata) throws IOException{ + VariableWidthHistogramAggregator( + String name, + AggregatorFactories factories, + int numBuckets, + int shardSize, + int initialBuffer, + @Nullable ValuesSourceConfig valuesSourceConfig, + AggregationContext context, + Aggregator parent, + Map metadata + ) throws IOException { super(name, factories, context, parent, metadata); this.numBuckets = numBuckets; @@ -457,9 +464,16 @@ public void close() { * But the VariableWidthHistogram agg _must_ execute in breadth first since it relies on * deferring execution, so we just have to throw up our hands and refuse */ - throw new IllegalStateException("VariableWidthHistogram agg [" + name() + "] is the child of the nested agg [" + nestedAgg - + "], and also has a scoring child agg [" + scoringAgg + "]. This combination is not supported because " + - "it requires executing in [depth_first] mode, which the VariableWidthHistogram agg cannot do."); + throw new IllegalStateException( + "VariableWidthHistogram agg [" + + name() + + "] is the child of the nested agg [" + + nestedAgg + + "], and also has a scoring child agg [" + + scoringAgg + + "]. This combination is not supported because " + + "it requires executing in [depth_first] mode, which the VariableWidthHistogram agg cannot do." 
+ ); } } @@ -507,17 +521,17 @@ protected LeafBucketCollector getLeafCollector(LeafReaderContext ctx, LeafBucket return LeafBucketCollector.NO_OP_COLLECTOR; } final SortedNumericDoubleValues values = valuesSource.doubleValues(ctx); - return new LeafBucketCollectorBase(sub, values){ + return new LeafBucketCollectorBase(sub, values) { @Override public void collect(int doc, long bucket) throws IOException { assert bucket == 0; - if(values.advanceExact(doc)){ + if (values.advanceExact(doc)) { final int valuesCount = values.docValueCount(); double prevVal = Double.NEGATIVE_INFINITY; for (int i = 0; i < valuesCount; ++i) { double val = values.nextValue(); assert val >= prevVal; - if (val == prevVal){ + if (val == prevVal) { continue; } @@ -528,7 +542,6 @@ public void collect(int doc, long bucket) throws IOException { }; } - @Override public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException { int numClusters = collector.finalNumBuckets(); @@ -545,13 +558,14 @@ public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws I buckets.add(collector.buildBucket(bucketOrd, subAggregationResults[bucketOrd])); } - Function, InternalAggregation> resultBuilder = bucketsToFormat -> { + Function, InternalAggregation> resultBuilder = bucketsToFormat -> { // The contract of the histogram aggregation is that shards must return // buckets ordered by centroid in ascending order CollectionUtil.introSort(bucketsToFormat, BucketOrder.key(true).comparator()); InternalVariableWidthHistogram.EmptyBucketInfo emptyBucketInfo = new InternalVariableWidthHistogram.EmptyBucketInfo( - buildEmptySubAggregations()); + buildEmptySubAggregations() + ); return new InternalVariableWidthHistogram(name, bucketsToFormat, emptyBucketInfo, numBuckets, formatter, metadata()); }; @@ -578,4 +592,3 @@ public static int mergePhaseInitialBucketCount(int shardSize) { return (int) ((long) shardSize * 3 / 4); } } - diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/VariableWidthHistogramAggregatorFactory.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/VariableWidthHistogramAggregatorFactory.java index af0c2e6dbae8c..17214e67ea2f8 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/VariableWidthHistogramAggregatorFactory.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/VariableWidthHistogramAggregatorFactory.java @@ -28,7 +28,8 @@ public static void registerAggregators(ValuesSourceRegistry.Builder builder) { VariableWidthHistogramAggregationBuilder.REGISTRY_KEY, CoreValuesSourceType.NUMERIC, VariableWidthHistogramAggregator::new, - true); + true + ); } private final VariableWidthHistogramAggregatorSupplier aggregatorSupplier; @@ -36,16 +37,18 @@ public static void registerAggregators(ValuesSourceRegistry.Builder builder) { private final int shardSize; private final int initialBuffer; - VariableWidthHistogramAggregatorFactory(String name, - ValuesSourceConfig config, - int numBuckets, - int shardSize, - int initialBuffer, - AggregationContext context, - AggregatorFactory parent, - AggregatorFactories.Builder subFactoriesBuilder, - Map metadata, - VariableWidthHistogramAggregatorSupplier aggregatorSupplier) throws IOException{ + VariableWidthHistogramAggregatorFactory( + String name, + ValuesSourceConfig config, + int numBuckets, + int shardSize, + int initialBuffer, + AggregationContext context, + AggregatorFactory parent, + AggregatorFactories.Builder 
subFactoriesBuilder, + Map metadata, + VariableWidthHistogramAggregatorSupplier aggregatorSupplier + ) throws IOException { super(name, config, context, parent, subFactoriesBuilder, metadata); this.aggregatorSupplier = aggregatorSupplier; this.numBuckets = numBuckets; @@ -63,13 +66,21 @@ protected Aggregator doCreateInternal(Aggregator parent, CardinalityUpperBound c + "] cannot be nested inside an aggregation that collects more than a single bucket." ); } - return aggregatorSupplier - .build(name, factories, numBuckets, shardSize, initialBuffer, config, context, parent, metadata); + return aggregatorSupplier.build(name, factories, numBuckets, shardSize, initialBuffer, config, context, parent, metadata); } @Override protected Aggregator createUnmapped(Aggregator parent, Map metadata) throws IOException { - return new VariableWidthHistogramAggregator(name, factories, numBuckets, shardSize, initialBuffer, config, - context, parent, metadata); + return new VariableWidthHistogramAggregator( + name, + factories, + numBuckets, + shardSize, + initialBuffer, + config, + context, + parent, + metadata + ); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/missing/Missing.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/missing/Missing.java index c760b066441e1..30ef76b1451f1 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/missing/Missing.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/missing/Missing.java @@ -12,5 +12,4 @@ /** * A {@code missing} aggregation. Defines a single bucket of all documents that are missing a specific field. */ -public interface Missing extends SingleBucketAggregation { -} +public interface Missing extends SingleBucketAggregation {} diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/missing/MissingAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/missing/MissingAggregationBuilder.java index 815e8a4ea5d37..1140c03c55aae 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/missing/MissingAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/missing/MissingAggregationBuilder.java @@ -34,8 +34,10 @@ public class MissingAggregationBuilder extends ValuesSourceAggregationBuilder PARSER = - ObjectParser.fromBuilder(NAME, MissingAggregationBuilder::new); + public static final ObjectParser PARSER = ObjectParser.fromBuilder( + NAME, + MissingAggregationBuilder::new + ); static { ValuesSourceAggregationBuilder.declareFields(PARSER, true, true, false); } @@ -48,9 +50,11 @@ public MissingAggregationBuilder(String name) { super(name); } - protected MissingAggregationBuilder(MissingAggregationBuilder clone, - AggregatorFactories.Builder factoriesBuilder, - Map metadata) { + protected MissingAggregationBuilder( + MissingAggregationBuilder clone, + AggregatorFactories.Builder factoriesBuilder, + Map metadata + ) { super(clone, factoriesBuilder, metadata); } @@ -86,16 +90,16 @@ public BucketCardinality bucketCardinality() { return BucketCardinality.ONE; } - protected ValuesSourceAggregatorFactory innerBuild(AggregationContext context, - ValuesSourceConfig config, - AggregatorFactory parent, - AggregatorFactories.Builder subFactoriesBuilder) throws IOException { + protected ValuesSourceAggregatorFactory innerBuild( + AggregationContext context, + ValuesSourceConfig config, + AggregatorFactory parent, + AggregatorFactories.Builder 
subFactoriesBuilder + ) throws IOException { - MissingAggregatorSupplier aggregatorSupplier = - context.getValuesSourceRegistry().getAggregator(REGISTRY_KEY, config); + MissingAggregatorSupplier aggregatorSupplier = context.getValuesSourceRegistry().getAggregator(REGISTRY_KEY, config); - return new MissingAggregatorFactory(name, config, context, - parent, subFactoriesBuilder, metadata, aggregatorSupplier); + return new MissingAggregatorFactory(name, config, context, parent, subFactoriesBuilder, metadata, aggregatorSupplier); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/missing/MissingAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/missing/MissingAggregator.java index 20d54aabf5409..4077b4de7982d 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/missing/MissingAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/missing/MissingAggregator.java @@ -29,21 +29,21 @@ public class MissingAggregator extends BucketsAggregator implements SingleBucket private final ValuesSource valuesSource; public MissingAggregator( - String name, - AggregatorFactories factories, - ValuesSourceConfig valuesSourceConfig, - AggregationContext context, - Aggregator parent, - CardinalityUpperBound cardinality, - Map metadata) throws IOException { + String name, + AggregatorFactories factories, + ValuesSourceConfig valuesSourceConfig, + AggregationContext context, + Aggregator parent, + CardinalityUpperBound cardinality, + Map metadata + ) throws IOException { super(name, factories, context, parent, cardinality, metadata); // TODO: Stop using nulls here this.valuesSource = valuesSourceConfig.hasValues() ? valuesSourceConfig.getValuesSource() : null; } @Override - public LeafBucketCollector getLeafCollector(LeafReaderContext ctx, - final LeafBucketCollector sub) throws IOException { + public LeafBucketCollector getLeafCollector(LeafReaderContext ctx, final LeafBucketCollector sub) throws IOException { final DocValueBits docsWithValue; if (valuesSource != null) { docsWithValue = valuesSource.docsWithValue(ctx); @@ -67,8 +67,15 @@ public void collect(int doc, long bucket) throws IOException { @Override public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException { - return buildAggregationsForSingleBucket(owningBucketOrds, (owningBucketOrd, subAggregationResults) -> - new InternalMissing(name, bucketDocCount(owningBucketOrd), subAggregationResults, metadata())); + return buildAggregationsForSingleBucket( + owningBucketOrds, + (owningBucketOrd, subAggregationResults) -> new InternalMissing( + name, + bucketDocCount(owningBucketOrd), + subAggregationResults, + metadata() + ) + ); } @Override @@ -77,5 +84,3 @@ public InternalAggregation buildEmptyAggregation() { } } - - diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/missing/MissingAggregatorFactory.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/missing/MissingAggregatorFactory.java index 37388c181d263..f93b409bd86f5 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/missing/MissingAggregatorFactory.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/missing/MissingAggregatorFactory.java @@ -29,10 +29,15 @@ public static void registerAggregators(ValuesSourceRegistry.Builder builder) { builder.register(MissingAggregationBuilder.REGISTRY_KEY, CoreValuesSourceType.ALL_CORE, MissingAggregator::new, 
true); } - public MissingAggregatorFactory(String name, ValuesSourceConfig config, AggregationContext context, - AggregatorFactory parent, AggregatorFactories.Builder subFactoriesBuilder, - Map metadata, - MissingAggregatorSupplier aggregatorSupplier) throws IOException { + public MissingAggregatorFactory( + String name, + ValuesSourceConfig config, + AggregationContext context, + AggregatorFactory parent, + AggregatorFactories.Builder subFactoriesBuilder, + Map metadata, + MissingAggregatorSupplier aggregatorSupplier + ) throws IOException { super(name, config, context, parent, subFactoriesBuilder, metadata); this.aggregatorSupplier = aggregatorSupplier; } @@ -43,10 +48,8 @@ protected MissingAggregator createUnmapped(Aggregator parent, Map metadata) throws IOException { - return aggregatorSupplier - .build(name, factories, config, context, parent, cardinality, metadata); + protected Aggregator doCreateInternal(Aggregator parent, CardinalityUpperBound cardinality, Map metadata) + throws IOException { + return aggregatorSupplier.build(name, factories, config, context, parent, cardinality, metadata); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/missing/MissingAggregatorSupplier.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/missing/MissingAggregatorSupplier.java index f17ddad898e96..43e8ec8fb6174 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/missing/MissingAggregatorSupplier.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/missing/MissingAggregatorSupplier.java @@ -20,11 +20,13 @@ @FunctionalInterface public interface MissingAggregatorSupplier { - Aggregator build(String name, - AggregatorFactories factories, - ValuesSourceConfig valuesSourceConfig, - AggregationContext aggregationContext, - Aggregator parent, - CardinalityUpperBound cardinality, - Map metadata) throws IOException; + Aggregator build( + String name, + AggregatorFactories factories, + ValuesSourceConfig valuesSourceConfig, + AggregationContext aggregationContext, + Aggregator parent, + CardinalityUpperBound cardinality, + Map metadata + ) throws IOException; } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/Nested.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/Nested.java index 85981858872dd..c37164a3eddef 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/Nested.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/Nested.java @@ -12,5 +12,4 @@ /** * A {@code nested} aggregation. Defines a single bucket that holds all the nested documents of a specific path. 
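* The bucket's doc count is therefore the number of matching nested documents rather than the number of root documents.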
*/ -public interface Nested extends SingleBucketAggregation { -} +public interface Nested extends SingleBucketAggregation {} diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregationBuilder.java index d3e2bbc4d9ab5..437e62b129ab5 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregationBuilder.java @@ -82,14 +82,12 @@ public BucketCardinality bucketCardinality() { } @Override - protected AggregatorFactory doBuild(AggregationContext context, - AggregatorFactory parent, - Builder subFactoriesBuilder) throws IOException { + protected AggregatorFactory doBuild(AggregationContext context, AggregatorFactory parent, Builder subFactoriesBuilder) + throws IOException { ObjectMapper childObjectMapper = context.getObjectMapper(path); if (childObjectMapper == null) { // in case the path has been unmapped: - return new NestedAggregatorFactory(name, null, null, context, - parent, subFactoriesBuilder, metadata); + return new NestedAggregatorFactory(name, null, null, context, parent, subFactoriesBuilder, metadata); } if (childObjectMapper.isNested() == false) { @@ -97,8 +95,15 @@ protected AggregatorFactory doBuild(AggregationContext context, } try { NestedObjectMapper parentObjectMapper = context.nestedScope().nextLevel((NestedObjectMapper) childObjectMapper); - return new NestedAggregatorFactory(name, parentObjectMapper, (NestedObjectMapper) childObjectMapper, context, - parent, subFactoriesBuilder, metadata); + return new NestedAggregatorFactory( + name, + parentObjectMapper, + (NestedObjectMapper) childObjectMapper, + context, + parent, + subFactoriesBuilder, + metadata + ); } finally { context.nestedScope().previousLevel(); } @@ -124,8 +129,10 @@ public static NestedAggregationBuilder parse(String aggregationName, XContentPar if (NestedAggregator.PATH_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { path = parser.text(); } else { - throw new ParsingException(parser.getTokenLocation(), - "Unknown key for a " + token + " in [" + aggregationName + "]: [" + currentFieldName + "]."); + throw new ParsingException( + parser.getTokenLocation(), + "Unknown key for a " + token + " in [" + aggregationName + "]: [" + currentFieldName + "]." 
+ ); } } else { throw new ParsingException(parser.getTokenLocation(), "Unexpected token " + token + " in [" + aggregationName + "]."); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregator.java index 72c3403fe2c6d..7ee4484c69b7f 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregator.java @@ -8,6 +8,7 @@ package org.elasticsearch.search.aggregations.bucket.nested; import com.carrotsearch.hppc.LongArrayList; + import org.apache.lucene.index.IndexReaderContext; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.ReaderUtil; @@ -20,8 +21,8 @@ import org.apache.lucene.search.Weight; import org.apache.lucene.search.join.BitSetProducer; import org.apache.lucene.util.BitSet; -import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.lucene.search.Queries; +import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.index.mapper.NestedObjectMapper; import org.elasticsearch.search.aggregations.Aggregator; import org.elasticsearch.search.aggregations.AggregatorFactories; @@ -58,8 +59,7 @@ public class NestedAggregator extends BucketsAggregator implements SingleBucketA ) throws IOException { super(name, factories, context, parent, cardinality, metadata); - Query parentFilter = parentObjectMapper != null ? parentObjectMapper.nestedTypeFilter() - : Queries.newNonNestedFilter(); + Query parentFilter = parentObjectMapper != null ? parentObjectMapper.nestedTypeFilter() : Queries.newNonNestedFilter(); this.parentFilter = context.bitsetFilterCache().getBitSetProducer(parentFilter); this.childFilter = childObjectMapper.nestedTypeFilter(); this.collectsFromSingleBucket = cardinality.map(estimate -> estimate < 2); @@ -120,8 +120,15 @@ private void processBufferedDocs() throws IOException { @Override public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException { - return buildAggregationsForSingleBucket(owningBucketOrds, (owningBucketOrd, subAggregationResults) -> - new InternalNested(name, bucketDocCount(owningBucketOrd), subAggregationResults, metadata())); + return buildAggregationsForSingleBucket( + owningBucketOrds, + (owningBucketOrd, subAggregationResults) -> new InternalNested( + name, + bucketDocCount(owningBucketOrd), + subAggregationResults, + metadata() + ) + ); } @Override @@ -178,7 +185,6 @@ void processBufferedChildBuckets() throws IOException { return; } - final int prevParentDoc = parentDocs.prevSetBit(currentParentDoc - 1); int childDocId = childDocs.docID(); if (childDocId <= prevParentDoc) { @@ -202,7 +208,9 @@ private static class CachedScorable extends Scorable { float score; @Override - public final float score() { return score; } + public final float score() { + return score; + } @Override public int docID() { diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorFactory.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorFactory.java index a7d7a9e6d0cc0..8042c692c5e56 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorFactory.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorFactory.java @@ -25,9 +25,15 @@ 
public class NestedAggregatorFactory extends AggregatorFactory { private final NestedObjectMapper parentObjectMapper; private final NestedObjectMapper childObjectMapper; - NestedAggregatorFactory(String name, NestedObjectMapper parentObjectMapper, NestedObjectMapper childObjectMapper, - AggregationContext context, AggregatorFactory parent, AggregatorFactories.Builder subFactories, - Map metadata) throws IOException { + NestedAggregatorFactory( + String name, + NestedObjectMapper parentObjectMapper, + NestedObjectMapper childObjectMapper, + AggregationContext context, + AggregatorFactory parent, + AggregatorFactories.Builder subFactories, + Map metadata + ) throws IOException { super(name, context, parent, subFactories, metadata); this.parentObjectMapper = parentObjectMapper; this.childObjectMapper = childObjectMapper; @@ -44,11 +50,8 @@ public Aggregator createInternal(Aggregator parent, CardinalityUpperBound cardin private static final class Unmapped extends NonCollectingAggregator { - Unmapped(String name, - AggregationContext context, - Aggregator parent, - AggregatorFactories factories, - Map metadata) throws IOException { + Unmapped(String name, AggregationContext context, Aggregator parent, AggregatorFactories factories, Map metadata) + throws IOException { super(name, context, parent, factories, metadata); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/ReverseNested.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/ReverseNested.java index 992509ad73ab8..2b4aca36c07c3 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/ReverseNested.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/ReverseNested.java @@ -13,5 +13,4 @@ /** * A {@code reverse nested} aggregation. Defines a single bucket that holds all the parent documents for a specific nested path. 
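* It is only valid inside a {@code nested} aggregation, where it joins back from nested documents to their root or parent documents.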
*/ -public interface ReverseNested extends SingleBucketAggregation { -} +public interface ReverseNested extends SingleBucketAggregation {} diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/ReverseNestedAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/ReverseNestedAggregationBuilder.java index 6b62758a47e6c..bb18a00102398 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/ReverseNestedAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/ReverseNestedAggregationBuilder.java @@ -36,8 +36,7 @@ public ReverseNestedAggregationBuilder(String name) { super(name); } - public ReverseNestedAggregationBuilder(ReverseNestedAggregationBuilder clone, - Builder factoriesBuilder, Map map) { + public ReverseNestedAggregationBuilder(ReverseNestedAggregationBuilder clone, Builder factoriesBuilder, Map map) { super(clone, factoriesBuilder, map); this.path = clone.path; } @@ -107,8 +106,7 @@ protected AggregatorFactory doBuild(AggregationContext context, AggregatorFactor NestedObjectMapper nestedMapper = (NestedObjectMapper) parentObjectMapper; try { nestedScope.nextLevel(nestedMapper); - return new ReverseNestedAggregatorFactory(name, false, nestedMapper, context, parent, subFactoriesBuilder, - metadata); + return new ReverseNestedAggregatorFactory(name, false, nestedMapper, context, parent, subFactoriesBuilder, metadata); } finally { nestedScope.previousLevel(); } @@ -146,16 +144,17 @@ public static ReverseNestedAggregationBuilder parse(String aggregationName, XCon if ("path".equals(currentFieldName)) { path = parser.text(); } else { - throw new ParsingException(parser.getTokenLocation(), - "Unknown key for a " + token + " in [" + aggregationName + "]: [" + currentFieldName + "]."); + throw new ParsingException( + parser.getTokenLocation(), + "Unknown key for a " + token + " in [" + aggregationName + "]: [" + currentFieldName + "]." 
+ ); } } else { throw new ParsingException(parser.getTokenLocation(), "Unexpected token " + token + " in [" + aggregationName + "]."); } } - ReverseNestedAggregationBuilder factory = new ReverseNestedAggregationBuilder( - aggregationName); + ReverseNestedAggregationBuilder factory = new ReverseNestedAggregationBuilder(aggregationName); if (path != null) { factory.path(path); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/ReverseNestedAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/ReverseNestedAggregator.java index 8b335056a78cd..1e7df769ca6b6 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/ReverseNestedAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/ReverseNestedAggregator.java @@ -8,13 +8,14 @@ package org.elasticsearch.search.aggregations.bucket.nested; import com.carrotsearch.hppc.LongIntHashMap; + import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.Query; import org.apache.lucene.search.join.BitSetProducer; import org.apache.lucene.util.BitSet; -import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.lucene.search.Queries; +import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.index.mapper.NestedObjectMapper; import org.elasticsearch.search.aggregations.Aggregator; import org.elasticsearch.search.aggregations.AggregatorFactories; @@ -36,9 +37,15 @@ public class ReverseNestedAggregator extends BucketsAggregator implements Single private final Query parentFilter; private final BitSetProducer parentBitsetProducer; - public ReverseNestedAggregator(String name, AggregatorFactories factories, NestedObjectMapper objectMapper, - AggregationContext context, Aggregator parent, CardinalityUpperBound cardinality, Map metadata) - throws IOException { + public ReverseNestedAggregator( + String name, + AggregatorFactories factories, + NestedObjectMapper objectMapper, + AggregationContext context, + Aggregator parent, + CardinalityUpperBound cardinality, + Map metadata + ) throws IOException { super(name, factories, context, parent, cardinality, metadata); if (objectMapper == null) { parentFilter = Queries.newNonNestedFilter(); @@ -81,8 +88,15 @@ public void collect(int childDoc, long bucket) throws IOException { @Override public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException { - return buildAggregationsForSingleBucket(owningBucketOrds, (owningBucketOrd, subAggregationResults) -> - new InternalReverseNested(name, bucketDocCount(owningBucketOrd), subAggregationResults, metadata())); + return buildAggregationsForSingleBucket( + owningBucketOrds, + (owningBucketOrd, subAggregationResults) -> new InternalReverseNested( + name, + bucketDocCount(owningBucketOrd), + subAggregationResults, + metadata() + ) + ); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/ReverseNestedAggregatorFactory.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/ReverseNestedAggregatorFactory.java index f03c6744e6c92..85dbc0850bbc6 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/ReverseNestedAggregatorFactory.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/ReverseNestedAggregatorFactory.java @@ -25,10 +25,15 @@ public class 
ReverseNestedAggregatorFactory extends AggregatorFactory { private final boolean unmapped; private final NestedObjectMapper parentObjectMapper; - public ReverseNestedAggregatorFactory(String name, boolean unmapped, NestedObjectMapper parentObjectMapper, - AggregationContext context, AggregatorFactory parent, - AggregatorFactories.Builder subFactories, - Map metadata) throws IOException { + public ReverseNestedAggregatorFactory( + String name, + boolean unmapped, + NestedObjectMapper parentObjectMapper, + AggregationContext context, + AggregatorFactory parent, + AggregatorFactories.Builder subFactories, + Map metadata + ) throws IOException { super(name, context, parent, subFactories, metadata); this.unmapped = unmapped; this.parentObjectMapper = parentObjectMapper; @@ -46,11 +51,8 @@ public Aggregator createInternal(Aggregator parent, CardinalityUpperBound cardin private static final class Unmapped extends NonCollectingAggregator { - Unmapped(String name, - AggregationContext context, - Aggregator parent, - AggregatorFactories factories, - Map metadata) throws IOException { + Unmapped(String name, AggregationContext context, Aggregator parent, AggregatorFactories factories, Map metadata) + throws IOException { super(name, context, parent, factories, metadata); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/AbstractRangeAggregatorFactory.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/AbstractRangeAggregatorFactory.java index 34b7f7c86811c..a084f251693a4 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/AbstractRangeAggregatorFactory.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/AbstractRangeAggregatorFactory.java @@ -30,17 +30,19 @@ public class AbstractRangeAggregatorFactory extends ValuesSourc private final ValuesSourceRegistry.RegistryKey registryKey; private final RangeAggregatorSupplier aggregatorSupplier; - public AbstractRangeAggregatorFactory(String name, - ValuesSourceRegistry.RegistryKey registryKey, - ValuesSourceConfig config, - R[] ranges, - boolean keyed, - InternalRange.Factory rangeFactory, - AggregationContext context, - AggregatorFactory parent, - AggregatorFactories.Builder subFactoriesBuilder, - Map metadata, - RangeAggregatorSupplier aggregatorSupplier) throws IOException { + public AbstractRangeAggregatorFactory( + String name, + ValuesSourceRegistry.RegistryKey registryKey, + ValuesSourceConfig config, + R[] ranges, + boolean keyed, + InternalRange.Factory rangeFactory, + AggregationContext context, + AggregatorFactory parent, + AggregatorFactories.Builder subFactoriesBuilder, + Map metadata, + RangeAggregatorSupplier aggregatorSupplier + ) throws IOException { super(name, config, context, parent, subFactoriesBuilder, metadata); this.ranges = ranges; this.keyed = keyed; @@ -55,23 +57,8 @@ protected Aggregator createUnmapped(Aggregator parent, Map metad } @Override - protected Aggregator doCreateInternal( - Aggregator parent, - CardinalityUpperBound cardinality, - Map metadata - ) throws IOException { - return aggregatorSupplier - .build( - name, - factories, - config, - rangeFactory, - ranges, - keyed, - context, - parent, - cardinality, - metadata - ); + protected Aggregator doCreateInternal(Aggregator parent, CardinalityUpperBound cardinality, Map metadata) + throws IOException { + return aggregatorSupplier.build(name, factories, config, rangeFactory, ranges, keyed, context, parent, cardinality, metadata); } } diff --git 
a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/AbstractRangeBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/AbstractRangeBuilder.java index 314876ce8bd6a..c5d15de4927a3 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/AbstractRangeBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/AbstractRangeBuilder.java @@ -25,8 +25,8 @@ import java.util.Objects; import java.util.function.Function; -public abstract class AbstractRangeBuilder, R extends Range> - extends ValuesSourceAggregationBuilder { +public abstract class AbstractRangeBuilder, R extends Range> extends ValuesSourceAggregationBuilder< + AB> { protected final InternalRange.Factory rangeFactory; protected List ranges = new ArrayList<>(); @@ -37,8 +37,11 @@ protected AbstractRangeBuilder(String name, InternalRange.Factory rangeFac this.rangeFactory = rangeFactory; } - protected AbstractRangeBuilder(AbstractRangeBuilder clone, - AggregatorFactories.Builder factoriesBuilder, Map metadata) { + protected AbstractRangeBuilder( + AbstractRangeBuilder clone, + AggregatorFactories.Builder factoriesBuilder, + Map metadata + ) { super(clone, factoriesBuilder, metadata); this.rangeFactory = clone.rangeFactory; this.ranges = new ArrayList<>(clone.ranges); @@ -49,7 +52,7 @@ protected AbstractRangeBuilder(AbstractRangeBuilder clone, * Read from a stream. */ protected AbstractRangeBuilder(StreamInput in, InternalRange.Factory rangeFactory, Writeable.Reader rangeReader) - throws IOException { + throws IOException { super(in); this.rangeFactory = rangeFactory; ranges = in.readList(rangeReader); @@ -58,7 +61,7 @@ protected AbstractRangeBuilder(StreamInput in, InternalRange.Factory range @Override protected ValuesSourceType defaultValueSourceType() { - // Copied over from the old targetValueType setting. Not sure what cases this is still relevant for. --Tozzi 2020-01-13 + // Copied over from the old targetValueType setting. Not sure what cases this is still relevant for. --Tozzi 2020-01-13 return rangeFactory.getValueSourceType(); } @@ -152,7 +155,6 @@ public boolean equals(Object obj) { if (obj == null || getClass() != obj.getClass()) return false; if (super.equals(obj) == false) return false; AbstractRangeBuilder other = (AbstractRangeBuilder) obj; - return Objects.equals(ranges, other.ranges) - && Objects.equals(keyed, other.keyed); + return Objects.equals(ranges, other.ranges) && Objects.equals(keyed, other.keyed); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/BinaryRangeAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/BinaryRangeAggregator.java index 9ebd994347b65..263da8f82de0c 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/BinaryRangeAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/BinaryRangeAggregator.java @@ -55,9 +55,7 @@ public Range(String key, BytesRef from, BytesRef to) { }; private static int compare(BytesRef a, BytesRef b, int m) { - return a == null - ? b == null ? 0 : -m - : b == null ? m : a.compareTo(b); + return a == null ? b == null ? 0 : -m : b == null ? 
m : a.compareTo(b); } final ValuesSource.Bytes valuesSource; @@ -65,10 +63,18 @@ private static int compare(BytesRef a, BytesRef b, int m) { final boolean keyed; final Range[] ranges; - public BinaryRangeAggregator(String name, AggregatorFactories factories, - ValuesSource valuesSource, DocValueFormat format, - List ranges, boolean keyed, AggregationContext context, - Aggregator parent, CardinalityUpperBound cardinality, Map metadata) throws IOException { + public BinaryRangeAggregator( + String name, + AggregatorFactories factories, + ValuesSource valuesSource, + DocValueFormat format, + List ranges, + boolean keyed, + AggregationContext context, + Aggregator parent, + CardinalityUpperBound cardinality, + Map metadata + ) throws IOException { super(name, factories, context, parent, cardinality.multiply(ranges.size()), metadata); this.valuesSource = (ValuesSource.Bytes) valuesSource; this.format = format; @@ -103,8 +109,7 @@ protected void doCollect(LeafBucketCollector sub, int doc, long bucket) throws I SortedBinaryDocValues values = valuesSource.bytesValues(ctx); return new SortedBinaryRangeLeafCollector(values, ranges, sub) { @Override - protected void doCollect(LeafBucketCollector sub, int doc, long bucket) - throws IOException { + protected void doCollect(LeafBucketCollector sub, int doc, long bucket) throws IOException { collectBucket(sub, doc, bucket); } }; @@ -117,11 +122,10 @@ abstract static class SortedSetRangeLeafCollector extends LeafBucketCollectorBas final SortedSetDocValues values; final LeafBucketCollector sub; - SortedSetRangeLeafCollector(SortedSetDocValues values, - Range[] ranges, LeafBucketCollector sub) throws IOException { + SortedSetRangeLeafCollector(SortedSetDocValues values, Range[] ranges, LeafBucketCollector sub) throws IOException { super(sub, values); for (int i = 1; i < ranges.length; ++i) { - if (RANGE_COMPARATOR.compare(ranges[i-1], ranges[i]) > 0) { + if (RANGE_COMPARATOR.compare(ranges[i - 1], ranges[i]) > 0) { throw new IllegalArgumentException("Ranges must be sorted"); } } @@ -152,7 +156,7 @@ abstract static class SortedSetRangeLeafCollector extends LeafBucketCollectorBas } maxTos[0] = tos[0]; for (int i = 1; i < tos.length; ++i) { - maxTos[i] = Math.max(maxTos[i-1], tos[i]); + maxTos[i] = Math.max(maxTos[i - 1], tos[i]); } } @@ -160,9 +164,7 @@ abstract static class SortedSetRangeLeafCollector extends LeafBucketCollectorBas public void collect(int doc, long bucket) throws IOException { if (values.advanceExact(doc)) { int lo = 0; - for (long ord = values - .nextOrd(); ord != SortedSetDocValues.NO_MORE_ORDS; ord = values - .nextOrd()) { + for (long ord = values.nextOrd(); ord != SortedSetDocValues.NO_MORE_ORDS; ord = values.nextOrd()) { lo = collect(doc, ord, bucket, lo); } } @@ -227,11 +229,10 @@ abstract static class SortedBinaryRangeLeafCollector extends LeafBucketCollector final SortedBinaryDocValues values; final LeafBucketCollector sub; - SortedBinaryRangeLeafCollector(SortedBinaryDocValues values, - Range[] ranges, LeafBucketCollector sub) { + SortedBinaryRangeLeafCollector(SortedBinaryDocValues values, Range[] ranges, LeafBucketCollector sub) { super(sub, values); for (int i = 1; i < ranges.length; ++i) { - if (RANGE_COMPARATOR.compare(ranges[i-1], ranges[i]) > 0) { + if (RANGE_COMPARATOR.compare(ranges[i - 1], ranges[i]) > 0) { throw new IllegalArgumentException("Ranges must be sorted"); } } @@ -243,10 +244,10 @@ abstract static class SortedBinaryRangeLeafCollector extends LeafBucketCollector maxTos[0] = ranges[0].to; } for (int i = 1; i < 
ranges.length; ++i) { - if (compare(ranges[i].to, maxTos[i-1], -1) >= 0) { + if (compare(ranges[i].to, maxTos[i - 1], -1) >= 0) { maxTos[i] = ranges[i].to; } else { - maxTos[i] = maxTos[i-1]; + maxTos[i] = maxTos[i - 1]; } } } @@ -316,11 +317,15 @@ private int collect(int doc, BytesRef value, long bucket, int lowBound) throws I @Override public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException { - return buildAggregationsForFixedBucketCount(owningBucketOrds, ranges.length, + return buildAggregationsForFixedBucketCount( + owningBucketOrds, + ranges.length, (offsetInOwningOrd, docCount, subAggregationResults) -> { Range range = ranges[offsetInOwningOrd]; return new InternalBinaryRange.Bucket(format, keyed, range.key, range.from, range.to, docCount, subAggregationResults); - }, buckets -> new InternalBinaryRange(name, format, keyed, buckets, metadata())); + }, + buckets -> new InternalBinaryRange(name, format, keyed, buckets, metadata()) + ); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/BinaryRangeAggregatorFactory.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/BinaryRangeAggregatorFactory.java index 52c62ba89d6c0..0b946acfb8334 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/BinaryRangeAggregatorFactory.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/BinaryRangeAggregatorFactory.java @@ -31,13 +31,17 @@ public static void registerAggregators(ValuesSourceRegistry.Builder builder) { private final List ranges; private final boolean keyed; - public BinaryRangeAggregatorFactory(String name, - ValuesSourceConfig config, - List ranges, boolean keyed, - AggregationContext context, - AggregatorFactory parent, Builder subFactoriesBuilder, - Map metadata, - IpRangeAggregatorSupplier aggregatorSupplier) throws IOException { + public BinaryRangeAggregatorFactory( + String name, + ValuesSourceConfig config, + List ranges, + boolean keyed, + AggregationContext context, + AggregatorFactory parent, + Builder subFactoriesBuilder, + Map metadata, + IpRangeAggregatorSupplier aggregatorSupplier + ) throws IOException { super(name, config, context, parent, subFactoriesBuilder, metadata); this.aggregatorSupplier = aggregatorSupplier; this.ranges = ranges; @@ -46,19 +50,35 @@ public BinaryRangeAggregatorFactory(String name, @Override protected Aggregator createUnmapped(Aggregator parent, Map metadata) throws IOException { - return new BinaryRangeAggregator(name, factories, null, config.format(), - ranges, keyed, context, parent, CardinalityUpperBound.NONE, metadata); + return new BinaryRangeAggregator( + name, + factories, + null, + config.format(), + ranges, + keyed, + context, + parent, + CardinalityUpperBound.NONE, + metadata + ); } @Override - protected Aggregator doCreateInternal( - Aggregator parent, - CardinalityUpperBound cardinality, - Map metadata - ) throws IOException { - return aggregatorSupplier - .build(name, factories, config.getValuesSource(), config.format(), - ranges, keyed, context, parent, cardinality, metadata); + protected Aggregator doCreateInternal(Aggregator parent, CardinalityUpperBound cardinality, Map metadata) + throws IOException { + return aggregatorSupplier.build( + name, + factories, + config.getValuesSource(), + config.format(), + ranges, + keyed, + context, + parent, + cardinality, + metadata + ); } } diff --git 
a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/DateRangeAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/DateRangeAggregationBuilder.java index 35035cf55b0e6..d7e18e2231e46 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/DateRangeAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/DateRangeAggregationBuilder.java @@ -37,8 +37,10 @@ public class DateRangeAggregationBuilder extends AbstractRangeBuilder PARSER = - ObjectParser.fromBuilder(NAME, DateRangeAggregationBuilder::new); + public static final ObjectParser PARSER = ObjectParser.fromBuilder( + NAME, + DateRangeAggregationBuilder::new + ); static { ValuesSourceAggregationBuilder.declareFields(PARSER, true, true, true); PARSER.declareBoolean(DateRangeAggregationBuilder::keyed, RangeAggregator.KEYED_FIELD); @@ -51,13 +53,9 @@ public class DateRangeAggregationBuilder extends AbstractRangeBuilder metadata) { + protected DateRangeAggregationBuilder( + DateRangeAggregationBuilder clone, + AggregatorFactories.Builder factoriesBuilder, + Map metadata + ) { super(clone, factoriesBuilder, metadata); } @@ -336,11 +336,13 @@ public DateRangeAggregationBuilder addUnboundedFrom(ZonedDateTime from) { } @Override - protected DateRangeAggregatorFactory innerBuild(AggregationContext context, ValuesSourceConfig config, - AggregatorFactory parent, - AggregatorFactories.Builder subFactoriesBuilder) throws IOException { - RangeAggregatorSupplier aggregatorSupplier = - context.getValuesSourceRegistry().getAggregator(REGISTRY_KEY, config); + protected DateRangeAggregatorFactory innerBuild( + AggregationContext context, + ValuesSourceConfig config, + AggregatorFactory parent, + AggregatorFactories.Builder subFactoriesBuilder + ) throws IOException { + RangeAggregatorSupplier aggregatorSupplier = context.getValuesSourceRegistry().getAggregator(REGISTRY_KEY, config); // We need to call processRanges here so they are parsed and we know whether `now` has been used before we make // the decision of whether to cache the request RangeAggregator.Range[] ranges = processRanges(range -> { @@ -367,7 +369,17 @@ protected DateRangeAggregatorFactory innerBuild(AggregationContext context, Valu if (ranges.length == 0) { throw new IllegalArgumentException("No [ranges] specified for the [" + this.getName() + "] aggregation"); } - return new DateRangeAggregatorFactory(name, config, ranges, keyed, rangeFactory, context, parent, subFactoriesBuilder, - metadata, aggregatorSupplier); + return new DateRangeAggregatorFactory( + name, + config, + ranges, + keyed, + rangeFactory, + context, + parent, + subFactoriesBuilder, + metadata, + aggregatorSupplier + ); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/GeoDistanceAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/GeoDistanceAggregationBuilder.java index a89edf09fe7ea..2a6a7bc5edd68 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/GeoDistanceAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/GeoDistanceAggregationBuilder.java @@ -8,7 +8,6 @@ package org.elasticsearch.search.aggregations.bucket.range; -import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.geo.GeoDistance; import org.elasticsearch.common.geo.GeoPoint; @@ 
-16,6 +15,7 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.unit.DistanceUnit; import org.elasticsearch.common.xcontent.ObjectParser; +import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParser.Token; @@ -62,14 +62,26 @@ public class GeoDistanceAggregationBuilder extends ValuesSourceAggregationBuilde } }, (p, c) -> GeoDistanceAggregationBuilder.parseRange(p), RangeAggregator.RANGES_FIELD); - PARSER.declareField(GeoDistanceAggregationBuilder::unit, p -> DistanceUnit.fromString(p.text()), - UNIT_FIELD, ObjectParser.ValueType.STRING); + PARSER.declareField( + GeoDistanceAggregationBuilder::unit, + p -> DistanceUnit.fromString(p.text()), + UNIT_FIELD, + ObjectParser.ValueType.STRING + ); - PARSER.declareField(GeoDistanceAggregationBuilder::distanceType, p -> GeoDistance.fromString(p.text()), - DISTANCE_TYPE_FIELD, ObjectParser.ValueType.STRING); + PARSER.declareField( + GeoDistanceAggregationBuilder::distanceType, + p -> GeoDistance.fromString(p.text()), + DISTANCE_TYPE_FIELD, + ObjectParser.ValueType.STRING + ); - PARSER.declareField(GeoDistanceAggregationBuilder::origin, GeoDistanceAggregationBuilder::parseGeoPoint, - ORIGIN_FIELD, ObjectParser.ValueType.OBJECT_ARRAY_OR_STRING); + PARSER.declareField( + GeoDistanceAggregationBuilder::origin, + GeoDistanceAggregationBuilder::parseGeoPoint, + ORIGIN_FIELD, + ObjectParser.ValueType.OBJECT_ARRAY_OR_STRING + ); } public static AggregationBuilder parse(String aggregationName, XContentParser parser) throws IOException { @@ -127,8 +139,10 @@ private static GeoPoint parseGeoPoint(XContentParser parser) throws IOException } else if (Double.isNaN(lat)) { lat = parser.doubleValue(); } else { - throw new ParsingException(parser.getTokenLocation(), "malformed [" + ORIGIN_FIELD.getPreferredName() - + "]: a geo point array must be of the form [lon, lat]"); + throw new ParsingException( + parser.getTokenLocation(), + "malformed [" + ORIGIN_FIELD.getPreferredName() + "]: a geo point array must be of the form [lon, lat]" + ); } } return new GeoPoint(lat, lon); @@ -149,8 +163,10 @@ private static GeoPoint parseGeoPoint(XContentParser parser) throws IOException } } if (Double.isNaN(lat) || Double.isNaN(lon)) { - throw new ParsingException(parser.getTokenLocation(), - "malformed [" + currentFieldName + "] geo point object. either [lat] or [lon] (or both) are " + "missing"); + throw new ParsingException( + parser.getTokenLocation(), + "malformed [" + currentFieldName + "] geo point object. either [lat] or [lon] (or both) are " + "missing" + ); } return new GeoPoint(lat, lon); } @@ -224,13 +240,15 @@ public GeoDistanceAggregationBuilder(String name, GeoPoint origin) { } } - private GeoDistanceAggregationBuilder(String name, GeoPoint origin, - InternalRange.Factory rangeFactory) { + private GeoDistanceAggregationBuilder( + String name, + GeoPoint origin, + InternalRange.Factory rangeFactory + ) { super(name); this.origin = origin; } - /** * Read from a stream. 
*/ @@ -423,20 +441,33 @@ public BucketCardinality bucketCardinality() { } @Override - protected ValuesSourceAggregatorFactory innerBuild(AggregationContext context, - ValuesSourceConfig config, - AggregatorFactory parent, - Builder subFactoriesBuilder) throws IOException { - GeoDistanceAggregatorSupplier aggregatorSupplier = - context.getValuesSourceRegistry().getAggregator(REGISTRY_KEY, config); + protected ValuesSourceAggregatorFactory innerBuild( + AggregationContext context, + ValuesSourceConfig config, + AggregatorFactory parent, + Builder subFactoriesBuilder + ) throws IOException { + GeoDistanceAggregatorSupplier aggregatorSupplier = context.getValuesSourceRegistry().getAggregator(REGISTRY_KEY, config); Range[] ranges = this.ranges.toArray(new Range[this.range().size()]); if (ranges.length == 0) { throw new IllegalArgumentException("No [ranges] specified for the [" + this.getName() + "] aggregation"); } - return new GeoDistanceRangeAggregatorFactory(name, config, origin, ranges, unit, distanceType, keyed, context, parent, - subFactoriesBuilder, metadata, aggregatorSupplier); + return new GeoDistanceRangeAggregatorFactory( + name, + config, + origin, + ranges, + unit, + distanceType, + keyed, + context, + parent, + subFactoriesBuilder, + metadata, + aggregatorSupplier + ); } @Override @@ -461,10 +492,10 @@ public boolean equals(Object obj) { if (super.equals(obj) == false) return false; GeoDistanceAggregationBuilder other = (GeoDistanceAggregationBuilder) obj; return Objects.equals(origin, other.origin) - && Objects.equals(ranges, other.ranges) - && Objects.equals(keyed, other.keyed) - && Objects.equals(distanceType, other.distanceType) - && Objects.equals(unit, other.unit); + && Objects.equals(ranges, other.ranges) + && Objects.equals(keyed, other.keyed) + && Objects.equals(distanceType, other.distanceType) + && Objects.equals(unit, other.unit); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/GeoDistanceAggregatorSupplier.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/GeoDistanceAggregatorSupplier.java index 905be289849b2..6864ed8115529 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/GeoDistanceAggregatorSupplier.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/GeoDistanceAggregatorSupplier.java @@ -35,5 +35,6 @@ Aggregator build( AggregationContext context, Aggregator parent, CardinalityUpperBound cardinality, - Map metadata) throws IOException; + Map metadata + ) throws IOException; } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/GeoDistanceRangeAggregatorFactory.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/GeoDistanceRangeAggregatorFactory.java index 1e1314e3da063..3fa68e5ee6bdd 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/GeoDistanceRangeAggregatorFactory.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/GeoDistanceRangeAggregatorFactory.java @@ -70,7 +70,8 @@ public static void registerAggregators(ValuesSourceRegistry.Builder builder) { metadata ); }, - true); + true + ); } private final GeoDistanceAggregatorSupplier aggregatorSupplier; @@ -81,12 +82,20 @@ public static void registerAggregators(ValuesSourceRegistry.Builder builder) { private final GeoDistance distanceType; private final boolean keyed; - public GeoDistanceRangeAggregatorFactory(String name, ValuesSourceConfig config, GeoPoint 
origin, - Range[] ranges, DistanceUnit unit, GeoDistance distanceType, boolean keyed, - AggregationContext context, AggregatorFactory parent, - AggregatorFactories.Builder subFactoriesBuilder, - Map metadata, - GeoDistanceAggregatorSupplier aggregatorSupplier) throws IOException { + public GeoDistanceRangeAggregatorFactory( + String name, + ValuesSourceConfig config, + GeoPoint origin, + Range[] ranges, + DistanceUnit unit, + GeoDistance distanceType, + boolean keyed, + AggregationContext context, + AggregatorFactory parent, + AggregatorFactories.Builder subFactoriesBuilder, + Map metadata, + GeoDistanceAggregatorSupplier aggregatorSupplier + ) throws IOException { super(name, config, context, parent, subFactoriesBuilder, metadata); this.aggregatorSupplier = aggregatorSupplier; this.origin = origin; @@ -98,33 +107,28 @@ public GeoDistanceRangeAggregatorFactory(String name, ValuesSourceConfig config, @Override protected Aggregator createUnmapped(Aggregator parent, Map metadata) throws IOException { - return new RangeAggregator.Unmapped<>(name, factories, ranges, keyed, config.format(), context, parent, - rangeFactory, metadata); + return new RangeAggregator.Unmapped<>(name, factories, ranges, keyed, config.format(), context, parent, rangeFactory, metadata); } @Override - protected Aggregator doCreateInternal( - Aggregator parent, - CardinalityUpperBound cardinality, - Map metadata - ) throws IOException { - return aggregatorSupplier - .build( - name, - factories, - distanceType, - origin, - unit, - config.getValuesSource(), - config.format(), - rangeFactory, - ranges, - keyed, - context, - parent, - cardinality, - metadata - ); + protected Aggregator doCreateInternal(Aggregator parent, CardinalityUpperBound cardinality, Map metadata) + throws IOException { + return aggregatorSupplier.build( + name, + factories, + distanceType, + origin, + unit, + config.getValuesSource(), + config.format(), + rangeFactory, + ranges, + keyed, + context, + parent, + cardinality, + metadata + ); } private static class DistanceSource extends ValuesSource.Numeric { @@ -134,8 +138,12 @@ private static class DistanceSource extends ValuesSource.Numeric { private final DistanceUnit units; private final org.elasticsearch.common.geo.GeoPoint origin; - DistanceSource(ValuesSource.GeoPoint source, GeoDistance distanceType, - org.elasticsearch.common.geo.GeoPoint origin, DistanceUnit units) { + DistanceSource( + ValuesSource.GeoPoint source, + GeoDistance distanceType, + org.elasticsearch.common.geo.GeoPoint origin, + DistanceUnit units + ) { this.source = source; // even if the geo points are unique, there's no guarantee the // distances are diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalBinaryRange.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalBinaryRange.java index d75db80229740..9ac3840104c1e 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalBinaryRange.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalBinaryRange.java @@ -29,9 +29,9 @@ import static java.util.Collections.unmodifiableList; /** A range aggregation for data that is encoded in doc values using a binary representation. 
*/ -public final class InternalBinaryRange - extends InternalMultiBucketAggregation - implements Range { +public final class InternalBinaryRange extends InternalMultiBucketAggregation + implements + Range { public static class Bucket extends InternalMultiBucketAggregation.InternalBucket implements Range.Bucket { @@ -42,8 +42,15 @@ public static class Bucket extends InternalMultiBucketAggregation.InternalBucket private final long docCount; private final InternalAggregations aggregations; - public Bucket(DocValueFormat format, boolean keyed, String key, BytesRef from, BytesRef to, - long docCount, InternalAggregations aggregations) { + public Bucket( + DocValueFormat format, + boolean keyed, + String key, + BytesRef from, + BytesRef to, + long docCount, + InternalAggregations aggregations + ) { this.format = format; this.keyed = keyed; this.key = key != null ? key : generateKey(from, to, format); @@ -54,8 +61,7 @@ public Bucket(DocValueFormat format, boolean keyed, String key, BytesRef from, B } private static String generateKey(BytesRef from, BytesRef to, DocValueFormat format) { - StringBuilder builder = new StringBuilder() - .append(from == null ? "*" : format.format(from)) + StringBuilder builder = new StringBuilder().append(from == null ? "*" : format.format(from)) .append("-") .append(to == null ? "*" : format.format(to)); return builder.toString(); @@ -156,10 +162,10 @@ public boolean equals(Object o) { if (docCount != bucket.docCount) return false; // keyed and format are ignored since they are already tested on the InternalBinaryRange object - return Objects.equals(key, bucket.key) && - Objects.equals(from, bucket.from) && - Objects.equals(to, bucket.to) && - Objects.equals(aggregations, bucket.aggregations); + return Objects.equals(key, bucket.key) + && Objects.equals(from, bucket.from) + && Objects.equals(to, bucket.to) + && Objects.equals(aggregations, bucket.aggregations); } @Override @@ -238,8 +244,17 @@ public InternalAggregation reduce(List aggregations, Reduce List buckets = new ArrayList<>(this.buckets.size()); for (int i = 0; i < this.buckets.size(); ++i) { Bucket b = this.buckets.get(i); - buckets.add(new Bucket(format, keyed, b.key, b.from, b.to, docCounts[i], - InternalAggregations.reduce(Arrays.asList(aggs[i]), reduceContext))); + buckets.add( + new Bucket( + format, + keyed, + b.key, + b.from, + b.to, + docCounts[i], + InternalAggregations.reduce(Arrays.asList(aggs[i]), reduceContext) + ) + ); } return new InternalBinaryRange(name, format, keyed, buckets, metadata); } @@ -253,8 +268,7 @@ protected Bucket reduceBucket(List buckets, ReduceContext context) { } @Override - public XContentBuilder doXContentBody(XContentBuilder builder, - Params params) throws IOException { + public XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException { if (keyed) { builder.startObject(CommonFields.BUCKETS.getPreferredName()); } else { @@ -271,7 +285,6 @@ public XContentBuilder doXContentBody(XContentBuilder builder, return builder; } - @Override public boolean equals(Object obj) { if (this == obj) return true; @@ -279,9 +292,7 @@ public boolean equals(Object obj) { if (super.equals(obj) == false) return false; InternalBinaryRange that = (InternalBinaryRange) obj; - return Objects.equals(buckets, that.buckets) - && Objects.equals(format, that.format) - && Objects.equals(keyed, that.keyed); + return Objects.equals(buckets, that.buckets) && Objects.equals(format, that.format) && Objects.equals(keyed, that.keyed); } public int hashCode() { diff --git 
a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalDateRange.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalDateRange.java index fb925e818618d..999d37e1fe65a 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalDateRange.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalDateRange.java @@ -25,26 +25,42 @@ public class InternalDateRange extends InternalRange aggregations, boolean keyed, - DocValueFormat formatter) { + public Bucket( + String key, + double from, + double to, + long docCount, + List aggregations, + boolean keyed, + DocValueFormat formatter + ) { super(key, from, to, docCount, InternalAggregations.from(aggregations), keyed, formatter); } - public Bucket(String key, double from, double to, long docCount, InternalAggregations aggregations, boolean keyed, - DocValueFormat formatter) { + public Bucket( + String key, + double from, + double to, + long docCount, + InternalAggregations aggregations, + boolean keyed, + DocValueFormat formatter + ) { super(key, from, to, docCount, aggregations, keyed, formatter); } @Override public ZonedDateTime getFrom() { - return Double.isInfinite(((Number) from).doubleValue()) ? null : - Instant.ofEpochMilli(((Number) from).longValue()).atZone(ZoneOffset.UTC); + return Double.isInfinite(((Number) from).doubleValue()) + ? null + : Instant.ofEpochMilli(((Number) from).longValue()).atZone(ZoneOffset.UTC); } @Override public ZonedDateTime getTo() { - return Double.isInfinite(((Number) to).doubleValue()) ? null : - Instant.ofEpochMilli(((Number) to).longValue()).atZone(ZoneOffset.UTC); + return Double.isInfinite(((Number) to).doubleValue()) + ? null + : Instant.ofEpochMilli(((Number) to).longValue()).atZone(ZoneOffset.UTC); } private Double internalGetFrom() { @@ -76,8 +92,13 @@ public ValueType getValueType() { } @Override - public InternalDateRange create(String name, List ranges, DocValueFormat formatter, boolean keyed, - Map metadata) { + public InternalDateRange create( + String name, + List ranges, + DocValueFormat formatter, + boolean keyed, + Map metadata + ) { return new InternalDateRange(name, ranges, formatter, keyed, metadata); } @@ -88,20 +109,39 @@ public InternalDateRange create(List ranges, InternalDateRange prototype } @Override - public Bucket createBucket(String key, double from, double to, long docCount, InternalAggregations aggregations, boolean keyed, - DocValueFormat formatter) { + public Bucket createBucket( + String key, + double from, + double to, + long docCount, + InternalAggregations aggregations, + boolean keyed, + DocValueFormat formatter + ) { return new Bucket(key, from, to, docCount, aggregations, keyed, formatter); } @Override public Bucket createBucket(InternalAggregations aggregations, Bucket prototype) { - return new Bucket(prototype.getKey(), prototype.internalGetFrom(), prototype.internalGetTo(), - prototype.getDocCount(), aggregations, prototype.getKeyed(), prototype.getFormat()); + return new Bucket( + prototype.getKey(), + prototype.internalGetFrom(), + prototype.internalGetTo(), + prototype.getDocCount(), + aggregations, + prototype.getKeyed(), + prototype.getFormat() + ); } } - InternalDateRange(String name, List ranges, DocValueFormat formatter, boolean keyed, - Map metadata) { + InternalDateRange( + String name, + List ranges, + DocValueFormat formatter, + boolean keyed, + Map metadata + ) { super(name, ranges, formatter, keyed, metadata); } diff --git 
a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalGeoDistance.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalGeoDistance.java index 647d1f0fdbf28..028fce1b4c567 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalGeoDistance.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalGeoDistance.java @@ -49,8 +49,13 @@ public ValueType getValueType() { } @Override - public InternalGeoDistance create(String name, List ranges, DocValueFormat format, boolean keyed, - Map metadata) { + public InternalGeoDistance create( + String name, + List ranges, + DocValueFormat format, + boolean keyed, + Map metadata + ) { return new InternalGeoDistance(name, ranges, keyed, metadata); } @@ -60,15 +65,28 @@ public InternalGeoDistance create(List ranges, InternalGeoDistance proto } @Override - public Bucket createBucket(String key, double from, double to, long docCount, InternalAggregations aggregations, boolean keyed, - DocValueFormat format) { + public Bucket createBucket( + String key, + double from, + double to, + long docCount, + InternalAggregations aggregations, + boolean keyed, + DocValueFormat format + ) { return new Bucket(key, from, to, docCount, aggregations, keyed); } @Override public Bucket createBucket(InternalAggregations aggregations, Bucket prototype) { - return new Bucket(prototype.getKey(), ((Number) prototype.getFrom()).doubleValue(), ((Number) prototype.getTo()).doubleValue(), - prototype.getDocCount(), aggregations, prototype.getKeyed()); + return new Bucket( + prototype.getKey(), + ((Number) prototype.getFrom()).doubleValue(), + ((Number) prototype.getTo()).doubleValue(), + prototype.getDocCount(), + aggregations, + prototype.getKeyed() + ); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalRange.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalRange.java index 83bc3a43b2db4..b23e60b3ff571 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalRange.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalRange.java @@ -25,7 +25,8 @@ import java.util.Objects; public class InternalRange> extends InternalMultiBucketAggregation - implements Range { + implements + Range { @SuppressWarnings("rawtypes") static final Factory FACTORY = new Factory(); @@ -39,8 +40,15 @@ public static class Bucket extends InternalMultiBucketAggregation.InternalBucket private final InternalAggregations aggregations; private final String key; - public Bucket(String key, double from, double to, long docCount, InternalAggregations aggregations, boolean keyed, - DocValueFormat format) { + public Bucket( + String key, + double from, + double to, + long docCount, + InternalAggregations aggregations, + boolean keyed, + DocValueFormat format + ) { this.keyed = keyed; this.format = format; this.key = key != null ? key : generateKey(from, to, format); @@ -138,8 +146,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws } private static String generateKey(double from, double to, DocValueFormat format) { - StringBuilder builder = new StringBuilder() - .append(Double.isInfinite(from) ? "*" : format.format(from)) + StringBuilder builder = new StringBuilder().append(Double.isInfinite(from) ? "*" : format.format(from)) .append("-") .append(Double.isInfinite(to) ? 
"*" : format.format(to)); return builder.toString(); @@ -164,10 +171,10 @@ public boolean equals(Object other) { } Bucket that = (Bucket) other; return Objects.equals(from, that.from) - && Objects.equals(to, that.to) - && Objects.equals(docCount, that.docCount) - && Objects.equals(aggregations, that.aggregations) - && Objects.equals(key, that.key); + && Objects.equals(to, that.to) + && Objects.equals(docCount, that.docCount) + && Objects.equals(aggregations, that.aggregations) + && Objects.equals(key, that.key); } @Override @@ -191,8 +198,15 @@ public R create(String name, List ranges, DocValueFormat format, boolean keye } @SuppressWarnings("unchecked") - public B createBucket(String key, double from, double to, long docCount, InternalAggregations aggregations, boolean keyed, - DocValueFormat format) { + public B createBucket( + String key, + double from, + double to, + long docCount, + InternalAggregations aggregations, + boolean keyed, + DocValueFormat format + ) { return (B) new Bucket(key, from, to, docCount, aggregations, keyed, format); } @@ -203,8 +217,15 @@ public R create(List ranges, R prototype) { @SuppressWarnings("unchecked") public B createBucket(InternalAggregations aggregations, B prototype) { - return (B) new Bucket(prototype.getKey(), prototype.from, prototype.to, prototype.getDocCount(), aggregations, prototype.keyed, - prototype.format); + return (B) new Bucket( + prototype.getKey(), + prototype.from, + prototype.to, + prototype.getDocCount(), + aggregations, + prototype.keyed, + prototype.format + ); } } @@ -230,8 +251,17 @@ public InternalRange(StreamInput in) throws IOException { List ranges = new ArrayList<>(size); for (int i = 0; i < size; i++) { String key = in.readString(); - ranges.add(getFactory().createBucket(key, in.readDouble(), in.readDouble(), in.readVLong(), - InternalAggregations.readFrom(in), keyed, format)); + ranges.add( + getFactory().createBucket( + key, + in.readDouble(), + in.readDouble(), + in.readVLong(), + InternalAggregations.readFrom(in), + keyed, + format + ) + ); } this.ranges = ranges; } @@ -339,9 +369,7 @@ public boolean equals(Object obj) { if (obj == null || getClass() != obj.getClass()) return false; if (super.equals(obj) == false) return false; - InternalRange that = (InternalRange) obj; - return Objects.equals(ranges, that.ranges) - && Objects.equals(format, that.format) - && Objects.equals(keyed, that.keyed); + InternalRange that = (InternalRange) obj; + return Objects.equals(ranges, that.ranges) && Objects.equals(format, that.format) && Objects.equals(keyed, that.keyed); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/IpRangeAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/IpRangeAggregationBuilder.java index 4d2154089c1ee..f361c3f9f420c 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/IpRangeAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/IpRangeAggregationBuilder.java @@ -9,17 +9,17 @@ import org.apache.lucene.document.InetAddressPoint; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.ParsingException; -import org.elasticsearch.core.Tuple; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.network.InetAddresses; import org.elasticsearch.common.xcontent.ObjectParser; +import 
org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParser.Token; +import org.elasticsearch.core.Tuple; import org.elasticsearch.script.Script; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregatorFactories.Builder; @@ -41,7 +41,6 @@ import java.util.Map; import java.util.Objects; - public final class IpRangeAggregationBuilder extends ValuesSourceAggregationBuilder { public static final String NAME = "ip_range"; public static final ValuesSourceRegistry.RegistryKey REGISTRY_KEY = new ValuesSourceRegistry.RegistryKey<>( @@ -50,15 +49,18 @@ public final class IpRangeAggregationBuilder extends ValuesSourceAggregationBuil ); private static final ParseField MASK_FIELD = new ParseField("mask"); - public static final ObjectParser PARSER = - ObjectParser.fromBuilder(NAME, IpRangeAggregationBuilder::new); + public static final ObjectParser PARSER = ObjectParser.fromBuilder( + NAME, + IpRangeAggregationBuilder::new + ); static { ValuesSourceAggregationBuilder.declareFields(PARSER, false, false, false); PARSER.declareBoolean(IpRangeAggregationBuilder::keyed, RangeAggregator.KEYED_FIELD); PARSER.declareObjectArray((agg, ranges) -> { - for (Range range : ranges) agg.addRange(range); + for (Range range : ranges) + agg.addRange(range); }, (p, c) -> IpRangeAggregationBuilder.parseRange(p), RangeAggregator.RANGES_FIELD); } @@ -176,9 +178,7 @@ public boolean equals(Object obj) { return false; } Range that = (Range) obj; - return Objects.equals(key, that.key) - && Objects.equals(from, that.from) - && Objects.equals(to, that.to); + return Objects.equals(key, that.key) && Objects.equals(from, that.from) && Objects.equals(to, that.to); } @Override @@ -216,7 +216,7 @@ public IpRangeAggregationBuilder(String name) { protected IpRangeAggregationBuilder(IpRangeAggregationBuilder clone, Builder factoriesBuilder, Map metadata) { super(clone, factoriesBuilder, metadata); - this.ranges = new ArrayList<>(clone.ranges); + this.ranges = new ArrayList<>(clone.ranges); this.keyed = clone.keyed; } @@ -371,21 +371,32 @@ public BucketCardinality bucketCardinality() { @Override protected ValuesSourceAggregatorFactory innerBuild( - AggregationContext context, ValuesSourceConfig config, - AggregatorFactory parent, Builder subFactoriesBuilder) throws IOException { - IpRangeAggregatorSupplier aggregatorSupplier = - context.getValuesSourceRegistry().getAggregator(REGISTRY_KEY, config); + AggregationContext context, + ValuesSourceConfig config, + AggregatorFactory parent, + Builder subFactoriesBuilder + ) throws IOException { + IpRangeAggregatorSupplier aggregatorSupplier = context.getValuesSourceRegistry().getAggregator(REGISTRY_KEY, config); List ranges = new ArrayList<>(); - if(this.ranges.size() == 0){ + if (this.ranges.size() == 0) { throw new IllegalArgumentException("No [ranges] specified for the [" + this.getName() + "] aggregation"); } for (Range range : this.ranges) { ranges.add(new BinaryRangeAggregator.Range(range.key, toBytesRef(range.from), toBytesRef(range.to))); } - return new BinaryRangeAggregatorFactory(name, config, ranges, - keyed, context, parent, subFactoriesBuilder, metadata, aggregatorSupplier); + return new BinaryRangeAggregatorFactory( + name, + config, + ranges, + keyed, + context, + parent, + subFactoriesBuilder, + metadata, + 
aggregatorSupplier + ); } @Override @@ -406,7 +417,6 @@ public boolean equals(Object obj) { if (obj == null || getClass() != obj.getClass()) return false; if (super.equals(obj) == false) return false; IpRangeAggregationBuilder that = (IpRangeAggregationBuilder) obj; - return keyed == that.keyed - && ranges.equals(that.ranges); + return keyed == that.keyed && ranges.equals(that.ranges); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/IpRangeAggregatorSupplier.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/IpRangeAggregatorSupplier.java index 21d1ffb2a9318..deba42dffeeed 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/IpRangeAggregatorSupplier.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/IpRangeAggregatorSupplier.java @@ -21,14 +21,16 @@ public interface IpRangeAggregatorSupplier { - Aggregator build(String name, - AggregatorFactories factories, - ValuesSource valuesSource, - DocValueFormat format, - List ranges, - boolean keyed, - AggregationContext context, - Aggregator parent, - CardinalityUpperBound cardinality, - Map metadata) throws IOException; + Aggregator build( + String name, + AggregatorFactories factories, + ValuesSource valuesSource, + DocValueFormat format, + List ranges, + boolean keyed, + AggregationContext context, + Aggregator parent, + CardinalityUpperBound cardinality, + Map metadata + ) throws IOException; } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/ParsedBinaryRange.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/ParsedBinaryRange.java index 4ecc502966668..b4920fc23e7d0 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/ParsedBinaryRange.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/ParsedBinaryRange.java @@ -34,12 +34,17 @@ public List getBuckets() { return buckets; } - private static final ObjectParser PARSER = - new ObjectParser<>(ParsedBinaryRange.class.getSimpleName(), true, ParsedBinaryRange::new); + private static final ObjectParser PARSER = new ObjectParser<>( + ParsedBinaryRange.class.getSimpleName(), + true, + ParsedBinaryRange::new + ); static { - declareMultiBucketAggregationFields(PARSER, - parser -> ParsedBucket.fromXContent(parser, false), - parser -> ParsedBucket.fromXContent(parser, true)); + declareMultiBucketAggregationFields( + PARSER, + parser -> ParsedBucket.fromXContent(parser, false), + parser -> ParsedBucket.fromXContent(parser, true) + ); } public static ParsedBinaryRange fromXContent(XContentParser parser, String name) throws IOException { @@ -131,8 +136,12 @@ static ParsedBucket fromXContent(final XContentParser parser, final boolean keye bucket.to = parser.text(); } } else if (token == XContentParser.Token.START_OBJECT) { - XContentParserUtils.parseTypedKeysObject(parser, Aggregation.TYPED_KEYS_DELIMITER, Aggregation.class, - aggregations::add); + XContentParserUtils.parseTypedKeysObject( + parser, + Aggregation.TYPED_KEYS_DELIMITER, + Aggregation.class, + aggregations::add + ); } } bucket.setAggregations(new Aggregations(aggregations)); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/ParsedDateRange.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/ParsedDateRange.java index ac401c9199aec..590cd1777342c 100644 --- 
a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/ParsedDateRange.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/ParsedDateRange.java @@ -23,12 +23,17 @@ public String getType() { return DateRangeAggregationBuilder.NAME; } - private static final ObjectParser PARSER = - new ObjectParser<>(ParsedDateRange.class.getSimpleName(), true, ParsedDateRange::new); + private static final ObjectParser PARSER = new ObjectParser<>( + ParsedDateRange.class.getSimpleName(), + true, + ParsedDateRange::new + ); static { - declareParsedRangeFields(PARSER, - parser -> ParsedBucket.fromXContent(parser, false), - parser -> ParsedBucket.fromXContent(parser, true)); + declareParsedRangeFields( + PARSER, + parser -> ParsedBucket.fromXContent(parser, false), + parser -> ParsedBucket.fromXContent(parser, true) + ); } public static ParsedDateRange fromXContent(XContentParser parser, String name) throws IOException { diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/ParsedGeoDistance.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/ParsedGeoDistance.java index 22982cad09332..8d354bf6cc43e 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/ParsedGeoDistance.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/ParsedGeoDistance.java @@ -20,12 +20,17 @@ public String getType() { return GeoDistanceAggregationBuilder.NAME; } - private static final ObjectParser PARSER = - new ObjectParser<>(ParsedGeoDistance.class.getSimpleName(), true, ParsedGeoDistance::new); + private static final ObjectParser PARSER = new ObjectParser<>( + ParsedGeoDistance.class.getSimpleName(), + true, + ParsedGeoDistance::new + ); static { - declareParsedRangeFields(PARSER, - parser -> ParsedBucket.fromXContent(parser, false), - parser -> ParsedBucket.fromXContent(parser, true)); + declareParsedRangeFields( + PARSER, + parser -> ParsedBucket.fromXContent(parser, false), + parser -> ParsedBucket.fromXContent(parser, true) + ); } public static ParsedGeoDistance fromXContent(XContentParser parser, String name) throws IOException { diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/ParsedRange.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/ParsedRange.java index d64609e3f840a..beecb4e2b953d 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/ParsedRange.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/ParsedRange.java @@ -8,11 +8,11 @@ package org.elasticsearch.search.aggregations.bucket.range; -import org.elasticsearch.core.CheckedFunction; import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParserUtils; +import org.elasticsearch.core.CheckedFunction; import org.elasticsearch.search.aggregations.Aggregation; import org.elasticsearch.search.aggregations.Aggregations; import org.elasticsearch.search.aggregations.ParsedMultiBucketAggregation; @@ -36,18 +36,25 @@ public List getBuckets() { return buckets; } - protected static void declareParsedRangeFields(final ObjectParser objectParser, - final CheckedFunction bucketParser, - final CheckedFunction keyedBucketParser) { + protected static void declareParsedRangeFields( + final ObjectParser objectParser, + final CheckedFunction 
bucketParser, + final CheckedFunction keyedBucketParser + ) { declareMultiBucketAggregationFields(objectParser, bucketParser::apply, keyedBucketParser::apply); } - private static final ObjectParser PARSER = - new ObjectParser<>(ParsedRange.class.getSimpleName(), true, ParsedRange::new); + private static final ObjectParser PARSER = new ObjectParser<>( + ParsedRange.class.getSimpleName(), + true, + ParsedRange::new + ); static { - declareParsedRangeFields(PARSER, - parser -> ParsedBucket.fromXContent(parser, false), - parser -> ParsedBucket.fromXContent(parser, true)); + declareParsedRangeFields( + PARSER, + parser -> ParsedBucket.fromXContent(parser, false), + parser -> ParsedBucket.fromXContent(parser, true) + ); } public static ParsedRange fromXContent(XContentParser parser, String name) throws IOException { @@ -134,9 +141,11 @@ private static String doubleAsString(double d) { return Double.isInfinite(d) ? null : Double.toString(d); } - protected static B parseRangeBucketXContent(final XContentParser parser, - final Supplier bucketSupplier, - final boolean keyed) throws IOException { + protected static B parseRangeBucketXContent( + final XContentParser parser, + final Supplier bucketSupplier, + final boolean keyed + ) throws IOException { final B bucket = bucketSupplier.get(); bucket.setKeyed(keyed); XContentParser.Token token = parser.currentToken(); @@ -168,8 +177,12 @@ protected static B parseRangeBucketXContent(final XCont bucket.toAsString = parser.text(); } } else if (token == XContentParser.Token.START_OBJECT) { - XContentParserUtils.parseTypedKeysObject(parser, Aggregation.TYPED_KEYS_DELIMITER, Aggregation.class, - aggregations::add); + XContentParserUtils.parseTypedKeysObject( + parser, + Aggregation.TYPED_KEYS_DELIMITER, + Aggregation.class, + aggregations::add + ); } } bucket.setAggregations(new Aggregations(aggregations)); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/RangeAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/RangeAggregationBuilder.java index 08ea745c74a92..e82c382ab92e2 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/RangeAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/RangeAggregationBuilder.java @@ -32,8 +32,7 @@ public class RangeAggregationBuilder extends AbstractRangeBuilder PARSER = - ObjectParser.fromBuilder(NAME, RangeAggregationBuilder::new); + public static final ObjectParser PARSER = ObjectParser.fromBuilder(NAME, RangeAggregationBuilder::new); static { ValuesSourceAggregationBuilder.declareFields(PARSER, true, true, false); PARSER.declareBoolean(RangeAggregationBuilder::keyed, RangeAggregator.KEYED_FIELD); @@ -65,9 +64,11 @@ public RangeAggregationBuilder(StreamInput in) throws IOException { super(in, InternalRange.FACTORY, Range::new); } - protected RangeAggregationBuilder(RangeAggregationBuilder clone, - AggregatorFactories.Builder factoriesBuilder, - Map metadata) { + protected RangeAggregationBuilder( + RangeAggregationBuilder clone, + AggregatorFactories.Builder factoriesBuilder, + Map metadata + ) { super(clone, factoriesBuilder, metadata); } @@ -143,11 +144,13 @@ public RangeAggregationBuilder addUnboundedFrom(double from) { } @Override - protected RangeAggregatorFactory innerBuild(AggregationContext context, ValuesSourceConfig config, - AggregatorFactory parent, - AggregatorFactories.Builder subFactoriesBuilder) throws IOException { - RangeAggregatorSupplier 
aggregatorSupplier = - context.getValuesSourceRegistry().getAggregator(REGISTRY_KEY, config); + protected RangeAggregatorFactory innerBuild( + AggregationContext context, + ValuesSourceConfig config, + AggregatorFactory parent, + AggregatorFactories.Builder subFactoriesBuilder + ) throws IOException { + RangeAggregatorSupplier aggregatorSupplier = context.getValuesSourceRegistry().getAggregator(REGISTRY_KEY, config); // We need to call processRanges here so they are parsed before we make the decision of whether to cache the request Range[] ranges = processRanges(range -> { @@ -167,8 +170,18 @@ protected RangeAggregatorFactory innerBuild(AggregationContext context, ValuesSo throw new IllegalArgumentException("No [ranges] specified for the [" + this.getName() + "] aggregation"); } - return new RangeAggregatorFactory(name, config, ranges, keyed, rangeFactory, - context, parent, subFactoriesBuilder, metadata, aggregatorSupplier); + return new RangeAggregatorFactory( + name, + config, + ranges, + keyed, + rangeFactory, + context, + parent, + subFactoriesBuilder, + metadata, + aggregatorSupplier + ); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/RangeAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/RangeAggregator.java index 6d49793bc4d4a..55c20e4eeaea9 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/RangeAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/RangeAggregator.java @@ -207,9 +207,8 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws }); static { - PARSER.declareField(optionalConstructorArg(), - (p, c) -> p.text(), - KEY_FIELD, ValueType.DOUBLE); // DOUBLE supports string and number + PARSER.declareField(optionalConstructorArg(), (p, c) -> p.text(), KEY_FIELD, ValueType.DOUBLE); // DOUBLE supports string and + // number ContextParser fromToParser = (p, c) -> { if (p.currentToken() == XContentParser.Token.VALUE_STRING) { return p.text(); @@ -239,10 +238,10 @@ public boolean equals(Object obj) { } Range other = (Range) obj; return Objects.equals(key, other.key) - && Objects.equals(from, other.from) - && Objects.equals(fromAsStr, other.fromAsStr) - && Objects.equals(to, other.to) - && Objects.equals(toAsStr, other.toAsStr); + && Objects.equals(from, other.from) + && Objects.equals(fromAsStr, other.fromAsStr) + && Objects.equals(to, other.to) + && Objects.equals(toAsStr, other.toAsStr); } } @@ -443,8 +442,7 @@ public RangeAggregator( AggregatorFactories factories, ValuesSource valuesSource, DocValueFormat format, - @SuppressWarnings("rawtypes") - InternalRange.Factory rangeFactory, + @SuppressWarnings("rawtypes") InternalRange.Factory rangeFactory, Range[] ranges, double averageDocsPerRange, boolean keyed, @@ -478,11 +476,15 @@ protected long subBucketOrdinal(long owningBucketOrdinal, int rangeOrd) { @Override @SuppressWarnings("unchecked") public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException { - return buildAggregationsForFixedBucketCount(owningBucketOrds, ranges.length, + return buildAggregationsForFixedBucketCount( + owningBucketOrds, + ranges.length, (offsetInOwningOrd, docCount, subAggregationResults) -> { Range range = ranges[offsetInOwningOrd]; return rangeFactory.createBucket(range.key, range.from, range.to, docCount, subAggregationResults, keyed, format); - }, buckets -> rangeFactory.create(name, buckets, format, keyed, metadata())); + }, + 
buckets -> rangeFactory.create(name, buckets, format, keyed, metadata()) + ); } @Override @@ -492,8 +494,15 @@ public InternalAggregation buildEmptyAggregation() { List buckets = new ArrayList<>(ranges.length); for (int i = 0; i < ranges.length; i++) { Range range = ranges[i]; - org.elasticsearch.search.aggregations.bucket.range.Range.Bucket bucket = - rangeFactory.createBucket(range.key, range.from, range.to, 0, subAggs, keyed, format); + org.elasticsearch.search.aggregations.bucket.range.Range.Bucket bucket = rangeFactory.createBucket( + range.key, + range.from, + range.to, + 0, + subAggs, + keyed, + format + ); buckets.add(bucket); } // value source can be null in the case of unmapped fields @@ -523,8 +532,7 @@ public Unmapped( DocValueFormat format, AggregationContext context, Aggregator parent, - @SuppressWarnings("rawtypes") - InternalRange.Factory factory, + @SuppressWarnings("rawtypes") InternalRange.Factory factory, Map metadata ) throws IOException { super(name, context, parent, factories, metadata); @@ -580,7 +588,7 @@ private abstract static class NumericRangeAggregator extends RangeAggregator { @Override public LeafBucketCollector getLeafCollector(LeafReaderContext ctx, LeafBucketCollector sub) throws IOException { - final SortedNumericDoubleValues values = ((ValuesSource.Numeric)this.valuesSource).doubleValues(ctx); + final SortedNumericDoubleValues values = ((ValuesSource.Numeric) this.valuesSource).doubleValues(ctx); return new LeafBucketCollectorBase(sub, values) { @Override public void collect(int doc, long bucket) throws IOException { @@ -606,8 +614,7 @@ static class NoOverlap extends NumericRangeAggregator { AggregatorFactories factories, Numeric valuesSource, DocValueFormat format, - @SuppressWarnings("rawtypes") - Factory rangeFactory, + @SuppressWarnings("rawtypes") Factory rangeFactory, Range[] ranges, double averageDocsPerRange, boolean keyed, @@ -779,15 +786,7 @@ protected InternalAggregation adapt(InternalAggregation delegateResult) { Range r = ranges[i]; InternalFilters.InternalBucket b = filters.getBuckets().get(i); buckets.add( - rangeFactory.createBucket( - r.getKey(), - r.getFrom(), - r.getTo(), - b.getDocCount(), - b.getAggregations(), - keyed, - format - ) + rangeFactory.createBucket(r.getKey(), r.getFrom(), r.getTo(), b.getDocCount(), b.getAggregations(), keyed, format) ); } return rangeFactory.create(name(), buckets, format, keyed, filters.getMetadata()); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/RangeAggregatorSupplier.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/RangeAggregatorSupplier.java index 462085ee78587..6b7e5ed2bdb95 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/RangeAggregatorSupplier.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/RangeAggregatorSupplier.java @@ -17,14 +17,16 @@ import java.util.Map; public interface RangeAggregatorSupplier { - Aggregator build(String name, - AggregatorFactories factories, - ValuesSourceConfig valuesSourceConfig, - InternalRange.Factory rangeFactory, - RangeAggregator.Range[] ranges, - boolean keyed, - AggregationContext context, - Aggregator parent, - CardinalityUpperBound cardinality, - Map metadata) throws IOException; + Aggregator build( + String name, + AggregatorFactories factories, + ValuesSourceConfig valuesSourceConfig, + InternalRange.Factory rangeFactory, + RangeAggregator.Range[] ranges, + boolean keyed, + AggregationContext context, + Aggregator 
parent, + CardinalityUpperBound cardinality, + Map metadata + ) throws IOException; } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/BestDocsDeferringCollector.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/BestDocsDeferringCollector.java index ee0db64465d49..abc737afca456 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/BestDocsDeferringCollector.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/BestDocsDeferringCollector.java @@ -18,10 +18,10 @@ import org.apache.lucene.search.TopScoreDocCollector; import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.core.Releasable; -import org.elasticsearch.core.Releasables; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.ObjectArray; +import org.elasticsearch.core.Releasable; +import org.elasticsearch.core.Releasables; import org.elasticsearch.search.aggregations.BucketCollector; import org.elasticsearch.search.aggregations.LeafBucketCollector; import org.elasticsearch.search.aggregations.MultiBucketCollector; @@ -118,7 +118,6 @@ public void postCollection() throws IOException { runDeferredAggs(); } - @Override public void prepareSelectedBuckets(long... selectedBuckets) throws IOException { // no-op - deferred aggs processed in postCollection call diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedAggregationBuilder.java index 3b6dab73b8f62..3f1356eaba448 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedAggregationBuilder.java @@ -34,8 +34,10 @@ public class DiversifiedAggregationBuilder extends ValuesSourceAggregationBuilde public static final int MAX_DOCS_PER_VALUE_DEFAULT = 1; - public static final ObjectParser PARSER = - ObjectParser.fromBuilder(NAME, DiversifiedAggregationBuilder::new); + public static final ObjectParser PARSER = ObjectParser.fromBuilder( + NAME, + DiversifiedAggregationBuilder::new + ); static { ValuesSourceAggregationBuilder.declareFields(PARSER, true, false, false); PARSER.declareInt(DiversifiedAggregationBuilder::shardSize, SamplerAggregator.SHARD_SIZE_FIELD); @@ -95,7 +97,8 @@ protected void innerWriteTo(StreamOutput out) throws IOException { public DiversifiedAggregationBuilder shardSize(int shardSize) { if (shardSize < 0) { throw new IllegalArgumentException( - "[shardSize] must be greater than or equal to 0. Found [" + shardSize + "] in [" + name + "]"); + "[shardSize] must be greater than or equal to 0. Found [" + shardSize + "] in [" + name + "]" + ); } this.shardSize = shardSize; return this; @@ -114,7 +117,8 @@ public int shardSize() { public DiversifiedAggregationBuilder maxDocsPerValue(int maxDocsPerValue) { if (maxDocsPerValue < 0) { throw new IllegalArgumentException( - "[maxDocsPerValue] must be greater than or equal to 0. Found [" + maxDocsPerValue + "] in [" + name + "]"); + "[maxDocsPerValue] must be greater than or equal to 0. 
Found [" + maxDocsPerValue + "] in [" + name + "]" + ); } this.maxDocsPerValue = maxDocsPerValue; return this; @@ -148,14 +152,25 @@ public BucketCardinality bucketCardinality() { } @Override - protected ValuesSourceAggregatorFactory innerBuild(AggregationContext context, - ValuesSourceConfig config, - AggregatorFactory parent, - Builder subFactoriesBuilder) throws IOException { - DiversifiedAggregatorSupplier aggregatorSupplier = - context.getValuesSourceRegistry().getAggregator(REGISTRY_KEY, config); - return new DiversifiedAggregatorFactory(name, config, shardSize, maxDocsPerValue, executionHint, context, - parent, subFactoriesBuilder, metadata, aggregatorSupplier); + protected ValuesSourceAggregatorFactory innerBuild( + AggregationContext context, + ValuesSourceConfig config, + AggregatorFactory parent, + Builder subFactoriesBuilder + ) throws IOException { + DiversifiedAggregatorSupplier aggregatorSupplier = context.getValuesSourceRegistry().getAggregator(REGISTRY_KEY, config); + return new DiversifiedAggregatorFactory( + name, + config, + shardSize, + maxDocsPerValue, + executionHint, + context, + parent, + subFactoriesBuilder, + metadata, + aggregatorSupplier + ); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedAggregatorFactory.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedAggregatorFactory.java index 9706ccffb3e91..45d420f7c7d08 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedAggregatorFactory.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedAggregatorFactory.java @@ -50,7 +50,8 @@ public static void registerAggregators(ValuesSourceRegistry.Builder builder) { valuesSourceConfig, maxDocsPerValue ), - true); + true + ); builder.register( DiversifiedAggregationBuilder.REGISTRY_KEY, @@ -79,7 +80,8 @@ public static void registerAggregators(ValuesSourceRegistry.Builder builder) { } return execution.create(name, factories, shardSize, maxDocsPerValue, valuesSourceConfig, context, parent, metadata); }, - true); + true + ); } private final DiversifiedAggregatorSupplier aggregatorSupplier; @@ -87,10 +89,18 @@ public static void registerAggregators(ValuesSourceRegistry.Builder builder) { private final int maxDocsPerValue; private final String executionHint; - DiversifiedAggregatorFactory(String name, ValuesSourceConfig config, int shardSize, int maxDocsPerValue, - String executionHint, AggregationContext context, AggregatorFactory parent, - AggregatorFactories.Builder subFactoriesBuilder, Map metadata, - DiversifiedAggregatorSupplier aggregatorSupplier) throws IOException { + DiversifiedAggregatorFactory( + String name, + ValuesSourceConfig config, + int shardSize, + int maxDocsPerValue, + String executionHint, + AggregationContext context, + AggregatorFactory parent, + AggregatorFactories.Builder subFactoriesBuilder, + Map metadata, + DiversifiedAggregatorSupplier aggregatorSupplier + ) throws IOException { super(name, config, context, parent, subFactoriesBuilder, metadata); this.shardSize = shardSize; this.maxDocsPerValue = maxDocsPerValue; @@ -99,12 +109,10 @@ public static void registerAggregators(ValuesSourceRegistry.Builder builder) { } @Override - protected Aggregator doCreateInternal(Aggregator parent, - CardinalityUpperBound cardinality, - Map metadata) throws IOException { + protected Aggregator doCreateInternal(Aggregator parent, CardinalityUpperBound cardinality, Map metadata) + 
throws IOException { - return aggregatorSupplier.build(name, shardSize, factories, context, - parent, metadata, config, maxDocsPerValue, executionHint); + return aggregatorSupplier.build(name, shardSize, factories, context, parent, metadata, config, maxDocsPerValue, executionHint); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedAggregatorSupplier.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedAggregatorSupplier.java index f3cac687d9c94..ced81266693ea 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedAggregatorSupplier.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedAggregatorSupplier.java @@ -26,5 +26,6 @@ Aggregator build( Map metadata, ValuesSourceConfig valuesSourceConfig, int maxDocsPerValue, - String executionHint) throws IOException; + String executionHint + ) throws IOException; } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedBytesHashSamplerAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedBytesHashSamplerAggregator.java index bf61d9e2a392a..7f0814f7bb1bb 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedBytesHashSamplerAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedBytesHashSamplerAggregator.java @@ -70,7 +70,6 @@ class DiverseDocsDeferringCollector extends BestDocsDeferringCollector { super(shardSize, bigArrays(), circuitBreakerConsumer); } - @Override protected TopDocsCollector createTopDocsCollector(int size) { // Make sure we do not allow size > maxDoc, to prevent accidental OOM @@ -109,8 +108,7 @@ public boolean advanceExact(int target) throws IOException { docID = target; if (values.advanceExact(target)) { if (values.docValueCount() > 1) { - throw new IllegalArgumentException( - "Sample diversifying key must be a single valued-field"); + throw new IllegalArgumentException("Sample diversifying key must be a single valued-field"); } return true; } else { diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedMapSamplerAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedMapSamplerAggregator.java index a3b3e2035ecd4..a69fb6357ba39 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedMapSamplerAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedMapSamplerAggregator.java @@ -15,8 +15,8 @@ import org.apache.lucene.search.TopDocsCollector; import org.apache.lucene.util.BytesRef; import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.core.Releasables; import org.elasticsearch.common.util.BytesRefHash; +import org.elasticsearch.core.Releasables; import org.elasticsearch.index.fielddata.AbstractNumericDocValues; import org.elasticsearch.index.fielddata.SortedBinaryDocValues; import org.elasticsearch.search.aggregations.Aggregator; @@ -115,8 +115,7 @@ public boolean advanceExact(int target) throws IOException { docID = target; if (values.advanceExact(target)) { if (values.docValueCount() > 1) { - throw new IllegalArgumentException( - "Sample diversifying key must be a single valued-field"); + throw new IllegalArgumentException("Sample diversifying key must be a single 
valued-field"); } return true; } else { diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedNumericSamplerAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedNumericSamplerAggregator.java index 0f54717091b80..d4589df6e4efd 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedNumericSamplerAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedNumericSamplerAggregator.java @@ -99,8 +99,7 @@ protected NumericDocValues getKeys(LeafReaderContext context) { public boolean advanceExact(int target) throws IOException { if (values.advanceExact(target)) { if (values.docValueCount() > 1) { - throw new IllegalArgumentException( - "Sample diversifying key must be a single valued-field"); + throw new IllegalArgumentException("Sample diversifying key must be a single valued-field"); } return true; } else { diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedOrdinalsSamplerAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedOrdinalsSamplerAggregator.java index 08f4eeb430818..a10abe6944438 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedOrdinalsSamplerAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedOrdinalsSamplerAggregator.java @@ -82,7 +82,6 @@ protected long getPriorityQueueSlotSize() { // a lookup from elasticsearch's ValuesSource class ValuesDiversifiedTopDocsCollector extends DiversifiedTopDocsCollector { - ValuesDiversifiedTopDocsCollector(int numHits, int maxHitsPerKey) { super(numHits, maxHitsPerKey); } @@ -121,8 +120,7 @@ public boolean advanceExact(int target) throws IOException { // Check there isn't a second value for this // document if (globalOrds.nextOrd() != SortedSetDocValues.NO_MORE_ORDS) { - throw new IllegalArgumentException( - "Sample diversifying key must be a single valued-field"); + throw new IllegalArgumentException("Sample diversifying key must be a single valued-field"); } return true; } else { diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/InternalSampler.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/InternalSampler.java index 939abfde8350e..7062410d810c4 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/InternalSampler.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/InternalSampler.java @@ -41,8 +41,7 @@ public String getType() { } @Override - protected InternalSingleBucketAggregation newAggregation(String name, long docCount, - InternalAggregations subAggregations) { + protected InternalSingleBucketAggregation newAggregation(String name, long docCount, InternalAggregations subAggregations) { return new InternalSampler(name, docCount, subAggregations, metadata); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/Sampler.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/Sampler.java index de92c20255b8b..48ef7d8493aa6 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/Sampler.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/Sampler.java @@ -14,5 +14,4 @@ * top-matching documents. 
Computation of child aggregations is deferred until * the top-matching documents on a shard have been determined. */ -public interface Sampler extends SingleBucketAggregation { -} +public interface Sampler extends SingleBucketAggregation {} diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/SamplerAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/SamplerAggregationBuilder.java index 2b6c84cc098da..cf34888a30371 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/SamplerAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/SamplerAggregationBuilder.java @@ -79,7 +79,7 @@ public BucketCardinality bucketCardinality() { @Override protected SamplerAggregatorFactory doBuild(AggregationContext context, AggregatorFactory parent, Builder subFactoriesBuilder) - throws IOException { + throws IOException { return new SamplerAggregatorFactory(name, shardSize, context, parent, subFactoriesBuilder, metadata); } @@ -103,12 +103,16 @@ public static SamplerAggregationBuilder parse(String aggregationName, XContentPa if (SamplerAggregator.SHARD_SIZE_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { shardSize = parser.intValue(); } else { - throw new ParsingException(parser.getTokenLocation(), - "Unsupported property \"" + currentFieldName + "\" for aggregation \"" + aggregationName); + throw new ParsingException( + parser.getTokenLocation(), + "Unsupported property \"" + currentFieldName + "\" for aggregation \"" + aggregationName + ); } } else { - throw new ParsingException(parser.getTokenLocation(), - "Unsupported property \"" + currentFieldName + "\" for aggregation \"" + aggregationName); + throw new ParsingException( + parser.getTokenLocation(), + "Unsupported property \"" + currentFieldName + "\" for aggregation \"" + aggregationName + ); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/SamplerAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/SamplerAggregator.java index af41459fb7acf..4eb29ae86798b 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/SamplerAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/SamplerAggregator.java @@ -11,9 +11,9 @@ import org.apache.lucene.search.DiversifiedTopDocsCollector; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.util.RamUsageEstimator; +import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.core.Releasables; -import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; import org.elasticsearch.search.aggregations.AggregationExecutionException; import org.elasticsearch.search.aggregations.Aggregator; import org.elasticsearch.search.aggregations.AggregatorFactories; @@ -63,8 +63,16 @@ Aggregator create( Map metadata ) throws IOException { - return new DiversifiedMapSamplerAggregator(name, shardSize, factories, context, parent, metadata, - valuesSourceConfig, maxDocsPerValue); + return new DiversifiedMapSamplerAggregator( + name, + shardSize, + factories, + context, + parent, + metadata, + valuesSourceConfig, + maxDocsPerValue + ); } @Override @@ -87,8 +95,16 @@ Aggregator create( Map metadata ) throws IOException { - return new DiversifiedBytesHashSamplerAggregator(name, shardSize, factories, context, 
parent, metadata, - valuesSourceConfig, maxDocsPerValue); + return new DiversifiedBytesHashSamplerAggregator( + name, + shardSize, + factories, + context, + parent, + metadata, + valuesSourceConfig, + maxDocsPerValue + ); } @Override @@ -110,8 +126,16 @@ Aggregator create( Aggregator parent, Map metadata ) throws IOException { - return new DiversifiedOrdinalsSamplerAggregator(name, shardSize, factories, context, parent, metadata, - valuesSourceConfig, maxDocsPerValue); + return new DiversifiedOrdinalsSamplerAggregator( + name, + shardSize, + factories, + context, + parent, + metadata, + valuesSourceConfig, + maxDocsPerValue + ); } @Override @@ -155,12 +179,17 @@ public String toString() { } } - protected final int shardSize; protected BestDocsDeferringCollector bdd; - SamplerAggregator(String name, int shardSize, AggregatorFactories factories, AggregationContext context, - Aggregator parent, Map metadata) throws IOException { + SamplerAggregator( + String name, + int shardSize, + AggregatorFactories factories, + AggregationContext context, + Aggregator parent, + Map metadata + ) throws IOException { super(name, factories, context, parent, metadata); // Make sure we do not allow size > maxDoc, to prevent accidental OOM this.shardSize = Math.min(shardSize, searcher().getIndexReader().maxDoc()); @@ -184,8 +213,15 @@ protected boolean shouldDefer(Aggregator aggregator) { @Override public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException { - return buildAggregationsForSingleBucket(owningBucketOrds, (owningBucketOrd, subAggregationResults) -> - new InternalSampler(name, bdd == null ? 0 : bdd.getDocCount(owningBucketOrd), subAggregationResults, metadata())); + return buildAggregationsForSingleBucket( + owningBucketOrds, + (owningBucketOrd, subAggregationResults) -> new InternalSampler( + name, + bdd == null ? 
0 : bdd.getDocCount(owningBucketOrd), + subAggregationResults, + metadata() + ) + ); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/SamplerAggregatorFactory.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/SamplerAggregatorFactory.java index 54b95bf9e168b..5c5f014b77e1e 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/SamplerAggregatorFactory.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/SamplerAggregatorFactory.java @@ -21,8 +21,14 @@ public class SamplerAggregatorFactory extends AggregatorFactory { private final int shardSize; - SamplerAggregatorFactory(String name, int shardSize, AggregationContext context, AggregatorFactory parent, - AggregatorFactories.Builder subFactories, Map metadata) throws IOException { + SamplerAggregatorFactory( + String name, + int shardSize, + AggregationContext context, + AggregatorFactory parent, + AggregatorFactories.Builder subFactories, + Map metadata + ) throws IOException { super(name, context, parent, subFactories, metadata); this.shardSize = shardSize; } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/AbstractInternalTerms.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/AbstractInternalTerms.java index 1f5ed6e1f70a8..0c603607ea750 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/AbstractInternalTerms.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/AbstractInternalTerms.java @@ -37,13 +37,10 @@ /** * Base class for terms and multi_terms aggregation that handles common reduce logic */ -public abstract class AbstractInternalTerms< - A extends AbstractInternalTerms, - B extends AbstractInternalTerms.AbstractTermsBucket - > extends InternalMultiBucketAggregation { +public abstract class AbstractInternalTerms, B extends AbstractInternalTerms.AbstractTermsBucket> + extends InternalMultiBucketAggregation { - public AbstractInternalTerms(String name, - Map metadata) { + public AbstractInternalTerms(String name, Map metadata) { super(name, metadata); } @@ -259,7 +256,7 @@ private void reduceLegacy( public InternalAggregation reduce(List aggregations, InternalAggregation.ReduceContext reduceContext) { long sumDocCountError = 0; - long[] otherDocCount = new long[] {0}; + long[] otherDocCount = new long[] { 0 }; A referenceTerms = null; for (InternalAggregation aggregation : aggregations) { @SuppressWarnings("unchecked") @@ -270,9 +267,12 @@ public InternalAggregation reduce(List aggregations, Intern if (referenceTerms != null && referenceTerms.getClass().equals(terms.getClass()) == false && terms.isMapped()) { // control gets into this loop when the same field name against which the query is executed // is of different types in different indices. 
- throw new AggregationExecutionException("Merging/Reducing the aggregations failed when computing the aggregation [" - + referenceTerms.getName() + "] because the field you gave in the aggregation query existed as two different " - + "types in two different indices"); + throw new AggregationExecutionException( + "Merging/Reducing the aggregations failed when computing the aggregation [" + + referenceTerms.getName() + + "] because the field you gave in the aggregation query existed as two different " + + "types in two different indices" + ); } otherDocCount[0] += terms.getSumOfOtherDocCounts(); final long thisAggDocCountError = getDocCountError(terms); @@ -292,16 +292,18 @@ public InternalAggregation reduce(List aggregations, Intern // for the existing error calculated in a previous reduce. // Note that if the error is unbounded (-1) this will be fixed // later in this method. - bucket.updateDocCountError(-thisAggDocCountError); + bucket.updateDocCountError(-thisAggDocCountError); } } BucketOrder thisReduceOrder; List result; if (reduceContext.isFinalReduce()) { - TopBucketBuilder top = TopBucketBuilder.build(getRequiredSize(), getOrder(), removed -> { - otherDocCount[0] += removed.getDocCount(); - }); + TopBucketBuilder top = TopBucketBuilder.build( + getRequiredSize(), + getOrder(), + removed -> { otherDocCount[0] += removed.getDocCount(); } + ); thisReduceOrder = reduceBuckets(aggregations, reduceContext, bucket -> { if (bucket.getDocCount() >= getMinDocCount()) { top.add(bucket); @@ -338,11 +340,13 @@ public InternalAggregation reduce(List aggregations, Intern return create(name, result, reduceContext.isFinalReduce() ? getOrder() : thisReduceOrder, docCountError, otherDocCount[0]); } - protected static XContentBuilder doXContentCommon(XContentBuilder builder, - Params params, - Long docCountError, - long otherDocCount, - List buckets) throws IOException { + protected static XContentBuilder doXContentCommon( + XContentBuilder builder, + Params params, + Long docCountError, + long otherDocCount, + List buckets + ) throws IOException { builder.field(DOC_COUNT_ERROR_UPPER_BOUND_FIELD_NAME.getPreferredName(), docCountError); builder.field(SUM_OF_OTHER_DOC_COUNTS.getPreferredName(), otherDocCount); builder.startArray(CommonFields.BUCKETS.getPreferredName()); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/AbstractRareTermsAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/AbstractRareTermsAggregator.java index 9492ed18d0b94..ea4218397a9ea 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/AbstractRareTermsAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/AbstractRareTermsAggregator.java @@ -57,9 +57,16 @@ public abstract class AbstractRareTermsAggregator extends DeferableBucketAggrega * But the RareTerms agg _must_ execute in breadth first since it relies on * deferring execution, so we just have to throw up our hands and refuse */ - throw new IllegalStateException("RareTerms agg [" + name() + "] is the child of the nested agg [" + nestedAgg - + "], and also has a scoring child agg [" + scoringAgg + "]. This combination is not supported because " + - "it requires executing in [depth_first] mode, which the RareTerms agg cannot do."); + throw new IllegalStateException( + "RareTerms agg [" + + name() + + "] is the child of the nested agg [" + + nestedAgg + + "], and also has a scoring child agg [" + + scoringAgg + + "]. 
This combination is not supported because " + + "it requires executing in [depth_first] mode, which the RareTerms agg cannot do." + ); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/AbstractStringTermsAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/AbstractStringTermsAggregator.java index 263f6752c31e1..6351806d79a3f 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/AbstractStringTermsAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/AbstractStringTermsAggregator.java @@ -24,16 +24,37 @@ abstract class AbstractStringTermsAggregator extends TermsAggregator { protected final boolean showTermDocCountError; - AbstractStringTermsAggregator(String name, AggregatorFactories factories, AggregationContext context, Aggregator parent, - BucketOrder order, DocValueFormat format, BucketCountThresholds bucketCountThresholds, SubAggCollectionMode subAggCollectMode, - boolean showTermDocCountError, Map metadata) throws IOException { + AbstractStringTermsAggregator( + String name, + AggregatorFactories factories, + AggregationContext context, + Aggregator parent, + BucketOrder order, + DocValueFormat format, + BucketCountThresholds bucketCountThresholds, + SubAggCollectionMode subAggCollectMode, + boolean showTermDocCountError, + Map metadata + ) throws IOException { super(name, factories, context, parent, bucketCountThresholds, order, format, subAggCollectMode, metadata); this.showTermDocCountError = showTermDocCountError; } protected StringTerms buildEmptyTermsAggregation() { - return new StringTerms(name, order, order, bucketCountThresholds.getRequiredSize(), bucketCountThresholds.getMinDocCount(), - metadata(), format, bucketCountThresholds.getShardSize(), showTermDocCountError, 0, emptyList(), 0L); + return new StringTerms( + name, + order, + order, + bucketCountThresholds.getRequiredSize(), + bucketCountThresholds.getMinDocCount(), + metadata(), + format, + bucketCountThresholds.getShardSize(), + showTermDocCountError, + 0, + emptyList(), + 0L + ); } protected SignificantStringTerms buildEmptySignificantTermsAggregation( @@ -42,7 +63,16 @@ protected SignificantStringTerms buildEmptySignificantTermsAggregation( SignificanceHeuristic significanceHeuristic ) { // We need to account for the significance of a miss in our global stats - provide corpus size as context - return new SignificantStringTerms(name, bucketCountThresholds.getRequiredSize(), bucketCountThresholds.getMinDocCount(), - metadata(), format, subsetSize, supersetSize, significanceHeuristic, emptyList()); + return new SignificantStringTerms( + name, + bucketCountThresholds.getRequiredSize(), + bucketCountThresholds.getMinDocCount(), + metadata(), + format, + subsetSize, + supersetSize, + significanceHeuristic, + emptyList() + ); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/BytesKeyedBucketOrds.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/BytesKeyedBucketOrds.java index f05c5a1dab3c1..680e45326e0f5 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/BytesKeyedBucketOrds.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/BytesKeyedBucketOrds.java @@ -9,10 +9,10 @@ package org.elasticsearch.search.aggregations.bucket.terms; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.core.Releasable; -import 
org.elasticsearch.core.Releasables; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.BytesRefHash; +import org.elasticsearch.core.Releasable; +import org.elasticsearch.core.Releasables; import org.elasticsearch.search.aggregations.CardinalityUpperBound; /** diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/DoubleTerms.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/DoubleTerms.java index 439d4fb6e2df8..230ac0671683c 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/DoubleTerms.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/DoubleTerms.java @@ -30,8 +30,14 @@ public class DoubleTerms extends InternalMappedTerms { double term; - Bucket(double term, long docCount, InternalAggregations aggregations, boolean showDocCountError, long docCountError, - DocValueFormat format) { + Bucket( + double term, + long docCount, + InternalAggregations aggregations, + boolean showDocCountError, + long docCountError, + DocValueFormat format + ) { super(docCount, aggregations, showDocCountError, docCountError, format); this.term = term; } @@ -89,11 +95,34 @@ public int hashCode() { } } - public DoubleTerms(String name, BucketOrder reduceOrder, BucketOrder order, int requiredSize, long minDocCount, - Map metadata, DocValueFormat format, int shardSize, boolean showTermDocCountError, long otherDocCount, - List buckets, Long docCountError) { - super(name, reduceOrder, order, requiredSize, minDocCount, metadata, format, shardSize, showTermDocCountError, - otherDocCount, buckets, docCountError); + public DoubleTerms( + String name, + BucketOrder reduceOrder, + BucketOrder order, + int requiredSize, + long minDocCount, + Map metadata, + DocValueFormat format, + int shardSize, + boolean showTermDocCountError, + long otherDocCount, + List buckets, + Long docCountError + ) { + super( + name, + reduceOrder, + order, + requiredSize, + minDocCount, + metadata, + format, + shardSize, + showTermDocCountError, + otherDocCount, + buckets, + docCountError + ); } /** @@ -110,28 +139,58 @@ public String getWriteableName() { @Override public DoubleTerms create(List buckets) { - return new DoubleTerms(name, reduceOrder, order, requiredSize, minDocCount, metadata, format, shardSize, - showTermDocCountError, otherDocCount, buckets, docCountError); + return new DoubleTerms( + name, + reduceOrder, + order, + requiredSize, + minDocCount, + metadata, + format, + shardSize, + showTermDocCountError, + otherDocCount, + buckets, + docCountError + ); } @Override public Bucket createBucket(InternalAggregations aggregations, Bucket prototype) { - return new Bucket(prototype.term, prototype.docCount, aggregations, prototype.showDocCountError, prototype.docCountError, - prototype.format); + return new Bucket( + prototype.term, + prototype.docCount, + aggregations, + prototype.showDocCountError, + prototype.docCountError, + prototype.format + ); } @Override protected DoubleTerms create(String name, List buckets, BucketOrder reduceOrder, long docCountError, long otherDocCount) { - return new DoubleTerms(name, reduceOrder, order, requiredSize, minDocCount, getMetadata(), format, - shardSize, showTermDocCountError, otherDocCount, buckets, docCountError); + return new DoubleTerms( + name, + reduceOrder, + order, + requiredSize, + minDocCount, + getMetadata(), + format, + shardSize, + showTermDocCountError, + otherDocCount, + buckets, + docCountError + ); } @Override public 
InternalAggregation reduce(List aggregations, ReduceContext reduceContext) { boolean promoteToDouble = false; for (InternalAggregation agg : aggregations) { - if (agg instanceof LongTerms && - (((LongTerms) agg).format == DocValueFormat.RAW || ((LongTerms) agg).format == DocValueFormat.UNSIGNED_LONG_SHIFTED) ) { + if (agg instanceof LongTerms + && (((LongTerms) agg).format == DocValueFormat.RAW || ((LongTerms) agg).format == DocValueFormat.UNSIGNED_LONG_SHIFTED)) { /* * this terms agg mixes longs and doubles, we must promote longs to doubles to make the internal aggs * compatible diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/GlobalOrdinalsStringTermsAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/GlobalOrdinalsStringTermsAggregator.java index 7c9947c0d780a..0b18abb98beaf 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/GlobalOrdinalsStringTermsAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/GlobalOrdinalsStringTermsAggregator.java @@ -16,11 +16,11 @@ import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.PriorityQueue; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.core.Releasable; -import org.elasticsearch.core.Releasables; import org.elasticsearch.common.util.LongArray; import org.elasticsearch.common.util.LongHash; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.core.Releasable; +import org.elasticsearch.core.Releasables; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.AggregationExecutionException; import org.elasticsearch.search.aggregations.Aggregator; @@ -393,14 +393,17 @@ abstract class CollectionStrategy implements Releasable { * output to help with debugging. */ abstract String describe(); + /** * The total number of buckets collected by this strategy. */ abstract long totalBuckets(); + /** * Called when the global ordinals are ready. */ abstract void globalOrdsReady(SortedSetDocValues globalOrds); + /** * Called once per unique document, global ordinal combination to * collect the bucket. @@ -411,10 +414,12 @@ abstract class CollectionStrategy implements Releasable { * @param sub the sub-aggregators that that will collect the bucket data */ abstract void collectGlobalOrd(long owningBucketOrd, int doc, long globalOrd, LeafBucketCollector sub) throws IOException; + /** * Convert a global ordinal into a bucket ordinal. */ abstract long globalOrdToBucketOrd(long owningBucketOrd, long globalOrd); + /** * Iterate all of the buckets. Implementations take into account * the {@link BucketCountThresholds}. 
In particular, @@ -426,6 +431,7 @@ abstract class CollectionStrategy implements Releasable { */ abstract void forEach(long owningBucketOrd, BucketInfoConsumer consumer) throws IOException; } + interface BucketInfoConsumer { void accept(long globalOrd, long bucketOrd, long docCount) throws IOException; } @@ -694,6 +700,7 @@ public void accept(long globalOrd, long bucketOrd, long docCount) throws IOExcep */ abstract R buildNoValuesResult(long owningBucketOrdinal); } + interface BucketUpdater { void updateBucket(TB spare, long globalOrd, long bucketOrd, long docCount) throws IOException; } @@ -763,9 +770,20 @@ StringTerms buildResult(long owningBucketOrd, long otherDocCount, StringTerms.Bu } else { reduceOrder = order; } - return new StringTerms(name, reduceOrder, order, bucketCountThresholds.getRequiredSize(), - bucketCountThresholds.getMinDocCount(), metadata(), format, bucketCountThresholds.getShardSize(), showTermDocCountError, - otherDocCount, Arrays.asList(topBuckets), null); + return new StringTerms( + name, + reduceOrder, + order, + bucketCountThresholds.getRequiredSize(), + bucketCountThresholds.getMinDocCount(), + metadata(), + format, + bucketCountThresholds.getShardSize(), + showTermDocCountError, + otherDocCount, + Arrays.asList(topBuckets), + null + ); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/IncludeExclude.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/IncludeExclude.java index 49d4e1b18b058..68bd88b0e98c2 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/IncludeExclude.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/IncludeExclude.java @@ -26,10 +26,10 @@ import org.apache.lucene.util.automaton.RegExp; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.Version; -import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.xcontent.ToXContentFragment; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; @@ -89,17 +89,18 @@ public static IncludeExclude parseInclude(XContentParser parser) throws IOExcept } else if (PARTITION_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { partition = parser.intValue(); } else { - throw new ElasticsearchParseException( - "Unknown parameter in Include/Exclude clause: " + currentFieldName); + throw new ElasticsearchParseException("Unknown parameter in Include/Exclude clause: " + currentFieldName); } } if (partition == null) { - throw new IllegalArgumentException("Missing [" + PARTITION_FIELD.getPreferredName() - + "] parameter for partition-based include"); + throw new IllegalArgumentException( + "Missing [" + PARTITION_FIELD.getPreferredName() + "] parameter for partition-based include" + ); } if (numPartitions == null) { - throw new IllegalArgumentException("Missing [" + NUM_PARTITIONS_FIELD.getPreferredName() - + "] parameter for partition-based include"); + throw new IllegalArgumentException( + "Missing [" + NUM_PARTITIONS_FIELD.getPreferredName() + "] parameter for partition-based include" + ); } return new IncludeExclude(partition, numPartitions); } else { @@ -137,7 +138,6 @@ public boolean accept(long value) { } } - public static class 
SetBackedLongFilter extends LongFilter { private LongSet valids; private LongSet invalids; @@ -185,7 +185,7 @@ class SetAndRegexStringFilter extends StringFilter { private SetAndRegexStringFilter(DocValueFormat format) { Automaton automaton = toAutomaton(); - this.runAutomaton = automaton == null ? null : new ByteRunAutomaton(automaton); + this.runAutomaton = automaton == null ? null : new ByteRunAutomaton(automaton); this.valids = parseForDocValues(includeValues, format); this.invalids = parseForDocValues(excludeValues, format); } @@ -222,8 +222,10 @@ public LongBitSet acceptedGlobalOrdinals(SortedSetDocValues globalOrdinals) thro BytesRef term = termEnum.next(); while (term != null) { - if (Math.floorMod(StringHelper.murmurhash3_x86_32(term, HASH_PARTITIONING_SEED), - incNumPartitions) == incZeroBasedPartition) { + if (Math.floorMod( + StringHelper.murmurhash3_x86_32(term, HASH_PARTITIONING_SEED), + incNumPartitions + ) == incZeroBasedPartition) { acceptedGlobalOrdinals.set(termEnum.ord()); } term = termEnum.next(); @@ -240,7 +242,7 @@ class SetAndRegexOrdinalsFilter extends OrdinalsFilter { private SetAndRegexOrdinalsFilter(DocValueFormat format) { Automaton automaton = toAutomaton(); - this.compiled = automaton == null ? null : new CompiledAutomaton(automaton); + this.compiled = automaton == null ? null : new CompiledAutomaton(automaton); this.valids = parseForDocValues(includeValues, format); this.invalids = parseForDocValues(excludeValues, format); } @@ -300,7 +302,6 @@ public LongBitSet acceptedGlobalOrdinals(SortedSetDocValues globalOrdinals) thro } } - private final RegExp include, exclude; private final SortedSet includeValues, excludeValues; private final int incZeroBasedPartition; @@ -367,7 +368,7 @@ public IncludeExclude(long[] includeValues, long[] excludeValues) { public IncludeExclude(int partition, int numPartitions) { if (partition < 0 || partition >= numPartitions) { - throw new IllegalArgumentException("Partition must be >=0 and < numPartition which is "+numPartitions); + throw new IllegalArgumentException("Partition must be >=0 and < numPartition which is " + numPartitions); } this.incZeroBasedPartition = partition; this.incNumPartitions = numPartitions; @@ -378,8 +379,6 @@ public IncludeExclude(int partition, int numPartitions) { } - - /** * Read from a stream. */ @@ -583,7 +582,7 @@ private Automaton toAutomaton() { } public StringFilter convertToStringFilter(DocValueFormat format) { - if (isPartitionBased()){ + if (isPartitionBased()) { return new PartitionedStringFilter(); } return new SetAndRegexStringFilter(format); @@ -603,7 +602,7 @@ private static SortedSet parseForDocValues(SortedSet endUser } public OrdinalsFilter convertToOrdinalsFilter(DocValueFormat format) { - if (isPartitionBased()){ + if (isPartitionBased()) { return new PartitionedOrdinalsFilter(); } @@ -612,7 +611,7 @@ public OrdinalsFilter convertToOrdinalsFilter(DocValueFormat format) { public LongFilter convertToLongFilter(DocValueFormat format) { - if(isPartitionBased()){ + if (isPartitionBased()) { return new PartitionedLongFilter(); } @@ -633,7 +632,7 @@ public LongFilter convertToLongFilter(DocValueFormat format) { } public LongFilter convertToDoubleFilter() { - if(isPartitionBased()){ + if (isPartitionBased()) { return new PartitionedLongFilter(); } @@ -686,26 +685,36 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws @Override public int hashCode() { return Objects.hash( - include == null ? null : include.getOriginalString(), - exclude == null ? 
null : exclude.getOriginalString(), - includeValues, excludeValues, incZeroBasedPartition, incNumPartitions); + include == null ? null : include.getOriginalString(), + exclude == null ? null : exclude.getOriginalString(), + includeValues, + excludeValues, + incZeroBasedPartition, + incNumPartitions + ); } @Override public boolean equals(Object obj) { if (obj == null) { return false; - } if (getClass() != obj.getClass()) { + } + if (getClass() != obj.getClass()) { return false; } IncludeExclude other = (IncludeExclude) obj; - return Objects.equals(include == null ? null : include.getOriginalString(), - other.include == null ? null : other.include.getOriginalString()) - && Objects.equals(exclude == null ? null : exclude.getOriginalString(), - other.exclude == null ? null : other.exclude.getOriginalString()) - && Objects.equals(includeValues, other.includeValues) && Objects.equals(excludeValues, other.excludeValues) - && Objects.equals(incZeroBasedPartition, other.incZeroBasedPartition) - && Objects.equals(incNumPartitions, other.incNumPartitions); + return Objects.equals( + include == null ? null : include.getOriginalString(), + other.include == null ? null : other.include.getOriginalString() + ) + && Objects.equals( + exclude == null ? null : exclude.getOriginalString(), + other.exclude == null ? null : other.exclude.getOriginalString() + ) + && Objects.equals(includeValues, other.includeValues) + && Objects.equals(excludeValues, other.excludeValues) + && Objects.equals(incZeroBasedPartition, other.incZeroBasedPartition) + && Objects.equals(incNumPartitions, other.incNumPartitions); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalMappedRareTerms.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalMappedRareTerms.java index 7e7fe3185e80a..a26972509664e 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalMappedRareTerms.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalMappedRareTerms.java @@ -29,8 +29,8 @@ import java.util.function.Function; import java.util.stream.Collectors; -public abstract class InternalMappedRareTerms, B extends InternalRareTerms.Bucket> - extends InternalRareTerms { +public abstract class InternalMappedRareTerms, B extends InternalRareTerms.Bucket> extends + InternalRareTerms { protected DocValueFormat format; protected List buckets; @@ -40,8 +40,15 @@ public abstract class InternalMappedRareTerms, protected final Logger logger = LogManager.getLogger(getClass()); - InternalMappedRareTerms(String name, BucketOrder order, Map metadata, DocValueFormat format, - List buckets, long maxDocCount, SetBackedScalingCuckooFilter filter) { + InternalMappedRareTerms( + String name, + BucketOrder order, + Map metadata, + DocValueFormat format, + List buckets, + long maxDocCount, + SetBackedScalingCuckooFilter filter + ) { super(name, order, maxDocCount, metadata); this.format = format; this.buckets = buckets; @@ -91,21 +98,24 @@ public InternalAggregation reduce(List aggregations, Reduce if (referenceTerms == null && aggregation.getClass().equals(UnmappedRareTerms.class) == false) { referenceTerms = terms; } - if (referenceTerms != null && - referenceTerms.getClass().equals(terms.getClass()) == false && - terms.getClass().equals(UnmappedRareTerms.class) == false) { + if (referenceTerms != null + && referenceTerms.getClass().equals(terms.getClass()) == false + && terms.getClass().equals(UnmappedRareTerms.class) == false) 
{ // control gets into this loop when the same field name against which the query is executed // is of different types in different indices. - throw new AggregationExecutionException("Merging/Reducing the aggregations failed when computing the aggregation [" - + referenceTerms.getName() + "] because the field you gave in the aggregation query existed as two different " - + "types in two different indices"); + throw new AggregationExecutionException( + "Merging/Reducing the aggregations failed when computing the aggregation [" + + referenceTerms.getName() + + "] because the field you gave in the aggregation query existed as two different " + + "types in two different indices" + ); } for (B bucket : terms.getBuckets()) { List bucketList = buckets.computeIfAbsent(bucket.getKey(), k -> new ArrayList<>()); bucketList.add(bucket); } - SetBackedScalingCuckooFilter otherFilter = ((InternalMappedRareTerms)aggregation).getFilter(); + SetBackedScalingCuckooFilter otherFilter = ((InternalMappedRareTerms) aggregation).getFilter(); if (filter == null) { filter = new SetBackedScalingCuckooFilter(otherFilter.getThreshold(), otherFilter.getRng(), otherFilter.getFpp()); } @@ -151,10 +161,8 @@ public boolean equals(Object obj) { if (this == obj) return true; if (obj == null || getClass() != obj.getClass()) return false; if (super.equals(obj) == false) return false; - InternalMappedRareTerms that = (InternalMappedRareTerms) obj; - return Objects.equals(buckets, that.buckets) - && Objects.equals(format, that.format) - && Objects.equals(filter, that.filter); + InternalMappedRareTerms that = (InternalMappedRareTerms) obj; + return Objects.equals(buckets, that.buckets) && Objects.equals(format, that.format) && Objects.equals(filter, that.filter); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalMappedSignificantTerms.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalMappedSignificantTerms.java index fe0752b7edc72..8eb9662517fc9 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalMappedSignificantTerms.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalMappedSignificantTerms.java @@ -23,9 +23,8 @@ import java.util.stream.Collectors; public abstract class InternalMappedSignificantTerms< - A extends InternalMappedSignificantTerms, - B extends InternalSignificantTerms.Bucket> - extends InternalSignificantTerms { + A extends InternalMappedSignificantTerms, + B extends InternalSignificantTerms.Bucket> extends InternalSignificantTerms { protected final DocValueFormat format; protected final long subsetSize; @@ -34,9 +33,17 @@ public abstract class InternalMappedSignificantTerms< protected final List buckets; protected Map bucketMap; - protected InternalMappedSignificantTerms(String name, int requiredSize, long minDocCount, - Map metadata, DocValueFormat format, long subsetSize, long supersetSize, - SignificanceHeuristic significanceHeuristic, List buckets) { + protected InternalMappedSignificantTerms( + String name, + int requiredSize, + long minDocCount, + Map metadata, + DocValueFormat format, + long subsetSize, + long supersetSize, + SignificanceHeuristic significanceHeuristic, + List buckets + ) { super(name, requiredSize, minDocCount, metadata); this.format = format; this.buckets = buckets; @@ -104,11 +111,11 @@ public boolean equals(Object obj) { InternalMappedSignificantTerms that = (InternalMappedSignificantTerms) obj; return 
Objects.equals(format, that.format) - && subsetSize == that.subsetSize - && supersetSize == that.supersetSize - && Objects.equals(significanceHeuristic, that.significanceHeuristic) - && Objects.equals(buckets, that.buckets) - && Objects.equals(bucketMap, that.bucketMap); + && subsetSize == that.subsetSize + && supersetSize == that.supersetSize + && Objects.equals(significanceHeuristic, that.significanceHeuristic) + && Objects.equals(buckets, that.buckets) + && Objects.equals(bucketMap, that.bucketMap); } @Override @@ -122,7 +129,7 @@ public XContentBuilder doXContentBody(XContentBuilder builder, Params params) th builder.field(BG_COUNT, supersetSize); builder.startArray(CommonFields.BUCKETS.getPreferredName()); for (Bucket bucket : buckets) { - //There is a condition (presumably when only one shard has a bucket?) where reduce is not called + // There is a condition (presumably when only one shard has a bucket?) where reduce is not called // and I end up with buckets that contravene the user's min_doc_count criteria in my reducer if (bucket.subsetDf >= minDocCount) { bucket.toXContent(builder, params); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalMappedTerms.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalMappedTerms.java index b9a519c8f7f6b..e42a711511d25 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalMappedTerms.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalMappedTerms.java @@ -35,9 +35,20 @@ public abstract class InternalMappedTerms, B exten protected Long docCountError; - protected InternalMappedTerms(String name, BucketOrder reduceOrder, BucketOrder order, int requiredSize, long minDocCount, - Map metadata, DocValueFormat format, int shardSize, - boolean showTermDocCountError, long otherDocCount, List buckets, Long docCountError) { + protected InternalMappedTerms( + String name, + BucketOrder reduceOrder, + BucketOrder order, + int requiredSize, + long minDocCount, + Map metadata, + DocValueFormat format, + int shardSize, + boolean showTermDocCountError, + long otherDocCount, + List buckets, + Long docCountError + ) { super(name, reduceOrder, order, requiredSize, minDocCount, metadata); this.format = format; this.shardSize = shardSize; @@ -126,13 +137,13 @@ public boolean equals(Object obj) { if (obj == null || getClass() != obj.getClass()) return false; if (super.equals(obj) == false) return false; - InternalMappedTerms that = (InternalMappedTerms) obj; + InternalMappedTerms that = (InternalMappedTerms) obj; return Objects.equals(buckets, that.buckets) - && Objects.equals(format, that.format) - && Objects.equals(otherDocCount, that.otherDocCount) - && Objects.equals(showTermDocCountError, that.showTermDocCountError) - && Objects.equals(shardSize, that.shardSize) - && Objects.equals(docCountError, that.docCountError); + && Objects.equals(format, that.format) + && Objects.equals(otherDocCount, that.otherDocCount) + && Objects.equals(showTermDocCountError, that.showTermDocCountError) + && Objects.equals(shardSize, that.shardSize) + && Objects.equals(docCountError, that.docCountError); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalRareTerms.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalRareTerms.java index 9023e9cd3e7b8..b453b9e0189ae 100644 --- 
a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalRareTerms.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalRareTerms.java @@ -26,11 +26,15 @@ import java.util.Map; import java.util.Objects; -public abstract class InternalRareTerms, B extends InternalRareTerms.Bucket> - extends InternalMultiBucketAggregation implements RareTerms { +public abstract class InternalRareTerms, B extends InternalRareTerms.Bucket> extends + InternalMultiBucketAggregation + implements + RareTerms { public abstract static class Bucket> extends InternalMultiBucketAggregation.InternalBucket - implements RareTerms.Bucket, KeyComparable { + implements + RareTerms.Bucket, + KeyComparable { /** * Reads a bucket. Should be a constructor reference. */ @@ -97,8 +101,7 @@ public boolean equals(Object obj) { return false; } Bucket that = (Bucket) obj; - return Objects.equals(docCount, that.docCount) - && Objects.equals(aggregations, that.aggregations); + return Objects.equals(docCount, that.docCount) && Objects.equals(aggregations, that.aggregations); } @Override @@ -172,9 +175,8 @@ public boolean equals(Object obj) { if (this == obj) return true; if (obj == null || getClass() != obj.getClass()) return false; if (super.equals(obj) == false) return false; - InternalRareTerms that = (InternalRareTerms) obj; - return Objects.equals(maxDocCount, that.maxDocCount) - && Objects.equals(order, that.order); + InternalRareTerms that = (InternalRareTerms) obj; + return Objects.equals(maxDocCount, that.maxDocCount) && Objects.equals(order, that.order); } @Override @@ -182,8 +184,8 @@ public int hashCode() { return Objects.hash(super.hashCode(), maxDocCount, order); } - protected static XContentBuilder doXContentCommon(XContentBuilder builder, Params params, - List> buckets) throws IOException { + protected static XContentBuilder doXContentCommon(XContentBuilder builder, Params params, List> buckets) + throws IOException { builder.startArray(CommonFields.BUCKETS.getPreferredName()); for (Bucket bucket : buckets) { bucket.toXContent(builder, params); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalSignificantTerms.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalSignificantTerms.java index 08a70bdb1de9b..1f5feb9d7367f 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalSignificantTerms.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalSignificantTerms.java @@ -29,14 +29,17 @@ * Result of the significant terms aggregation. */ public abstract class InternalSignificantTerms, B extends InternalSignificantTerms.Bucket> - extends InternalMultiBucketAggregation implements SignificantTerms { + extends InternalMultiBucketAggregation + implements + SignificantTerms { public static final String SCORE = "score"; public static final String BG_COUNT = "bg_count"; @SuppressWarnings("PMD.ConstructorCallsOverridableMethod") public abstract static class Bucket> extends InternalMultiBucketAggregation.InternalBucket - implements SignificantTerms.Bucket { + implements + SignificantTerms.Bucket { /** * Reads a bucket. Should be a constructor reference. 
*/ @@ -54,8 +57,14 @@ public interface Reader> { protected InternalAggregations aggregations; final transient DocValueFormat format; - protected Bucket(long subsetDf, long subsetSize, long supersetDf, long supersetSize, - InternalAggregations aggregations, DocValueFormat format) { + protected Bucket( + long subsetDf, + long subsetSize, + long supersetDf, + long supersetSize, + InternalAggregations aggregations, + DocValueFormat format + ) { this.subsetSize = subsetSize; this.supersetSize = supersetSize; this.subsetDf = subsetDf; @@ -125,10 +134,10 @@ public boolean equals(Object o) { } Bucket that = (Bucket) o; - return bucketOrd == that.bucketOrd && - Double.compare(that.score, score) == 0 && - Objects.equals(aggregations, that.aggregations) && - Objects.equals(format, that.format); + return bucketOrd == that.bucketOrd + && Double.compare(that.score, score) == 0 + && Objects.equals(aggregations, that.aggregations) + && Objects.equals(format, that.format); } @Override @@ -204,8 +213,16 @@ public InternalAggregation reduce(List aggregations, Reduce } // Adjust the buckets with the global stats representing the // total size of the pots from which the stats are drawn - existingBuckets.add(createBucket(bucket.getSubsetDf(), globalSubsetSize, bucket.getSupersetDf(), globalSupersetSize, - bucket.aggregations, bucket)); + existingBuckets.add( + createBucket( + bucket.getSubsetDf(), + globalSubsetSize, + bucket.getSupersetDf(), + globalSupersetSize, + bucket.aggregations, + bucket + ) + ); } } SignificanceHeuristic heuristic = getSignificanceHeuristic().rewrite(reduceContext); @@ -248,8 +265,14 @@ protected B reduceBucket(List buckets, ReduceContext context) { return createBucket(subsetDf, buckets.get(0).subsetSize, supersetDf, buckets.get(0).supersetSize, aggs, buckets.get(0)); } - abstract B createBucket(long subsetDf, long subsetSize, long supersetDf, long supersetSize, - InternalAggregations aggregations, B prototype); + abstract B createBucket( + long subsetDf, + long subsetSize, + long supersetDf, + long supersetSize, + InternalAggregations aggregations, + B prototype + ); protected abstract A create(long subsetSize, long supersetSize, List buckets); @@ -276,7 +299,6 @@ public boolean equals(Object obj) { if (super.equals(obj) == false) return false; InternalSignificantTerms that = (InternalSignificantTerms) obj; - return Objects.equals(minDocCount, that.minDocCount) - && Objects.equals(requiredSize, that.requiredSize); + return Objects.equals(minDocCount, that.minDocCount) && Objects.equals(requiredSize, that.requiredSize); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalTerms.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalTerms.java index 2aa8b90a84db8..cef41afb07346 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalTerms.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalTerms.java @@ -8,9 +8,9 @@ package org.elasticsearch.search.aggregations.bucket.terms; import org.elasticsearch.Version; -import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.Aggregations; @@ -24,15 +24,14 @@ import java.util.Map; import 
java.util.Objects; -public abstract class InternalTerms, B extends InternalTerms.Bucket> - extends AbstractInternalTerms implements Terms { - +public abstract class InternalTerms, B extends InternalTerms.Bucket> extends AbstractInternalTerms + implements + Terms { public static final ParseField DOC_COUNT_ERROR_UPPER_BOUND_FIELD_NAME = new ParseField("doc_count_error_upper_bound"); public static final ParseField SUM_OF_OTHER_DOC_COUNTS = new ParseField("sum_other_doc_count"); - public abstract static class Bucket> extends AbstractTermsBucket - implements Terms.Bucket, KeyComparable { + public abstract static class Bucket> extends AbstractTermsBucket implements Terms.Bucket, KeyComparable { /** * Reads a bucket. Should be a constructor reference. */ @@ -49,8 +48,13 @@ public interface Reader> { protected final boolean showDocCountError; protected final DocValueFormat format; - protected Bucket(long docCount, InternalAggregations aggregations, boolean showDocCountError, long docCountError, - DocValueFormat formatter) { + protected Bucket( + long docCount, + InternalAggregations aggregations, + boolean showDocCountError, + long docCountError, + DocValueFormat formatter + ) { this.showDocCountError = showDocCountError; this.format = formatter; this.docCount = docCount; @@ -142,8 +146,8 @@ public boolean equals(Object obj) { // of the parent terms aggregation object that are only copied here // for serialization purposes return Objects.equals(docCount, that.docCount) - && Objects.equals(docCountError, that.docCountError) - && Objects.equals(aggregations, that.aggregations); + && Objects.equals(docCountError, that.docCountError) + && Objects.equals(aggregations, that.aggregations); } @Override @@ -166,12 +170,14 @@ public int hashCode() { * @param minDocCount The minimum number of documents allowed per bucket. * @param metadata The metadata associated with the aggregation. */ - protected InternalTerms(String name, - BucketOrder reduceOrder, - BucketOrder order, - int requiredSize, - long minDocCount, - Map metadata) { + protected InternalTerms( + String name, + BucketOrder reduceOrder, + BucketOrder order, + int requiredSize, + long minDocCount, + Map metadata + ) { super(name, metadata); this.reduceOrder = reduceOrder; this.order = order; @@ -183,15 +189,15 @@ protected InternalTerms(String name, * Read from a stream. 
*/ protected InternalTerms(StreamInput in) throws IOException { - super(in); - reduceOrder = InternalOrder.Streams.readOrder(in); - if (in.getVersion().onOrAfter(Version.V_7_10_0)) { - order = InternalOrder.Streams.readOrder(in); - } else { - order = reduceOrder; - } - requiredSize = readSize(in); - minDocCount = in.readVLong(); + super(in); + reduceOrder = InternalOrder.Streams.readOrder(in); + if (in.getVersion().onOrAfter(Version.V_7_10_0)) { + order = InternalOrder.Streams.readOrder(in); + } else { + order = reduceOrder; + } + requiredSize = readSize(in); + minDocCount = in.readVLong(); } @Override @@ -245,11 +251,11 @@ public boolean equals(Object obj) { if (obj == null || getClass() != obj.getClass()) return false; if (super.equals(obj) == false) return false; - InternalTerms that = (InternalTerms) obj; + InternalTerms that = (InternalTerms) obj; return Objects.equals(minDocCount, that.minDocCount) - && Objects.equals(reduceOrder, that.reduceOrder) - && Objects.equals(order, that.order) - && Objects.equals(requiredSize, that.requiredSize); + && Objects.equals(reduceOrder, that.reduceOrder) + && Objects.equals(order, that.order) + && Objects.equals(requiredSize, that.requiredSize); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/LongKeyedBucketOrds.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/LongKeyedBucketOrds.java index 3a93ea70cbc21..bbcad314e87be 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/LongKeyedBucketOrds.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/LongKeyedBucketOrds.java @@ -8,10 +8,10 @@ package org.elasticsearch.search.aggregations.bucket.terms; -import org.elasticsearch.core.Releasable; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.LongHash; import org.elasticsearch.common.util.LongLongHash; +import org.elasticsearch.core.Releasable; import org.elasticsearch.search.aggregations.CardinalityUpperBound; import java.util.Locale; @@ -70,7 +70,7 @@ private LongKeyedBucketOrds() {} * Find the {@code owningBucketOrd, value} pair. Return the ord for * their bucket if they have been added or {@code -1} if they haven't. */ - public abstract long find(long owningBucketOrd, long value); + public abstract long find(long owningBucketOrd, long value); /** * Returns the value currently associated with the bucket ordinal. @@ -100,6 +100,7 @@ private LongKeyedBucketOrds() {} * {@link BucketOrdsEnum#next()} to move it to the first value. */ public abstract BucketOrdsEnum ordsEnum(long owningBucketOrd); + /** * An iterator for buckets inside a particular {@code owningBucketOrd}. */ @@ -110,10 +111,12 @@ public interface BucketOrdsEnum { * {@code false} if there isn't */ boolean next(); + /** * The ordinal of the current value. */ long ord(); + /** * The current value. 
*/ @@ -124,11 +127,19 @@ public interface BucketOrdsEnum { */ BucketOrdsEnum EMPTY = new BucketOrdsEnum() { @Override - public boolean next() { return false; } + public boolean next() { + return false; + } + @Override - public long ord() { return 0; } + public long ord() { + return 0; + } + @Override - public long value() { return 0; } + public long value() { + return 0; + } }; } @@ -155,7 +166,6 @@ public long find(long owningBucketOrd, long value) { return ords.find(value); } - @Override public long get(long ordinal) { return ords.get(ordinal); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/LongRareTerms.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/LongRareTerms.java index 43177baee6b79..435a89b94f924 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/LongRareTerms.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/LongRareTerms.java @@ -7,7 +7,6 @@ */ package org.elasticsearch.search.aggregations.bucket.terms; - import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.util.SetBackedScalingCuckooFilter; @@ -88,8 +87,15 @@ public int hashCode() { } } - LongRareTerms(String name, BucketOrder order, Map metadata, DocValueFormat format, - List buckets, long maxDocCount, SetBackedScalingCuckooFilter filter) { + LongRareTerms( + String name, + BucketOrder order, + Map metadata, + DocValueFormat format, + List buckets, + long maxDocCount, + SetBackedScalingCuckooFilter filter + ) { super(name, order, metadata, format, buckets, maxDocCount, filter); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/LongRareTermsAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/LongRareTermsAggregator.java index f24064efaa42c..8f521608b7e36 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/LongRareTermsAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/LongRareTermsAggregator.java @@ -9,9 +9,9 @@ import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.SortedNumericDocValues; -import org.elasticsearch.core.Releasables; import org.elasticsearch.common.util.LongHash; import org.elasticsearch.common.util.SetBackedScalingCuckooFilter; +import org.elasticsearch.core.Releasables; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.Aggregator; import org.elasticsearch.search.aggregations.AggregatorFactories; @@ -53,16 +53,7 @@ public class LongRareTermsAggregator extends AbstractRareTermsAggregator { CardinalityUpperBound cardinality, Map metadata ) throws IOException { - super( - name, - factories, - aggregationContext, - parent, - metadata, - maxDocCount, - precision, - format - ); + super(name, factories, aggregationContext, parent, metadata, maxDocCount, precision, format); this.valuesSource = valuesSource; this.filter = filter; this.bucketOrds = LongKeyedBucketOrds.build(bigArrays(), cardinality); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/LongTerms.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/LongTerms.java index e87de401c8b0f..a10a2087902ce 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/LongTerms.java +++ 
b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/LongTerms.java @@ -30,8 +30,14 @@ public class LongTerms extends InternalMappedTerms public static class Bucket extends InternalTerms.Bucket { long term; - public Bucket(long term, long docCount, InternalAggregations aggregations, boolean showDocCountError, long docCountError, - DocValueFormat format) { + public Bucket( + long term, + long docCount, + InternalAggregations aggregations, + boolean showDocCountError, + long docCountError, + DocValueFormat format + ) { super(docCount, aggregations, showDocCountError, docCountError, format); this.term = term; } @@ -101,11 +107,34 @@ public int hashCode() { } } - public LongTerms(String name, BucketOrder reduceOrder, BucketOrder order, int requiredSize, long minDocCount, - Map metadata, DocValueFormat format, int shardSize, boolean showTermDocCountError, long otherDocCount, - List buckets, Long docCountError) { - super(name, reduceOrder, order, requiredSize, minDocCount, metadata, format, shardSize, showTermDocCountError, - otherDocCount, buckets, docCountError); + public LongTerms( + String name, + BucketOrder reduceOrder, + BucketOrder order, + int requiredSize, + long minDocCount, + Map metadata, + DocValueFormat format, + int shardSize, + boolean showTermDocCountError, + long otherDocCount, + List buckets, + Long docCountError + ) { + super( + name, + reduceOrder, + order, + requiredSize, + minDocCount, + metadata, + format, + shardSize, + showTermDocCountError, + otherDocCount, + buckets, + docCountError + ); } /** @@ -122,20 +151,50 @@ public String getWriteableName() { @Override public LongTerms create(List buckets) { - return new LongTerms(name, reduceOrder, order, requiredSize, minDocCount, metadata, format, shardSize, - showTermDocCountError, otherDocCount, buckets, docCountError); + return new LongTerms( + name, + reduceOrder, + order, + requiredSize, + minDocCount, + metadata, + format, + shardSize, + showTermDocCountError, + otherDocCount, + buckets, + docCountError + ); } @Override public Bucket createBucket(InternalAggregations aggregations, Bucket prototype) { - return new Bucket(prototype.term, prototype.docCount, aggregations, prototype.showDocCountError, prototype.docCountError, - prototype.format); + return new Bucket( + prototype.term, + prototype.docCount, + aggregations, + prototype.showDocCountError, + prototype.docCountError, + prototype.format + ); } @Override protected LongTerms create(String name, List buckets, BucketOrder reduceOrder, long docCountError, long otherDocCount) { - return new LongTerms(name, reduceOrder, order, requiredSize, minDocCount, getMetadata(), format, shardSize, - showTermDocCountError, otherDocCount, buckets, docCountError); + return new LongTerms( + name, + reduceOrder, + order, + requiredSize, + minDocCount, + getMetadata(), + format, + shardSize, + showTermDocCountError, + otherDocCount, + buckets, + docCountError + ); } @Override @@ -181,14 +240,30 @@ static DoubleTerms convertLongTermsToDouble(LongTerms longTerms, DocValueFormat List buckets = longTerms.getBuckets(); List newBuckets = new ArrayList<>(); for (Terms.Bucket bucket : buckets) { - newBuckets.add(new DoubleTerms.Bucket(bucket.getKeyAsNumber().doubleValue(), - bucket.getDocCount(), (InternalAggregations) bucket.getAggregations(), longTerms.showTermDocCountError, - longTerms.showTermDocCountError ? 
bucket.getDocCountError() : 0, decimalFormat)); + newBuckets.add( + new DoubleTerms.Bucket( + bucket.getKeyAsNumber().doubleValue(), + bucket.getDocCount(), + (InternalAggregations) bucket.getAggregations(), + longTerms.showTermDocCountError, + longTerms.showTermDocCountError ? bucket.getDocCountError() : 0, + decimalFormat + ) + ); } - return new DoubleTerms(longTerms.getName(), longTerms.reduceOrder, longTerms.order, longTerms.requiredSize, + return new DoubleTerms( + longTerms.getName(), + longTerms.reduceOrder, + longTerms.order, + longTerms.requiredSize, longTerms.minDocCount, - longTerms.metadata, longTerms.format, longTerms.shardSize, - longTerms.showTermDocCountError, longTerms.otherDocCount, - newBuckets, longTerms.docCountError); + longTerms.metadata, + longTerms.format, + longTerms.shardSize, + longTerms.showTermDocCountError, + longTerms.otherDocCount, + newBuckets, + longTerms.docCountError + ); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/MapStringTermsAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/MapStringTermsAggregator.java index e61cad1458915..8ba01b3fae754 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/MapStringTermsAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/MapStringTermsAggregator.java @@ -12,9 +12,9 @@ import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRefBuilder; import org.apache.lucene.util.PriorityQueue; +import org.elasticsearch.common.util.LongArray; import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; -import org.elasticsearch.common.util.LongArray; import org.elasticsearch.index.fielddata.SortedBinaryDocValues; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.Aggregator; @@ -161,6 +161,7 @@ LeafBucketCollector getLeafCollector( CollectConsumer consumer ) throws IOException; } + @FunctionalInterface public interface CollectConsumer { void accept(LeafBucketCollector sub, int doc, long owningBucketOrd, BytesRef bytes) throws IOException; @@ -444,9 +445,20 @@ StringTerms buildResult(long owningBucketOrd, long otherDocCount, StringTerms.Bu } else { reduceOrder = order; } - return new StringTerms(name, reduceOrder, order, bucketCountThresholds.getRequiredSize(), - bucketCountThresholds.getMinDocCount(), metadata(), format, bucketCountThresholds.getShardSize(), showTermDocCountError, - otherDocCount, Arrays.asList(topBuckets), null); + return new StringTerms( + name, + reduceOrder, + order, + bucketCountThresholds.getRequiredSize(), + bucketCountThresholds.getMinDocCount(), + metadata(), + format, + bucketCountThresholds.getShardSize(), + showTermDocCountError, + otherDocCount, + Arrays.asList(topBuckets), + null + ); } @Override @@ -577,4 +589,3 @@ public void close() { } } } - diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/NumericTermsAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/NumericTermsAggregator.java index 4422f0349d4fe..02e3603167f2f 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/NumericTermsAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/NumericTermsAggregator.java @@ -12,9 +12,9 @@ import org.apache.lucene.search.ScoreMode; import org.apache.lucene.util.NumericUtils; import org.apache.lucene.util.PriorityQueue; +import 
org.elasticsearch.common.util.LongArray; import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; -import org.elasticsearch.common.util.LongArray; import org.elasticsearch.index.fielddata.FieldData; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.Aggregator; @@ -64,8 +64,7 @@ public NumericTermsAggregator( IncludeExclude.LongFilter longFilter, CardinalityUpperBound cardinality, Map metadata - ) - throws IOException { + ) throws IOException { super(name, factories, context, parent, bucketCountThresholds, order, format, subAggCollectMode, metadata); this.resultStrategy = resultStrategy.apply(this); // ResultStrategy needs a reference to the Aggregator to do its job. this.valuesSource = valuesSource; @@ -178,8 +177,7 @@ private InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws InternalAggregation[] result = new InternalAggregation[owningBucketOrds.length]; for (int ordIdx = 0; ordIdx < owningBucketOrds.length; ordIdx++) { - result[ordIdx] = buildResult(owningBucketOrds[ordIdx], otherDocCounts[ordIdx], - topBucketsPerOrd[ordIdx]); + result[ordIdx] = buildResult(owningBucketOrds[ordIdx], otherDocCounts[ordIdx], topBucketsPerOrd[ordIdx]); } return result; } @@ -592,5 +590,4 @@ public void close() { } } - } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/ParsedDoubleTerms.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/ParsedDoubleTerms.java index bf52fc3049f4d..d949ebb0f91cf 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/ParsedDoubleTerms.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/ParsedDoubleTerms.java @@ -21,8 +21,11 @@ public String getType() { return DoubleTerms.NAME; } - private static final ObjectParser PARSER = - new ObjectParser<>(ParsedDoubleTerms.class.getSimpleName(), true, ParsedDoubleTerms::new); + private static final ObjectParser PARSER = new ObjectParser<>( + ParsedDoubleTerms.class.getSimpleName(), + true, + ParsedDoubleTerms::new + ); static { declareParsedTermsFields(PARSER, ParsedBucket::fromXContent); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/ParsedLongRareTerms.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/ParsedLongRareTerms.java index 2aa09e467b827..603c0685462ce 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/ParsedLongRareTerms.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/ParsedLongRareTerms.java @@ -8,7 +8,6 @@ package org.elasticsearch.search.aggregations.bucket.terms; - import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; @@ -21,8 +20,11 @@ public String getType() { return LongRareTerms.NAME; } - private static final ObjectParser PARSER = - new ObjectParser<>(ParsedLongRareTerms.class.getSimpleName(), true, ParsedLongRareTerms::new); + private static final ObjectParser PARSER = new ObjectParser<>( + ParsedLongRareTerms.class.getSimpleName(), + true, + ParsedLongRareTerms::new + ); static { declareParsedTermsFields(PARSER, ParsedBucket::fromXContent); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/ParsedLongTerms.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/ParsedLongTerms.java index 
16f52634b4865..1af68873961bc 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/ParsedLongTerms.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/ParsedLongTerms.java @@ -21,8 +21,11 @@ public String getType() { return LongTerms.NAME; } - private static final ObjectParser PARSER = - new ObjectParser<>(ParsedLongTerms.class.getSimpleName(), true, ParsedLongTerms::new); + private static final ObjectParser PARSER = new ObjectParser<>( + ParsedLongTerms.class.getSimpleName(), + true, + ParsedLongTerms::new + ); static { declareParsedTermsFields(PARSER, ParsedBucket::fromXContent); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/ParsedRareTerms.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/ParsedRareTerms.java index 898d4c7b9d522..cee9d39ee16f3 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/ParsedRareTerms.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/ParsedRareTerms.java @@ -49,8 +49,10 @@ protected XContentBuilder doXContentBody(XContentBuilder builder, Params params) return builder; } - static void declareParsedTermsFields(final ObjectParser objectParser, - final CheckedFunction bucketParser) { + static void declareParsedTermsFields( + final ObjectParser objectParser, + final CheckedFunction bucketParser + ) { declareMultiBucketAggregationFields(objectParser, bucketParser, bucketParser); } @@ -66,10 +68,11 @@ public final XContentBuilder toXContent(XContentBuilder builder, Params params) return builder; } - - static B parseRareTermsBucketXContent(final XContentParser parser, final Supplier bucketSupplier, - final CheckedBiConsumer keyConsumer) - throws IOException { + static B parseRareTermsBucketXContent( + final XContentParser parser, + final Supplier bucketSupplier, + final CheckedBiConsumer keyConsumer + ) throws IOException { final B bucket = bucketSupplier.get(); final List aggregations = new ArrayList<>(); @@ -88,8 +91,12 @@ static B parseRareTermsBucketXContent(final XContentPar bucket.setDocCount(parser.longValue()); } } else if (token == XContentParser.Token.START_OBJECT) { - XContentParserUtils.parseTypedKeysObject(parser, Aggregation.TYPED_KEYS_DELIMITER, Aggregation.class, - aggregations::add); + XContentParserUtils.parseTypedKeysObject( + parser, + Aggregation.TYPED_KEYS_DELIMITER, + Aggregation.class, + aggregations::add + ); } } bucket.setAggregations(new Aggregations(aggregations)); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/ParsedSignificantLongTerms.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/ParsedSignificantLongTerms.java index af150a63a7c6d..4980f70f2e50c 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/ParsedSignificantLongTerms.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/ParsedSignificantLongTerms.java @@ -21,8 +21,11 @@ public String getType() { return SignificantLongTerms.NAME; } - private static final ObjectParser PARSER = - new ObjectParser<>(ParsedSignificantLongTerms.class.getSimpleName(), true, ParsedSignificantLongTerms::new); + private static final ObjectParser PARSER = new ObjectParser<>( + ParsedSignificantLongTerms.class.getSimpleName(), + true, + ParsedSignificantLongTerms::new + ); static { declareParsedSignificantTermsFields(PARSER, ParsedBucket::fromXContent); } diff --git 
a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/ParsedSignificantStringTerms.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/ParsedSignificantStringTerms.java index 4f89d9164fdc2..635cfdbed9555 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/ParsedSignificantStringTerms.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/ParsedSignificantStringTerms.java @@ -23,8 +23,11 @@ public String getType() { return SignificantStringTerms.NAME; } - private static final ObjectParser PARSER = - new ObjectParser<>(ParsedSignificantStringTerms.class.getSimpleName(), true, ParsedSignificantStringTerms::new); + private static final ObjectParser PARSER = new ObjectParser<>( + ParsedSignificantStringTerms.class.getSimpleName(), + true, + ParsedSignificantStringTerms::new + ); static { declareParsedSignificantTermsFields(PARSER, ParsedBucket::fromXContent); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/ParsedSignificantTerms.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/ParsedSignificantTerms.java index 059a4f65b73df..bbed7ab4ba2f6 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/ParsedSignificantTerms.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/ParsedSignificantTerms.java @@ -29,7 +29,8 @@ import java.util.stream.Collectors; public abstract class ParsedSignificantTerms extends ParsedMultiBucketAggregation - implements SignificantTerms { + implements + SignificantTerms { private Map bucketMap; protected long subsetSize; @@ -73,8 +74,10 @@ protected XContentBuilder doXContentBody(XContentBuilder builder, Params params) return builder; } - static T parseSignificantTermsXContent(final CheckedSupplier aggregationSupplier, - final String name) throws IOException { + static T parseSignificantTermsXContent( + final CheckedSupplier aggregationSupplier, + final String name + ) throws IOException { T aggregation = aggregationSupplier.get(); aggregation.setName(name); for (ParsedBucket bucket : aggregation.buckets) { @@ -89,9 +92,11 @@ static void declareParsedSignificantTermsFields( final CheckedFunction bucketParser ) { declareMultiBucketAggregationFields(objectParser, bucketParser, bucketParser); - objectParser.declareLong((parsedTerms, value) -> parsedTerms.subsetSize = value , CommonFields.DOC_COUNT); - objectParser.declareLong((parsedTerms, value) -> parsedTerms.supersetSize = value , - new ParseField(InternalMappedSignificantTerms.BG_COUNT)); + objectParser.declareLong((parsedTerms, value) -> parsedTerms.subsetSize = value, CommonFields.DOC_COUNT); + objectParser.declareLong( + (parsedTerms, value) -> parsedTerms.supersetSize = value, + new ParseField(InternalMappedSignificantTerms.BG_COUNT) + ); } public abstract static class ParsedBucket extends ParsedMultiBucketAggregation.ParsedBucket implements SignificantTerms.Bucket { @@ -147,8 +152,11 @@ public final XContentBuilder toXContent(XContentBuilder builder, Params params) @Override protected abstract XContentBuilder keyToXContent(XContentBuilder builder) throws IOException; - static B parseSignificantTermsBucketXContent(final XContentParser parser, final B bucket, - final CheckedBiConsumer keyConsumer) throws IOException { + static B parseSignificantTermsBucketXContent( + final XContentParser parser, + final B bucket, + final CheckedBiConsumer keyConsumer + ) throws IOException { final List 
aggregations = new ArrayList<>(); XContentParser.Token token; @@ -171,8 +179,12 @@ static B parseSignificantTermsBucketXContent(final XCon bucket.supersetDf = parser.longValue(); } } else if (token == XContentParser.Token.START_OBJECT) { - XContentParserUtils.parseTypedKeysObject(parser, Aggregation.TYPED_KEYS_DELIMITER, Aggregation.class, - aggregations::add); + XContentParserUtils.parseTypedKeysObject( + parser, + Aggregation.TYPED_KEYS_DELIMITER, + Aggregation.class, + aggregations::add + ); } } bucket.setAggregations(new Aggregations(aggregations)); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/ParsedStringRareTerms.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/ParsedStringRareTerms.java index bf0a1f931891b..d89bcfb501e89 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/ParsedStringRareTerms.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/ParsedStringRareTerms.java @@ -8,22 +8,25 @@ package org.elasticsearch.search.aggregations.bucket.terms; -import java.io.IOException; -import java.nio.CharBuffer; - import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; +import java.io.IOException; +import java.nio.CharBuffer; + public class ParsedStringRareTerms extends ParsedRareTerms { @Override public String getType() { return StringRareTerms.NAME; } - private static final ObjectParser PARSER = - new ObjectParser<>(ParsedStringRareTerms.class.getSimpleName(), true, ParsedStringRareTerms::new); + private static final ObjectParser PARSER = new ObjectParser<>( + ParsedStringRareTerms.class.getSimpleName(), + true, + ParsedStringRareTerms::new + ); static { declareParsedTermsFields(PARSER, ParsedBucket::fromXContent); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/ParsedStringTerms.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/ParsedStringTerms.java index d34b4fc807cbc..c0fd24b694acd 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/ParsedStringTerms.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/ParsedStringTerms.java @@ -23,8 +23,11 @@ public String getType() { return StringTerms.NAME; } - private static final ObjectParser PARSER = - new ObjectParser<>(ParsedStringTerms.class.getSimpleName(), true, ParsedStringTerms::new); + private static final ObjectParser PARSER = new ObjectParser<>( + ParsedStringTerms.class.getSimpleName(), + true, + ParsedStringTerms::new + ); static { declareParsedTermsFields(PARSER, ParsedBucket::fromXContent); } @@ -70,13 +73,13 @@ protected XContentBuilder keyToXContent(XContentBuilder builder) throws IOExcept static ParsedBucket fromXContent(XContentParser parser) throws IOException { return parseTermsBucketXContent(parser, ParsedBucket::new, (p, bucket) -> { - CharBuffer cb = p.charBufferOrNull(); - if (cb == null) { - bucket.key = null; - } else { - bucket.key = new BytesRef(cb); - } - }); + CharBuffer cb = p.charBufferOrNull(); + if (cb == null) { + bucket.key = null; + } else { + bucket.key = new BytesRef(cb); + } + }); } } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/ParsedTerms.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/ParsedTerms.java index 
6aa51a9a1b4ce..4d17f9e49aba7 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/ParsedTerms.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/ParsedTerms.java @@ -9,11 +9,11 @@ package org.elasticsearch.search.aggregations.bucket.terms; import org.elasticsearch.common.CheckedBiConsumer; -import org.elasticsearch.core.CheckedFunction; import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParserUtils; +import org.elasticsearch.core.CheckedFunction; import org.elasticsearch.search.aggregations.Aggregation; import org.elasticsearch.search.aggregations.Aggregations; import org.elasticsearch.search.aggregations.ParsedMultiBucketAggregation; @@ -68,13 +68,16 @@ protected XContentBuilder doXContentBody(XContentBuilder builder, Params params) return builder; } - static void declareParsedTermsFields(final ObjectParser objectParser, - final CheckedFunction bucketParser) { + static void declareParsedTermsFields( + final ObjectParser objectParser, + final CheckedFunction bucketParser + ) { declareMultiBucketAggregationFields(objectParser, bucketParser, bucketParser); - objectParser.declareLong((parsedTerms, value) -> parsedTerms.docCountErrorUpperBound = value , - DOC_COUNT_ERROR_UPPER_BOUND_FIELD_NAME); - objectParser.declareLong((parsedTerms, value) -> parsedTerms.sumOtherDocCount = value, - SUM_OF_OTHER_DOC_COUNTS); + objectParser.declareLong( + (parsedTerms, value) -> parsedTerms.docCountErrorUpperBound = value, + DOC_COUNT_ERROR_UPPER_BOUND_FIELD_NAME + ); + objectParser.declareLong((parsedTerms, value) -> parsedTerms.sumOtherDocCount = value, SUM_OF_OTHER_DOC_COUNTS); } public abstract static class ParsedBucket extends ParsedMultiBucketAggregation.ParsedBucket implements Terms.Bucket { @@ -100,10 +103,11 @@ public final XContentBuilder toXContent(XContentBuilder builder, Params params) return builder; } - - static B parseTermsBucketXContent(final XContentParser parser, final Supplier bucketSupplier, - final CheckedBiConsumer keyConsumer) - throws IOException { + static B parseTermsBucketXContent( + final XContentParser parser, + final Supplier bucketSupplier, + final CheckedBiConsumer keyConsumer + ) throws IOException { final B bucket = bucketSupplier.get(); final List aggregations = new ArrayList<>(); @@ -125,8 +129,12 @@ static B parseTermsBucketXContent(final XContentParser bucket.showDocCountError = true; } } else if (token == XContentParser.Token.START_OBJECT) { - XContentParserUtils.parseTypedKeysObject(parser, Aggregation.TYPED_KEYS_DELIMITER, Aggregation.class, - aggregations::add); + XContentParserUtils.parseTypedKeysObject( + parser, + Aggregation.TYPED_KEYS_DELIMITER, + Aggregation.class, + aggregations::add + ); } } bucket.setAggregations(new Aggregations(aggregations)); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/RareTerms.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/RareTerms.java index e354b73fabbd6..9b544296982e8 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/RareTerms.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/RareTerms.java @@ -11,7 +11,6 @@ import java.util.List; - public interface RareTerms extends MultiBucketsAggregation { /** @@ -34,4 +33,3 @@ interface Bucket extends 
MultiBucketsAggregation.Bucket { Bucket getBucketByKey(String term); } - diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/RareTermsAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/RareTermsAggregationBuilder.java index 6fe4d4915e2ee..ac21b08d40772 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/RareTermsAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/RareTermsAggregationBuilder.java @@ -8,10 +8,10 @@ package org.elasticsearch.search.aggregations.bucket.terms; import org.elasticsearch.Version; -import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.ObjectParser; +import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregatorFactories; @@ -30,24 +30,36 @@ public class RareTermsAggregationBuilder extends ValuesSourceAggregationBuilder { public static final String NAME = "rare_terms"; - public static final ValuesSourceRegistry.RegistryKey REGISTRY_KEY = - new ValuesSourceRegistry.RegistryKey<>(NAME, RareTermsAggregatorSupplier.class); + public static final ValuesSourceRegistry.RegistryKey REGISTRY_KEY = new ValuesSourceRegistry.RegistryKey<>( + NAME, + RareTermsAggregatorSupplier.class + ); private static final ParseField MAX_DOC_COUNT_FIELD_NAME = new ParseField("max_doc_count"); private static final ParseField PRECISION = new ParseField("precision"); private static final int MAX_MAX_DOC_COUNT = 100; - public static final ObjectParser PARSER = - ObjectParser.fromBuilder(NAME, RareTermsAggregationBuilder::new); + public static final ObjectParser PARSER = ObjectParser.fromBuilder( + NAME, + RareTermsAggregationBuilder::new + ); static { ValuesSourceAggregationBuilder.declareFields(PARSER, true, true, false); PARSER.declareLong(RareTermsAggregationBuilder::maxDocCount, MAX_DOC_COUNT_FIELD_NAME); - PARSER.declareField((b, v) -> b.includeExclude(IncludeExclude.merge(v, b.includeExclude())), - IncludeExclude::parseInclude, IncludeExclude.INCLUDE_FIELD, ObjectParser.ValueType.OBJECT_ARRAY_OR_STRING); + PARSER.declareField( + (b, v) -> b.includeExclude(IncludeExclude.merge(v, b.includeExclude())), + IncludeExclude::parseInclude, + IncludeExclude.INCLUDE_FIELD, + ObjectParser.ValueType.OBJECT_ARRAY_OR_STRING + ); - PARSER.declareField((b, v) -> b.includeExclude(IncludeExclude.merge(b.includeExclude(), v)), - IncludeExclude::parseExclude, IncludeExclude.EXCLUDE_FIELD, ObjectParser.ValueType.STRING_ARRAY); + PARSER.declareField( + (b, v) -> b.includeExclude(IncludeExclude.merge(b.includeExclude(), v)), + IncludeExclude::parseExclude, + IncludeExclude.EXCLUDE_FIELD, + ObjectParser.ValueType.STRING_ARRAY + ); PARSER.declareDouble(RareTermsAggregationBuilder::setPrecision, PRECISION); } @@ -64,9 +76,11 @@ public RareTermsAggregationBuilder(String name) { super(name); } - private RareTermsAggregationBuilder(RareTermsAggregationBuilder clone, - AggregatorFactories.Builder factoriesBuilder, - Map metadata) { + private RareTermsAggregationBuilder( + RareTermsAggregationBuilder clone, + AggregatorFactories.Builder factoriesBuilder, + Map metadata + ) { super(clone, factoriesBuilder, metadata); this.includeExclude = 
clone.includeExclude; } @@ -108,13 +122,20 @@ protected void innerWriteTo(StreamOutput out) throws IOException { public RareTermsAggregationBuilder maxDocCount(long maxDocCount) { if (maxDocCount <= 0) { throw new IllegalArgumentException( - "[" + MAX_DOC_COUNT_FIELD_NAME.getPreferredName() + "] must be greater than 0. Found [" - + maxDocCount + "] in [" + name + "]"); + "[" + + MAX_DOC_COUNT_FIELD_NAME.getPreferredName() + + "] must be greater than 0. Found [" + + maxDocCount + + "] in [" + + name + + "]" + ); } - //TODO review: what size cap should we put on this? + // TODO review: what size cap should we put on this? if (maxDocCount > MAX_MAX_DOC_COUNT) { - throw new IllegalArgumentException("[" + MAX_DOC_COUNT_FIELD_NAME.getPreferredName() + "] must be smaller" + - "than " + MAX_MAX_DOC_COUNT + "in [" + name + "]"); + throw new IllegalArgumentException( + "[" + MAX_DOC_COUNT_FIELD_NAME.getPreferredName() + "] must be smaller" + "than " + MAX_MAX_DOC_COUNT + "in [" + name + "]" + ); } this.maxDocCount = (int) maxDocCount; return this; @@ -163,16 +184,27 @@ public BucketCardinality bucketCardinality() { } @Override - protected ValuesSourceAggregatorFactory innerBuild(AggregationContext context, - ValuesSourceConfig config, - AggregatorFactory parent, - AggregatorFactories.Builder subFactoriesBuilder) throws IOException { - - RareTermsAggregatorSupplier aggregatorSupplier = - context.getValuesSourceRegistry().getAggregator(REGISTRY_KEY, config); - - return new RareTermsAggregatorFactory(name, config, includeExclude, - context, parent, subFactoriesBuilder, metadata, maxDocCount, precision, aggregatorSupplier); + protected ValuesSourceAggregatorFactory innerBuild( + AggregationContext context, + ValuesSourceConfig config, + AggregatorFactory parent, + AggregatorFactories.Builder subFactoriesBuilder + ) throws IOException { + + RareTermsAggregatorSupplier aggregatorSupplier = context.getValuesSourceRegistry().getAggregator(REGISTRY_KEY, config); + + return new RareTermsAggregatorFactory( + name, + config, + includeExclude, + context, + parent, + subFactoriesBuilder, + metadata, + maxDocCount, + precision, + aggregatorSupplier + ); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/RareTermsAggregatorFactory.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/RareTermsAggregatorFactory.java index 076bb718b2e24..13a4f34306751 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/RareTermsAggregatorFactory.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/RareTermsAggregatorFactory.java @@ -8,8 +8,8 @@ package org.elasticsearch.search.aggregations.bucket.terms; -import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.logging.DeprecationLogger; +import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.Aggregator; import org.elasticsearch.search.aggregations.AggregatorFactories; @@ -36,13 +36,19 @@ public class RareTermsAggregatorFactory extends ValuesSourceAggregatorFactory { private final double precision; static void registerAggregators(ValuesSourceRegistry.Builder builder) { - builder.register(RareTermsAggregationBuilder.REGISTRY_KEY, + builder.register( + RareTermsAggregationBuilder.REGISTRY_KEY, List.of(CoreValuesSourceType.KEYWORD, CoreValuesSourceType.IP), - RareTermsAggregatorFactory.bytesSupplier(), true); + 
RareTermsAggregatorFactory.bytesSupplier(), + true + ); - builder.register(RareTermsAggregationBuilder.REGISTRY_KEY, + builder.register( + RareTermsAggregationBuilder.REGISTRY_KEY, List.of(CoreValuesSourceType.DATE, CoreValuesSourceType.BOOLEAN, CoreValuesSourceType.NUMERIC), - RareTermsAggregatorFactory.numericSupplier(), true); + RareTermsAggregatorFactory.numericSupplier(), + true + ); } /** @@ -52,24 +58,30 @@ static void registerAggregators(ValuesSourceRegistry.Builder builder) { private static RareTermsAggregatorSupplier bytesSupplier() { return new RareTermsAggregatorSupplier() { @Override - public Aggregator build(String name, - AggregatorFactories factories, - ValuesSource valuesSource, - DocValueFormat format, - int maxDocCount, - double precision, - IncludeExclude includeExclude, - AggregationContext context, - Aggregator parent, - CardinalityUpperBound cardinality, - Map metadata) throws IOException { + public Aggregator build( + String name, + AggregatorFactories factories, + ValuesSource valuesSource, + DocValueFormat format, + int maxDocCount, + double precision, + IncludeExclude includeExclude, + AggregationContext context, + Aggregator parent, + CardinalityUpperBound cardinality, + Map metadata + ) throws IOException { - ExecutionMode execution = ExecutionMode.MAP; //TODO global ords not implemented yet, only supports "map" + ExecutionMode execution = ExecutionMode.MAP; // TODO global ords not implemented yet, only supports "map" if ((includeExclude != null) && (includeExclude.isRegexBased()) && format != DocValueFormat.RAW) { - throw new IllegalArgumentException("Aggregation [" + name + "] cannot support " + - "regular expression style include/exclude settings as they can only be applied to string fields. " + - "Use an array of values for include/exclude clauses"); + throw new IllegalArgumentException( + "Aggregation [" + + name + + "] cannot support " + + "regular expression style include/exclude settings as they can only be applied to string fields. " + + "Use an array of values for include/exclude clauses" + ); } return execution.create( @@ -97,22 +109,28 @@ public Aggregator build(String name, private static RareTermsAggregatorSupplier numericSupplier() { return new RareTermsAggregatorSupplier() { @Override - public Aggregator build(String name, - AggregatorFactories factories, - ValuesSource valuesSource, - DocValueFormat format, - int maxDocCount, - double precision, - IncludeExclude includeExclude, - AggregationContext context, - Aggregator parent, - CardinalityUpperBound cardinality, - Map metadata) throws IOException { + public Aggregator build( + String name, + AggregatorFactories factories, + ValuesSource valuesSource, + DocValueFormat format, + int maxDocCount, + double precision, + IncludeExclude includeExclude, + AggregationContext context, + Aggregator parent, + CardinalityUpperBound cardinality, + Map metadata + ) throws IOException { if ((includeExclude != null) && (includeExclude.isRegexBased())) { - throw new IllegalArgumentException("Aggregation [" + name + "] cannot support regular expression " + - "style include/exclude settings as they can only be applied to string fields. Use an array of numeric " + - "values for include/exclude clauses used to filter numeric fields"); + throw new IllegalArgumentException( + "Aggregation [" + + name + + "] cannot support regular expression " + + "style include/exclude settings as they can only be applied to string fields. 
Use an array of numeric " + + "values for include/exclude clauses used to filter numeric fields" + ); } IncludeExclude.LongFilter longFilter = null; @@ -139,12 +157,18 @@ public Aggregator build(String name, }; } - RareTermsAggregatorFactory(String name, ValuesSourceConfig config, - IncludeExclude includeExclude, - AggregationContext context, - AggregatorFactory parent, AggregatorFactories.Builder subFactoriesBuilder, - Map metadata, int maxDocCount, double precision, - RareTermsAggregatorSupplier aggregatorSupplier) throws IOException { + RareTermsAggregatorFactory( + String name, + ValuesSourceConfig config, + IncludeExclude includeExclude, + AggregationContext context, + AggregatorFactory parent, + AggregatorFactories.Builder subFactoriesBuilder, + Map metadata, + int maxDocCount, + double precision, + RareTermsAggregatorSupplier aggregatorSupplier + ) throws IOException { super(name, config, context, parent, subFactoriesBuilder, metadata); this.aggregatorSupplier = aggregatorSupplier; @@ -165,25 +189,21 @@ public InternalAggregation buildEmptyAggregation() { } @Override - protected Aggregator doCreateInternal( - Aggregator parent, - CardinalityUpperBound cardinality, - Map metadata - ) throws IOException { - return aggregatorSupplier - .build( - name, - factories, - config.getValuesSource(), - config.format(), - maxDocCount, - precision, - includeExclude, - context, - parent, - cardinality, - metadata - ); + protected Aggregator doCreateInternal(Aggregator parent, CardinalityUpperBound cardinality, Map metadata) + throws IOException { + return aggregatorSupplier.build( + name, + factories, + config.getValuesSource(), + config.format(), + maxDocCount, + precision, + includeExclude, + context, + parent, + cardinality, + metadata + ); } public enum ExecutionMode { diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/RareTermsAggregatorSupplier.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/RareTermsAggregatorSupplier.java index eb9b41001fdcc..d4119e97df111 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/RareTermsAggregatorSupplier.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/RareTermsAggregatorSupplier.java @@ -18,15 +18,17 @@ import java.util.Map; interface RareTermsAggregatorSupplier { - Aggregator build(String name, - AggregatorFactories factories, - ValuesSource valuesSource, - DocValueFormat format, - int maxDocCount, - double precision, - IncludeExclude includeExclude, - AggregationContext context, - Aggregator parent, - CardinalityUpperBound carinality, - Map metadata) throws IOException; + Aggregator build( + String name, + AggregatorFactories factories, + ValuesSource valuesSource, + DocValueFormat format, + int maxDocCount, + double precision, + IncludeExclude includeExclude, + AggregationContext context, + Aggregator parent, + CardinalityUpperBound carinality, + Map metadata + ) throws IOException; } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/SignificanceLookup.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/SignificanceLookup.java index a1e4f022d2916..b452c4e2436a4 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/SignificanceLookup.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/SignificanceLookup.java @@ -18,13 +18,13 @@ import org.apache.lucene.search.Query; import 
org.apache.lucene.search.TermQuery; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.core.Releasable; -import org.elasticsearch.core.Releasables; import org.elasticsearch.common.lucene.index.FilterableTermsEnum; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.BytesRefHash; import org.elasticsearch.common.util.LongArray; import org.elasticsearch.common.util.LongHash; +import org.elasticsearch.core.Releasable; +import org.elasticsearch.core.Releasables; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.TermQueryBuilder; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/SignificantLongTerms.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/SignificantLongTerms.java index 9d05f98be904c..c39334fa38a1a 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/SignificantLongTerms.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/SignificantLongTerms.java @@ -29,8 +29,16 @@ public static class Bucket extends InternalSignificantTerms.Bucket { long term; - public Bucket(long subsetDf, long subsetSize, long supersetDf, long supersetSize, long term, InternalAggregations aggregations, - DocValueFormat format, double score) { + public Bucket( + long subsetDf, + long subsetSize, + long supersetDf, + long supersetSize, + long term, + InternalAggregations aggregations, + DocValueFormat format, + double score + ) { super(subsetDf, subsetSize, supersetDf, supersetSize, aggregations, format); this.term = term; this.score = score; @@ -89,11 +97,18 @@ public int hashCode() { } } - public SignificantLongTerms(String name, int requiredSize, long minDocCount, - Map metadata, DocValueFormat format, long subsetSize, long supersetSize, - SignificanceHeuristic significanceHeuristic, List buckets) { - super(name, requiredSize, minDocCount, metadata, format, subsetSize, supersetSize, significanceHeuristic, - buckets); + public SignificantLongTerms( + String name, + int requiredSize, + long minDocCount, + Map metadata, + DocValueFormat format, + long subsetSize, + long supersetSize, + SignificanceHeuristic significanceHeuristic, + List buckets + ) { + super(name, requiredSize, minDocCount, metadata, format, subsetSize, supersetSize, significanceHeuristic, buckets); } /** @@ -110,20 +125,46 @@ public String getWriteableName() { @Override public SignificantLongTerms create(List buckets) { - return new SignificantLongTerms(name, requiredSize, minDocCount, metadata, format, subsetSize, supersetSize, - significanceHeuristic, buckets); + return new SignificantLongTerms( + name, + requiredSize, + minDocCount, + metadata, + format, + subsetSize, + supersetSize, + significanceHeuristic, + buckets + ); } @Override public Bucket createBucket(InternalAggregations aggregations, SignificantLongTerms.Bucket prototype) { - return new Bucket(prototype.subsetDf, prototype.subsetSize, prototype.supersetDf, prototype.supersetSize, prototype.term, - aggregations, prototype.format, prototype.score); + return new Bucket( + prototype.subsetDf, + prototype.subsetSize, + prototype.supersetDf, + prototype.supersetSize, + prototype.term, + aggregations, + prototype.format, + prototype.score + ); } @Override protected SignificantLongTerms create(long subsetSize, long supersetSize, List buckets) { - return new SignificantLongTerms(getName(), requiredSize, minDocCount, getMetadata(), 
format, subsetSize, - supersetSize, significanceHeuristic, buckets); + return new SignificantLongTerms( + getName(), + requiredSize, + minDocCount, + getMetadata(), + format, + subsetSize, + supersetSize, + significanceHeuristic, + buckets + ); } @Override @@ -132,8 +173,14 @@ protected Bucket[] createBucketsArray(int size) { } @Override - Bucket createBucket(long subsetDf, long subsetSize, long supersetDf, long supersetSize, - InternalAggregations aggregations, SignificantLongTerms.Bucket prototype) { + Bucket createBucket( + long subsetDf, + long subsetSize, + long supersetDf, + long supersetSize, + InternalAggregations aggregations, + SignificantLongTerms.Bucket prototype + ) { return new Bucket(subsetDf, subsetSize, supersetDf, supersetSize, prototype.term, aggregations, format, prototype.score); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/SignificantStringTerms.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/SignificantStringTerms.java index 360029bf0f556..064f10757bef0 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/SignificantStringTerms.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/SignificantStringTerms.java @@ -30,8 +30,16 @@ public static class Bucket extends InternalSignificantTerms.Bucket { BytesRef termBytes; - public Bucket(BytesRef term, long subsetDf, long subsetSize, long supersetDf, long supersetSize, InternalAggregations aggregations, - DocValueFormat format, double score) { + public Bucket( + BytesRef term, + long subsetDf, + long subsetSize, + long supersetDf, + long supersetSize, + InternalAggregations aggregations, + DocValueFormat format, + double score + ) { super(subsetDf, subsetSize, supersetDf, supersetSize, aggregations, format); this.termBytes = term; this.score = score; @@ -94,9 +102,17 @@ public int hashCode() { } } - public SignificantStringTerms(String name, int requiredSize, long minDocCount, - Map metadata, DocValueFormat format, long subsetSize, long supersetSize, - SignificanceHeuristic significanceHeuristic, List buckets) { + public SignificantStringTerms( + String name, + int requiredSize, + long minDocCount, + Map metadata, + DocValueFormat format, + long subsetSize, + long supersetSize, + SignificanceHeuristic significanceHeuristic, + List buckets + ) { super(name, requiredSize, minDocCount, metadata, format, subsetSize, supersetSize, significanceHeuristic, buckets); } @@ -114,20 +130,46 @@ public String getWriteableName() { @Override public SignificantStringTerms create(List buckets) { - return new SignificantStringTerms(name, requiredSize, minDocCount, metadata, format, subsetSize, - supersetSize, significanceHeuristic, buckets); + return new SignificantStringTerms( + name, + requiredSize, + minDocCount, + metadata, + format, + subsetSize, + supersetSize, + significanceHeuristic, + buckets + ); } @Override public Bucket createBucket(InternalAggregations aggregations, SignificantStringTerms.Bucket prototype) { - return new Bucket(prototype.termBytes, prototype.subsetDf, prototype.subsetSize, prototype.supersetDf, prototype.supersetSize, - aggregations, prototype.format, prototype.score); + return new Bucket( + prototype.termBytes, + prototype.subsetDf, + prototype.subsetSize, + prototype.supersetDf, + prototype.supersetSize, + aggregations, + prototype.format, + prototype.score + ); } @Override protected SignificantStringTerms create(long subsetSize, long supersetSize, List buckets) { - return new 
SignificantStringTerms(getName(), requiredSize, minDocCount, getMetadata(), format, subsetSize, - supersetSize, significanceHeuristic, buckets); + return new SignificantStringTerms( + getName(), + requiredSize, + minDocCount, + getMetadata(), + format, + subsetSize, + supersetSize, + significanceHeuristic, + buckets + ); } @Override @@ -136,8 +178,14 @@ protected Bucket[] createBucketsArray(int size) { } @Override - Bucket createBucket(long subsetDf, long subsetSize, long supersetDf, long supersetSize, - InternalAggregations aggregations, SignificantStringTerms.Bucket prototype) { + Bucket createBucket( + long subsetDf, + long subsetSize, + long supersetDf, + long supersetSize, + InternalAggregations aggregations, + SignificantStringTerms.Bucket prototype + ) { return new Bucket(prototype.termBytes, subsetDf, subsetSize, supersetDf, supersetSize, aggregations, format, prototype.score); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/SignificantTermsAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/SignificantTermsAggregationBuilder.java index 384de0d7d7268..5fd1d4bef6b90 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/SignificantTermsAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/SignificantTermsAggregationBuilder.java @@ -8,10 +8,10 @@ package org.elasticsearch.search.aggregations.bucket.terms; import org.elasticsearch.Version; -import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.ObjectParser; +import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.query.QueryBuilder; @@ -44,12 +44,19 @@ public class SignificantTermsAggregationBuilder extends ValuesSourceAggregationB static final ParseField BACKGROUND_FILTER = new ParseField("background_filter"); static final TermsAggregator.BucketCountThresholds DEFAULT_BUCKET_COUNT_THRESHOLDS = new TermsAggregator.BucketCountThresholds( - 3, 0, 10, -1); + 3, + 0, + 10, + -1 + ); static final SignificanceHeuristic DEFAULT_SIGNIFICANCE_HEURISTIC = new JLHScore(); private static final ObjectParser PARSER = new ObjectParser<>( SignificantTermsAggregationBuilder.NAME, - SignificanceHeuristic.class, SignificantTermsAggregationBuilder::significanceHeuristic, null); + SignificanceHeuristic.class, + SignificantTermsAggregationBuilder::significanceHeuristic, + null + ); static { ValuesSourceAggregationBuilder.declareFields(PARSER, true, true, false); @@ -57,24 +64,33 @@ public class SignificantTermsAggregationBuilder extends ValuesSourceAggregationB PARSER.declareLong(SignificantTermsAggregationBuilder::minDocCount, TermsAggregationBuilder.MIN_DOC_COUNT_FIELD_NAME); - PARSER.declareLong(SignificantTermsAggregationBuilder::shardMinDocCount, - TermsAggregationBuilder.SHARD_MIN_DOC_COUNT_FIELD_NAME); + PARSER.declareLong(SignificantTermsAggregationBuilder::shardMinDocCount, TermsAggregationBuilder.SHARD_MIN_DOC_COUNT_FIELD_NAME); PARSER.declareInt(SignificantTermsAggregationBuilder::size, TermsAggregationBuilder.REQUIRED_SIZE_FIELD_NAME); - PARSER.declareString(SignificantTermsAggregationBuilder::executionHint, - TermsAggregationBuilder.EXECUTION_HINT_FIELD_NAME); + 
PARSER.declareString(SignificantTermsAggregationBuilder::executionHint, TermsAggregationBuilder.EXECUTION_HINT_FIELD_NAME); - PARSER.declareObject(SignificantTermsAggregationBuilder::backgroundFilter, - (p, context) -> parseInnerQueryBuilder(p), - SignificantTermsAggregationBuilder.BACKGROUND_FILTER); + PARSER.declareObject( + SignificantTermsAggregationBuilder::backgroundFilter, + (p, context) -> parseInnerQueryBuilder(p), + SignificantTermsAggregationBuilder.BACKGROUND_FILTER + ); - PARSER.declareField((b, v) -> b.includeExclude(IncludeExclude.merge(v, b.includeExclude())), - IncludeExclude::parseInclude, IncludeExclude.INCLUDE_FIELD, ObjectParser.ValueType.OBJECT_ARRAY_OR_STRING); + PARSER.declareField( + (b, v) -> b.includeExclude(IncludeExclude.merge(v, b.includeExclude())), + IncludeExclude::parseInclude, + IncludeExclude.INCLUDE_FIELD, + ObjectParser.ValueType.OBJECT_ARRAY_OR_STRING + ); - PARSER.declareField((b, v) -> b.includeExclude(IncludeExclude.merge(b.includeExclude(), v)), - IncludeExclude::parseExclude, IncludeExclude.EXCLUDE_FIELD, ObjectParser.ValueType.STRING_ARRAY); + PARSER.declareField( + (b, v) -> b.includeExclude(IncludeExclude.merge(b.includeExclude(), v)), + IncludeExclude::parseExclude, + IncludeExclude.EXCLUDE_FIELD, + ObjectParser.ValueType.STRING_ARRAY + ); } + public static SignificantTermsAggregationBuilder parse(String aggregationName, XContentParser parser) throws IOException { return PARSER.parse(parser, new SignificantTermsAggregationBuilder(aggregationName), null); } @@ -105,8 +121,11 @@ public SignificantTermsAggregationBuilder(StreamInput in) throws IOException { significanceHeuristic = in.readNamedWriteable(SignificanceHeuristic.class); } - protected SignificantTermsAggregationBuilder(SignificantTermsAggregationBuilder clone, - AggregatorFactories.Builder factoriesBuilder, Map metadata) { + protected SignificantTermsAggregationBuilder( + SignificantTermsAggregationBuilder clone, + AggregatorFactories.Builder factoriesBuilder, + Map metadata + ) { super(clone, factoriesBuilder, metadata); this.bucketCountThresholds = new BucketCountThresholds(clone.bucketCountThresholds); this.executionHint = clone.executionHint; @@ -187,8 +206,7 @@ public SignificantTermsAggregationBuilder size(int size) { */ public SignificantTermsAggregationBuilder shardSize(int shardSize) { if (shardSize <= 0) { - throw new IllegalArgumentException( - "[shardSize] must be greater than 0. Found [" + shardSize + "] in [" + name + "]"); + throw new IllegalArgumentException("[shardSize] must be greater than 0. Found [" + shardSize + "] in [" + name + "]"); } bucketCountThresholds.setShardSize(shardSize); return this; @@ -201,7 +219,8 @@ public SignificantTermsAggregationBuilder shardSize(int shardSize) { public SignificantTermsAggregationBuilder minDocCount(long minDocCount) { if (minDocCount < 0) { throw new IllegalArgumentException( - "[minDocCount] must be greater than or equal to 0. Found [" + minDocCount + "] in [" + name + "]"); + "[minDocCount] must be greater than or equal to 0. Found [" + minDocCount + "] in [" + name + "]" + ); } bucketCountThresholds.setMinDocCount(minDocCount); return this; @@ -214,7 +233,8 @@ public SignificantTermsAggregationBuilder minDocCount(long minDocCount) { public SignificantTermsAggregationBuilder shardMinDocCount(long shardMinDocCount) { if (shardMinDocCount < 0) { throw new IllegalArgumentException( - "[shardMinDocCount] must be greater than or equal to 0. 
Found [" + shardMinDocCount + "] in [" + name + "]"); + "[shardMinDocCount] must be greater than or equal to 0. Found [" + shardMinDocCount + "] in [" + name + "]" + ); } bucketCountThresholds.setShardMinDocCount(shardMinDocCount); return this; @@ -280,16 +300,29 @@ public BucketCardinality bucketCardinality() { } @Override - protected ValuesSourceAggregatorFactory innerBuild(AggregationContext context, - ValuesSourceConfig config, - AggregatorFactory parent, - AggregatorFactories.Builder subFactoriesBuilder) throws IOException { + protected ValuesSourceAggregatorFactory innerBuild( + AggregationContext context, + ValuesSourceConfig config, + AggregatorFactory parent, + AggregatorFactories.Builder subFactoriesBuilder + ) throws IOException { SignificanceHeuristic executionHeuristic = significanceHeuristic.rewrite(context); - SignificantTermsAggregatorSupplier aggregatorSupplier = - context.getValuesSourceRegistry().getAggregator(REGISTRY_KEY, config); - return new SignificantTermsAggregatorFactory(name, config, includeExclude, executionHint, filterBuilder, - bucketCountThresholds, executionHeuristic, context, parent, subFactoriesBuilder, metadata, aggregatorSupplier); + SignificantTermsAggregatorSupplier aggregatorSupplier = context.getValuesSourceRegistry().getAggregator(REGISTRY_KEY, config); + return new SignificantTermsAggregatorFactory( + name, + config, + includeExclude, + executionHint, + filterBuilder, + bucketCountThresholds, + executionHeuristic, + context, + parent, + subFactoriesBuilder, + metadata, + aggregatorSupplier + ); } @Override @@ -320,10 +353,10 @@ public boolean equals(Object obj) { if (super.equals(obj) == false) return false; SignificantTermsAggregationBuilder other = (SignificantTermsAggregationBuilder) obj; return Objects.equals(bucketCountThresholds, other.bucketCountThresholds) - && Objects.equals(executionHint, other.executionHint) - && Objects.equals(filterBuilder, other.filterBuilder) - && Objects.equals(includeExclude, other.includeExclude) - && Objects.equals(significanceHeuristic, other.significanceHeuristic); + && Objects.equals(executionHint, other.executionHint) + && Objects.equals(filterBuilder, other.filterBuilder) + && Objects.equals(includeExclude, other.includeExclude) + && Objects.equals(significanceHeuristic, other.significanceHeuristic); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/SignificantTermsAggregatorFactory.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/SignificantTermsAggregatorFactory.java index af8a34c0fae16..5f98383f12938 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/SignificantTermsAggregatorFactory.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/SignificantTermsAggregatorFactory.java @@ -9,9 +9,9 @@ package org.elasticsearch.search.aggregations.bucket.terms; import org.apache.lucene.index.SortedSetDocValues; -import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.logging.DeprecationCategory; import org.elasticsearch.common.logging.DeprecationLogger; +import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.Aggregator; @@ -39,13 +39,19 @@ public class SignificantTermsAggregatorFactory extends ValuesSourceAggregatorFac private static final DeprecationLogger deprecationLogger = 
DeprecationLogger.getLogger(SignificantTermsAggregatorFactory.class); static void registerAggregators(ValuesSourceRegistry.Builder builder) { - builder.register(SignificantTermsAggregationBuilder.REGISTRY_KEY, + builder.register( + SignificantTermsAggregationBuilder.REGISTRY_KEY, List.of(CoreValuesSourceType.KEYWORD, CoreValuesSourceType.IP), - SignificantTermsAggregatorFactory.bytesSupplier(), true); + SignificantTermsAggregatorFactory.bytesSupplier(), + true + ); - builder.register(SignificantTermsAggregationBuilder.REGISTRY_KEY, + builder.register( + SignificantTermsAggregationBuilder.REGISTRY_KEY, List.of(CoreValuesSourceType.DATE, CoreValuesSourceType.BOOLEAN, CoreValuesSourceType.NUMERIC), - SignificantTermsAggregatorFactory.numericSupplier(), true); + SignificantTermsAggregatorFactory.numericSupplier(), + true + ); } /** @@ -55,19 +61,21 @@ static void registerAggregators(ValuesSourceRegistry.Builder builder) { private static SignificantTermsAggregatorSupplier bytesSupplier() { return new SignificantTermsAggregatorSupplier() { @Override - public Aggregator build(String name, - AggregatorFactories factories, - ValuesSourceConfig valuesSourceConfig, - DocValueFormat format, - TermsAggregator.BucketCountThresholds bucketCountThresholds, - IncludeExclude includeExclude, - String executionHint, - AggregationContext context, - Aggregator parent, - SignificanceHeuristic significanceHeuristic, - SignificanceLookup lookup, - CardinalityUpperBound cardinality, - Map metadata) throws IOException { + public Aggregator build( + String name, + AggregatorFactories factories, + ValuesSourceConfig valuesSourceConfig, + DocValueFormat format, + TermsAggregator.BucketCountThresholds bucketCountThresholds, + IncludeExclude includeExclude, + String executionHint, + AggregationContext context, + Aggregator parent, + SignificanceHeuristic significanceHeuristic, + SignificanceLookup lookup, + CardinalityUpperBound cardinality, + Map metadata + ) throws IOException { ExecutionMode execution = null; if (executionHint != null) { @@ -81,13 +89,29 @@ public Aggregator build(String name, } if ((includeExclude != null) && (includeExclude.isRegexBased()) && format != DocValueFormat.RAW) { - throw new IllegalArgumentException("Aggregation [" + name + "] cannot support regular expression style " - + "include/exclude settings as they can only be applied to string fields. Use an array of values for " - + "include/exclude clauses"); + throw new IllegalArgumentException( + "Aggregation [" + + name + + "] cannot support regular expression style " + + "include/exclude settings as they can only be applied to string fields. 
Use an array of values for " + + "include/exclude clauses" + ); } - return execution.create(name, factories, valuesSourceConfig, format, bucketCountThresholds, includeExclude, context, parent, - significanceHeuristic, lookup, cardinality, metadata); + return execution.create( + name, + factories, + valuesSourceConfig, + format, + bucketCountThresholds, + includeExclude, + context, + parent, + significanceHeuristic, + lookup, + cardinality, + metadata + ); } }; } @@ -99,24 +123,30 @@ public Aggregator build(String name, private static SignificantTermsAggregatorSupplier numericSupplier() { return new SignificantTermsAggregatorSupplier() { @Override - public Aggregator build(String name, - AggregatorFactories factories, - ValuesSourceConfig valuesSourceConfig, - DocValueFormat format, - TermsAggregator.BucketCountThresholds bucketCountThresholds, - IncludeExclude includeExclude, - String executionHint, - AggregationContext context, - Aggregator parent, - SignificanceHeuristic significanceHeuristic, - SignificanceLookup lookup, - CardinalityUpperBound cardinality, - Map metadata) throws IOException { + public Aggregator build( + String name, + AggregatorFactories factories, + ValuesSourceConfig valuesSourceConfig, + DocValueFormat format, + TermsAggregator.BucketCountThresholds bucketCountThresholds, + IncludeExclude includeExclude, + String executionHint, + AggregationContext context, + Aggregator parent, + SignificanceHeuristic significanceHeuristic, + SignificanceLookup lookup, + CardinalityUpperBound cardinality, + Map metadata + ) throws IOException { if ((includeExclude != null) && (includeExclude.isRegexBased())) { - throw new IllegalArgumentException("Aggregation [" + name + "] cannot support regular expression style include/exclude " - + "settings as they can only be applied to string fields. Use an array of numeric " + - "values for include/exclude clauses used to filter numeric fields"); + throw new IllegalArgumentException( + "Aggregation [" + + name + + "] cannot support regular expression style include/exclude " + + "settings as they can only be applied to string fields. 
Use an array of numeric " + + "values for include/exclude clauses used to filter numeric fields" + ); } ValuesSource.Numeric numericValuesSource = (ValuesSource.Numeric) valuesSourceConfig.getValuesSource(); @@ -129,10 +159,21 @@ public Aggregator build(String name, longFilter = includeExclude.convertToLongFilter(format); } - return new NumericTermsAggregator(name, factories, + return new NumericTermsAggregator( + name, + factories, agg -> agg.new SignificantLongTermsResults(lookup, significanceHeuristic, cardinality), - numericValuesSource, format, null, bucketCountThresholds, context, parent, SubAggCollectionMode.BREADTH_FIRST, - longFilter, cardinality, metadata); + numericValuesSource, + format, + null, + bucketCountThresholds, + context, + parent, + SubAggCollectionMode.BREADTH_FIRST, + longFilter, + cardinality, + metadata + ); } }; } @@ -144,24 +185,29 @@ public Aggregator build(String name, private final TermsAggregator.BucketCountThresholds bucketCountThresholds; private final SignificanceHeuristic significanceHeuristic; - SignificantTermsAggregatorFactory(String name, - ValuesSourceConfig config, - IncludeExclude includeExclude, - String executionHint, - QueryBuilder backgroundFilter, - TermsAggregator.BucketCountThresholds bucketCountThresholds, - SignificanceHeuristic significanceHeuristic, - AggregationContext context, - AggregatorFactory parent, - AggregatorFactories.Builder subFactoriesBuilder, - Map metadata, - SignificantTermsAggregatorSupplier aggregatorSupplier) throws IOException { + SignificantTermsAggregatorFactory( + String name, + ValuesSourceConfig config, + IncludeExclude includeExclude, + String executionHint, + QueryBuilder backgroundFilter, + TermsAggregator.BucketCountThresholds bucketCountThresholds, + SignificanceHeuristic significanceHeuristic, + AggregationContext context, + AggregatorFactory parent, + AggregatorFactories.Builder subFactoriesBuilder, + Map metadata, + SignificantTermsAggregatorSupplier aggregatorSupplier + ) throws IOException { super(name, config, context, parent, subFactoriesBuilder, metadata); if (config.hasValues()) { if (config.fieldContext().fieldType().isSearchable() == false) { - throw new IllegalArgumentException("SignificantText aggregation requires fields to be searchable, but [" - + config.fieldContext().fieldType().name() + "] is not"); + throw new IllegalArgumentException( + "SignificantText aggregation requires fields to be searchable, but [" + + config.fieldContext().fieldType().name() + + "] is not" + ); } } @@ -175,8 +221,12 @@ public Aggregator build(String name, @Override protected Aggregator createUnmapped(Aggregator parent, Map metadata) throws IOException { - final InternalAggregation aggregation = new UnmappedSignificantTerms(name, bucketCountThresholds.getRequiredSize(), - bucketCountThresholds.getMinDocCount(), metadata); + final InternalAggregation aggregation = new UnmappedSignificantTerms( + name, + bucketCountThresholds.getRequiredSize(), + bucketCountThresholds.getMinDocCount(), + metadata + ); return new NonCollectingAggregator(name, context, parent, factories, metadata) { @Override public InternalAggregation buildEmptyAggregation() { @@ -186,11 +236,8 @@ public InternalAggregation buildEmptyAggregation() { } @Override - protected Aggregator doCreateInternal( - Aggregator parent, - CardinalityUpperBound cardinality, - Map metadata - ) throws IOException { + protected Aggregator doCreateInternal(Aggregator parent, CardinalityUpperBound cardinality, Map metadata) + throws IOException { BucketCountThresholds 
bucketCountThresholds = new BucketCountThresholds(this.bucketCountThresholds); if (bucketCountThresholds.getShardSize() == SignificantTermsAggregationBuilder.DEFAULT_BUCKET_COUNT_THRESHOLDS.getShardSize()) { // The user has not made a shardSize selection . @@ -208,12 +255,7 @@ protected Aggregator doCreateInternal( bucketCountThresholds.setShardSize(2 * BucketUtils.suggestShardSideQueueSize(bucketCountThresholds.getRequiredSize())); } - SignificanceLookup lookup = new SignificanceLookup( - context, - config.fieldContext().fieldType(), - config.format(), - backgroundFilter - ); + SignificanceLookup lookup = new SignificanceLookup(context, config.fieldContext().fieldType(), config.format(), backgroundFilter); return aggregatorSupplier.build( name, @@ -237,18 +279,20 @@ public enum ExecutionMode { MAP(new ParseField("map")) { @Override - Aggregator create(String name, - AggregatorFactories factories, - ValuesSourceConfig valuesSourceConfig, - DocValueFormat format, - TermsAggregator.BucketCountThresholds bucketCountThresholds, - IncludeExclude includeExclude, - AggregationContext context, - Aggregator parent, - SignificanceHeuristic significanceHeuristic, - SignificanceLookup lookup, - CardinalityUpperBound cardinality, - Map metadata) throws IOException { + Aggregator create( + String name, + AggregatorFactories factories, + ValuesSourceConfig valuesSourceConfig, + DocValueFormat format, + TermsAggregator.BucketCountThresholds bucketCountThresholds, + IncludeExclude includeExclude, + AggregationContext context, + Aggregator parent, + SignificanceHeuristic significanceHeuristic, + SignificanceLookup lookup, + CardinalityUpperBound cardinality, + Map metadata + ) throws IOException { final IncludeExclude.StringFilter filter = includeExclude == null ? null : includeExclude.convertToStringFilter(format); return new MapStringTermsAggregator( @@ -274,18 +318,20 @@ Aggregator create(String name, GLOBAL_ORDINALS(new ParseField("global_ordinals")) { @Override - Aggregator create(String name, - AggregatorFactories factories, - ValuesSourceConfig valuesSourceConfig, - DocValueFormat format, - TermsAggregator.BucketCountThresholds bucketCountThresholds, - IncludeExclude includeExclude, - AggregationContext context, - Aggregator parent, - SignificanceHeuristic significanceHeuristic, - SignificanceLookup lookup, - CardinalityUpperBound cardinality, - Map metadata) throws IOException { + Aggregator create( + String name, + AggregatorFactories factories, + ValuesSourceConfig valuesSourceConfig, + DocValueFormat format, + TermsAggregator.BucketCountThresholds bucketCountThresholds, + IncludeExclude includeExclude, + AggregationContext context, + Aggregator parent, + SignificanceHeuristic significanceHeuristic, + SignificanceLookup lookup, + CardinalityUpperBound cardinality, + Map metadata + ) throws IOException { boolean remapGlobalOrd = true; if (cardinality == CardinalityUpperBound.ONE && factories == AggregatorFactories.EMPTY && includeExclude == null) { @@ -326,8 +372,11 @@ public static ExecutionMode fromString(String value, final DeprecationLogger dep if ("global_ordinals".equals(value)) { return GLOBAL_ORDINALS; } else if ("global_ordinals_hash".equals(value)) { - deprecationLogger.deprecate(DeprecationCategory.AGGREGATIONS, "global_ordinals_hash", - "global_ordinals_hash is deprecated. Please use [global_ordinals] instead."); + deprecationLogger.deprecate( + DeprecationCategory.AGGREGATIONS, + "global_ordinals_hash", + "global_ordinals_hash is deprecated. Please use [global_ordinals] instead." 
+ ); return GLOBAL_ORDINALS; } else if ("map".equals(value)) { return MAP; @@ -341,18 +390,20 @@ public static ExecutionMode fromString(String value, final DeprecationLogger dep this.parseField = parseField; } - abstract Aggregator create(String name, - AggregatorFactories factories, - ValuesSourceConfig valuesSourceConfig, - DocValueFormat format, - TermsAggregator.BucketCountThresholds bucketCountThresholds, - IncludeExclude includeExclude, - AggregationContext context, - Aggregator parent, - SignificanceHeuristic significanceHeuristic, - SignificanceLookup lookup, - CardinalityUpperBound cardinality, - Map metadata) throws IOException; + abstract Aggregator create( + String name, + AggregatorFactories factories, + ValuesSourceConfig valuesSourceConfig, + DocValueFormat format, + TermsAggregator.BucketCountThresholds bucketCountThresholds, + IncludeExclude includeExclude, + AggregationContext context, + Aggregator parent, + SignificanceHeuristic significanceHeuristic, + SignificanceLookup lookup, + CardinalityUpperBound cardinality, + Map metadata + ) throws IOException; @Override public String toString() { diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/SignificantTermsAggregatorSupplier.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/SignificantTermsAggregatorSupplier.java index a92279f5e20b5..5c75112c89b41 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/SignificantTermsAggregatorSupplier.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/SignificantTermsAggregatorSupplier.java @@ -19,17 +19,19 @@ import java.util.Map; interface SignificantTermsAggregatorSupplier { - Aggregator build(String name, - AggregatorFactories factories, - ValuesSourceConfig valuesSourceConfig, - DocValueFormat format, - TermsAggregator.BucketCountThresholds bucketCountThresholds, - IncludeExclude includeExclude, - String executionHint, - AggregationContext context, - Aggregator parent, - SignificanceHeuristic significanceHeuristic, - SignificanceLookup lookup, - CardinalityUpperBound cardinality, - Map metadata) throws IOException; + Aggregator build( + String name, + AggregatorFactories factories, + ValuesSourceConfig valuesSourceConfig, + DocValueFormat format, + TermsAggregator.BucketCountThresholds bucketCountThresholds, + IncludeExclude includeExclude, + String executionHint, + AggregationContext context, + Aggregator parent, + SignificanceHeuristic significanceHeuristic, + SignificanceLookup lookup, + CardinalityUpperBound cardinality, + Map metadata + ) throws IOException; } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/SignificantTextAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/SignificantTextAggregationBuilder.java index 4664d08248d9a..488d2ace1c8ea 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/SignificantTextAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/SignificantTextAggregationBuilder.java @@ -8,10 +8,10 @@ package org.elasticsearch.search.aggregations.bucket.terms; -import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.ObjectParser; +import org.elasticsearch.common.xcontent.ParseField; import 
org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.query.AbstractQueryBuilder; @@ -36,64 +36,71 @@ public class SignificantTextAggregationBuilder extends AbstractAggregationBuilde static final ParseField FIELD_NAME = new ParseField("field"); static final ParseField SOURCE_FIELDS_NAME = new ParseField("source_fields"); - static final ParseField FILTER_DUPLICATE_TEXT_FIELD_NAME = new ParseField( - "filter_duplicate_text"); + static final ParseField FILTER_DUPLICATE_TEXT_FIELD_NAME = new ParseField("filter_duplicate_text"); static final TermsAggregator.BucketCountThresholds DEFAULT_BUCKET_COUNT_THRESHOLDS = - SignificantTermsAggregationBuilder.DEFAULT_BUCKET_COUNT_THRESHOLDS; + SignificantTermsAggregationBuilder.DEFAULT_BUCKET_COUNT_THRESHOLDS; static final SignificanceHeuristic DEFAULT_SIGNIFICANCE_HEURISTIC = SignificantTermsAggregationBuilder.DEFAULT_SIGNIFICANCE_HEURISTIC; private String fieldName = null; - private String [] sourceFieldNames = null; + private String[] sourceFieldNames = null; private boolean filterDuplicateText = false; private IncludeExclude includeExclude = null; private QueryBuilder filterBuilder = null; - private TermsAggregator.BucketCountThresholds bucketCountThresholds = new BucketCountThresholds( - DEFAULT_BUCKET_COUNT_THRESHOLDS); + private TermsAggregator.BucketCountThresholds bucketCountThresholds = new BucketCountThresholds(DEFAULT_BUCKET_COUNT_THRESHOLDS); private SignificanceHeuristic significanceHeuristic = DEFAULT_SIGNIFICANCE_HEURISTIC; private static final ObjectParser PARSER = new ObjectParser<>( - SignificantTextAggregationBuilder.NAME, - SignificanceHeuristic.class, SignificantTextAggregationBuilder::significanceHeuristic, null); + SignificantTextAggregationBuilder.NAME, + SignificanceHeuristic.class, + SignificantTextAggregationBuilder::significanceHeuristic, + null + ); static { - PARSER.declareInt(SignificantTextAggregationBuilder::shardSize, - TermsAggregationBuilder.SHARD_SIZE_FIELD_NAME); + PARSER.declareInt(SignificantTextAggregationBuilder::shardSize, TermsAggregationBuilder.SHARD_SIZE_FIELD_NAME); - PARSER.declareLong(SignificantTextAggregationBuilder::minDocCount, - TermsAggregationBuilder.MIN_DOC_COUNT_FIELD_NAME); + PARSER.declareLong(SignificantTextAggregationBuilder::minDocCount, TermsAggregationBuilder.MIN_DOC_COUNT_FIELD_NAME); - PARSER.declareLong(SignificantTextAggregationBuilder::shardMinDocCount, - TermsAggregationBuilder.SHARD_MIN_DOC_COUNT_FIELD_NAME); + PARSER.declareLong(SignificantTextAggregationBuilder::shardMinDocCount, TermsAggregationBuilder.SHARD_MIN_DOC_COUNT_FIELD_NAME); - PARSER.declareInt(SignificantTextAggregationBuilder::size, - TermsAggregationBuilder.REQUIRED_SIZE_FIELD_NAME); + PARSER.declareInt(SignificantTextAggregationBuilder::size, TermsAggregationBuilder.REQUIRED_SIZE_FIELD_NAME); PARSER.declareString(SignificantTextAggregationBuilder::fieldName, FIELD_NAME); PARSER.declareStringArray(SignificantTextAggregationBuilder::sourceFieldNames, SOURCE_FIELDS_NAME); - - PARSER.declareBoolean(SignificantTextAggregationBuilder::filterDuplicateText, - FILTER_DUPLICATE_TEXT_FIELD_NAME); - - PARSER.declareObject(SignificantTextAggregationBuilder::backgroundFilter, - (p, context) -> AbstractQueryBuilder.parseInnerQueryBuilder(p), - SignificantTermsAggregationBuilder.BACKGROUND_FILTER); - - PARSER.declareField((b, v) -> b.includeExclude(IncludeExclude.merge(v, b.includeExclude())), - IncludeExclude::parseInclude, 
IncludeExclude.INCLUDE_FIELD, - ObjectParser.ValueType.OBJECT_ARRAY_OR_STRING); - - PARSER.declareField((b, v) -> b.includeExclude(IncludeExclude.merge(b.includeExclude(), v)), - IncludeExclude::parseExclude, IncludeExclude.EXCLUDE_FIELD, - ObjectParser.ValueType.STRING_ARRAY); + PARSER.declareBoolean(SignificantTextAggregationBuilder::filterDuplicateText, FILTER_DUPLICATE_TEXT_FIELD_NAME); + + PARSER.declareObject( + SignificantTextAggregationBuilder::backgroundFilter, + (p, context) -> AbstractQueryBuilder.parseInnerQueryBuilder(p), + SignificantTermsAggregationBuilder.BACKGROUND_FILTER + ); + + PARSER.declareField( + (b, v) -> b.includeExclude(IncludeExclude.merge(v, b.includeExclude())), + IncludeExclude::parseInclude, + IncludeExclude.INCLUDE_FIELD, + ObjectParser.ValueType.OBJECT_ARRAY_OR_STRING + ); + + PARSER.declareField( + (b, v) -> b.includeExclude(IncludeExclude.merge(b.includeExclude(), v)), + IncludeExclude::parseExclude, + IncludeExclude.EXCLUDE_FIELD, + ObjectParser.ValueType.STRING_ARRAY + ); } + public static SignificantTextAggregationBuilder parse(String aggregationName, XContentParser parser) throws IOException { return PARSER.parse(parser, new SignificantTextAggregationBuilder(aggregationName, null), null); } - protected SignificantTextAggregationBuilder(SignificantTextAggregationBuilder clone, - Builder factoriesBuilder, Map metadata) { + protected SignificantTextAggregationBuilder( + SignificantTextAggregationBuilder clone, + Builder factoriesBuilder, + Map metadata + ) { super(clone, factoriesBuilder, metadata); this.bucketCountThresholds = new BucketCountThresholds(clone.bucketCountThresholds); this.fieldName = clone.fieldName; @@ -117,24 +124,23 @@ public TermsAggregator.BucketCountThresholds bucketCountThresholds() { return bucketCountThresholds; } - @Override public SignificantTextAggregationBuilder subAggregations(Builder subFactories) { - throw new AggregationInitializationException("Aggregator [" + name + "] of type [" - + getType() + "] cannot accept sub-aggregations"); + throw new AggregationInitializationException( + "Aggregator [" + name + "] of type [" + getType() + "] cannot accept sub-aggregations" + ); } @Override public SignificantTextAggregationBuilder subAggregation(AggregationBuilder aggregation) { - throw new AggregationInitializationException("Aggregator [" + name + "] of type [" - + getType() + "] cannot accept sub-aggregations"); + throw new AggregationInitializationException( + "Aggregator [" + name + "] of type [" + getType() + "] cannot accept sub-aggregations" + ); } - public SignificantTextAggregationBuilder bucketCountThresholds( - TermsAggregator.BucketCountThresholds bucketCountThresholds) { + public SignificantTextAggregationBuilder bucketCountThresholds(TermsAggregator.BucketCountThresholds bucketCountThresholds) { if (bucketCountThresholds == null) { - throw new IllegalArgumentException( - "[bucketCountThresholds] must not be null: [" + name + "]"); + throw new IllegalArgumentException("[bucketCountThresholds] must not be null: [" + name + "]"); } this.bucketCountThresholds = bucketCountThresholds; return this; @@ -146,8 +152,7 @@ public SignificantTextAggregationBuilder bucketCountThresholds( */ public SignificantTextAggregationBuilder size(int size) { if (size <= 0) { - throw new IllegalArgumentException( - "[size] must be greater than 0. Found [" + size + "] in [" + name + "]"); + throw new IllegalArgumentException("[size] must be greater than 0. 
Found [" + size + "] in [" + name + "]"); } bucketCountThresholds.setRequiredSize(size); return this; @@ -161,8 +166,7 @@ public SignificantTextAggregationBuilder size(int size) { */ public SignificantTextAggregationBuilder shardSize(int shardSize) { if (shardSize <= 0) { - throw new IllegalArgumentException("[shardSize] must be greater than 0. Found [" - + shardSize + "] in [" + name + "]"); + throw new IllegalArgumentException("[shardSize] must be greater than 0. Found [" + shardSize + "] in [" + name + "]"); } bucketCountThresholds.setShardSize(shardSize); return this; @@ -177,18 +181,16 @@ public SignificantTextAggregationBuilder fieldName(String fieldName) { return this; } - /** * Selects the fields to load from _source JSON and analyze. * If none are specified, the indexed "fieldName" value is assumed * to also be the name of the JSON field holding the value */ public SignificantTextAggregationBuilder sourceFieldNames(List names) { - this.sourceFieldNames = names.toArray(new String [names.size()]); + this.sourceFieldNames = names.toArray(new String[names.size()]); return this; } - /** * Control if duplicate paragraphs of text should try be filtered from the * statistical text analysis. Can improve results but slows down analysis. @@ -206,8 +208,8 @@ public SignificantTextAggregationBuilder filterDuplicateText(boolean filterDupli public SignificantTextAggregationBuilder minDocCount(long minDocCount) { if (minDocCount < 0) { throw new IllegalArgumentException( - "[minDocCount] must be greater than or equal to 0. Found [" + minDocCount - + "] in [" + name + "]"); + "[minDocCount] must be greater than or equal to 0. Found [" + minDocCount + "] in [" + name + "]" + ); } bucketCountThresholds.setMinDocCount(minDocCount); return this; @@ -220,8 +222,8 @@ public SignificantTextAggregationBuilder minDocCount(long minDocCount) { public SignificantTextAggregationBuilder shardMinDocCount(long shardMinDocCount) { if (shardMinDocCount < 0) { throw new IllegalArgumentException( - "[shardMinDocCount] must be greater than or equal to 0. Found [" - + shardMinDocCount + "] in [" + name + "]"); + "[shardMinDocCount] must be greater than or equal to 0. 
Found [" + shardMinDocCount + "] in [" + name + "]" + ); } bucketCountThresholds.setShardMinDocCount(shardMinDocCount); return this; @@ -229,8 +231,7 @@ public SignificantTextAggregationBuilder shardMinDocCount(long shardMinDocCount) public SignificantTextAggregationBuilder backgroundFilter(QueryBuilder backgroundFilter) { if (backgroundFilter == null) { - throw new IllegalArgumentException( - "[backgroundFilter] must not be null: [" + name + "]"); + throw new IllegalArgumentException("[backgroundFilter] must not be null: [" + name + "]"); } this.filterBuilder = backgroundFilter; return this; @@ -255,11 +256,9 @@ public IncludeExclude includeExclude() { return includeExclude; } - public SignificantTextAggregationBuilder significanceHeuristic( - SignificanceHeuristic significanceHeuristic) { + public SignificantTextAggregationBuilder significanceHeuristic(SignificanceHeuristic significanceHeuristic) { if (significanceHeuristic == null) { - throw new IllegalArgumentException( - "[significanceHeuristic] must not be null: [" + name + "]"); + throw new IllegalArgumentException("[significanceHeuristic] must not be null: [" + name + "]"); } this.significanceHeuristic = significanceHeuristic; return this; @@ -313,18 +312,28 @@ public BucketCardinality bucketCardinality() { } @Override - protected AggregatorFactory doBuild(AggregationContext context, AggregatorFactory parent, - Builder subFactoriesBuilder) throws IOException { + protected AggregatorFactory doBuild(AggregationContext context, AggregatorFactory parent, Builder subFactoriesBuilder) + throws IOException { SignificanceHeuristic executionHeuristic = this.significanceHeuristic.rewrite(context); - return new SignificantTextAggregatorFactory(name, includeExclude, filterBuilder, - bucketCountThresholds, executionHeuristic, context, parent, subFactoriesBuilder, - fieldName, sourceFieldNames, filterDuplicateText, metadata); + return new SignificantTextAggregatorFactory( + name, + includeExclude, + filterBuilder, + bucketCountThresholds, + executionHeuristic, + context, + parent, + subFactoriesBuilder, + fieldName, + sourceFieldNames, + filterDuplicateText, + metadata + ); } @Override - protected XContentBuilder internalXContent(XContentBuilder builder, Params params) - throws IOException { + protected XContentBuilder internalXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); bucketCountThresholds.toXContent(builder, params); if (fieldName != null) { @@ -338,8 +347,7 @@ protected XContentBuilder internalXContent(XContentBuilder builder, Params param builder.field(FILTER_DUPLICATE_TEXT_FIELD_NAME.getPreferredName(), filterDuplicateText); } if (filterBuilder != null) { - builder.field(SignificantTermsAggregationBuilder.BACKGROUND_FILTER.getPreferredName(), - filterBuilder); + builder.field(SignificantTermsAggregationBuilder.BACKGROUND_FILTER.getPreferredName(), filterBuilder); } if (includeExclude != null) { includeExclude.toXContent(builder, params); @@ -352,9 +360,16 @@ protected XContentBuilder internalXContent(XContentBuilder builder, Params param @Override public int hashCode() { - return Objects.hash(super.hashCode(), bucketCountThresholds, fieldName, - filterDuplicateText, filterBuilder, - includeExclude, significanceHeuristic, Arrays.hashCode(sourceFieldNames)); + return Objects.hash( + super.hashCode(), + bucketCountThresholds, + fieldName, + filterDuplicateText, + filterBuilder, + includeExclude, + significanceHeuristic, + Arrays.hashCode(sourceFieldNames) + ); } @Override @@ -364,12 +379,12 @@ 
public boolean equals(Object obj) { if (super.equals(obj) == false) return false; SignificantTextAggregationBuilder other = (SignificantTextAggregationBuilder) obj; return Objects.equals(bucketCountThresholds, other.bucketCountThresholds) - && Objects.equals(fieldName, other.fieldName) - && Arrays.equals(sourceFieldNames, other.sourceFieldNames) - && filterDuplicateText == other.filterDuplicateText - && Objects.equals(filterBuilder, other.filterBuilder) - && Objects.equals(includeExclude, other.includeExclude) - && Objects.equals(significanceHeuristic, other.significanceHeuristic); + && Objects.equals(fieldName, other.fieldName) + && Arrays.equals(sourceFieldNames, other.sourceFieldNames) + && filterDuplicateText == other.filterDuplicateText + && Objects.equals(filterBuilder, other.filterBuilder) + && Objects.equals(includeExclude, other.includeExclude) + && Objects.equals(significanceHeuristic, other.significanceHeuristic); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/SignificantTextAggregatorFactory.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/SignificantTextAggregatorFactory.java index 85145f83cbe49..1ccfd3579c886 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/SignificantTextAggregatorFactory.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/SignificantTextAggregatorFactory.java @@ -16,10 +16,10 @@ import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRefBuilder; -import org.elasticsearch.core.Releasables; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.BytesRefHash; import org.elasticsearch.common.util.ObjectArray; +import org.elasticsearch.core.Releasables; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.TextSearchInfo; import org.elasticsearch.index.query.QueryBuilder; @@ -61,28 +61,31 @@ public class SignificantTextAggregatorFactory extends AggregatorFactory { private final SignificanceHeuristic significanceHeuristic; private final boolean filterDuplicateText; - public SignificantTextAggregatorFactory(String name, - IncludeExclude includeExclude, - QueryBuilder backgroundFilter, - TermsAggregator.BucketCountThresholds bucketCountThresholds, - SignificanceHeuristic significanceHeuristic, - AggregationContext context, - AggregatorFactory parent, - AggregatorFactories.Builder subFactoriesBuilder, - String fieldName, - String[] sourceFieldNames, - boolean filterDuplicateText, - Map metadata) throws IOException { + public SignificantTextAggregatorFactory( + String name, + IncludeExclude includeExclude, + QueryBuilder backgroundFilter, + TermsAggregator.BucketCountThresholds bucketCountThresholds, + SignificanceHeuristic significanceHeuristic, + AggregationContext context, + AggregatorFactory parent, + AggregatorFactories.Builder subFactoriesBuilder, + String fieldName, + String[] sourceFieldNames, + boolean filterDuplicateText, + Map metadata + ) throws IOException { super(name, context, parent, subFactoriesBuilder, metadata); this.fieldType = context.getFieldType(fieldName); if (fieldType != null) { if (supportsAgg(fieldType) == false) { - throw new IllegalArgumentException("Field [" + fieldType.name() + "] has no analyzer, but SignificantText " + - "requires an analyzed field"); + throw new IllegalArgumentException( + "Field [" + fieldType.name() + "] has no analyzer, but 
SignificantText " + "requires an analyzed field" + ); } String indexedFieldName = fieldType.name(); - this.sourceFieldNames = sourceFieldNames == null ? new String[] {indexedFieldName} : sourceFieldNames; + this.sourceFieldNames = sourceFieldNames == null ? new String[] { indexedFieldName } : sourceFieldNames; } else { this.sourceFieldNames = new String[0]; } @@ -95,8 +98,12 @@ public SignificantTextAggregatorFactory(String name, } protected Aggregator createUnmapped(Aggregator parent, Map metadata) throws IOException { - final InternalAggregation aggregation = new UnmappedSignificantTerms(name, bucketCountThresholds.getRequiredSize(), - bucketCountThresholds.getMinDocCount(), metadata); + final InternalAggregation aggregation = new UnmappedSignificantTerms( + name, + bucketCountThresholds.getRequiredSize(), + bucketCountThresholds.getMinDocCount(), + metadata + ); return new NonCollectingAggregator(name, context, parent, factories, metadata) { @Override public InternalAggregation buildEmptyAggregation() { @@ -106,8 +113,7 @@ public InternalAggregation buildEmptyAggregation() { } private static boolean supportsAgg(MappedFieldType ft) { - return ft.getTextSearchInfo() != TextSearchInfo.NONE - && ft.getTextSearchInfo() != TextSearchInfo.SIMPLE_MATCH_WITHOUT_TERMS; + return ft.getTextSearchInfo() != TextSearchInfo.NONE && ft.getTextSearchInfo() != TextSearchInfo.SIMPLE_MATCH_WITHOUT_TERMS; } @Override @@ -132,11 +138,9 @@ protected Aggregator createInternal(Aggregator parent, CardinalityUpperBound car bucketCountThresholds.setShardSize(2 * BucketUtils.suggestShardSideQueueSize(bucketCountThresholds.getRequiredSize())); } -// TODO - need to check with mapping that this is indeed a text field.... - - IncludeExclude.StringFilter incExcFilter = includeExclude == null ? null: - includeExclude.convertToStringFilter(DocValueFormat.RAW); + // TODO - need to check with mapping that this is indeed a text field.... + IncludeExclude.StringFilter incExcFilter = includeExclude == null ? null : includeExclude.convertToStringFilter(DocValueFormat.RAW); SignificanceLookup lookup = new SignificanceLookup(context, fieldType, DocValueFormat.RAW, backgroundFilter); return new MapStringTermsAggregator( @@ -179,9 +183,7 @@ protected Aggregator createInternal(Aggregator parent, CardinalityUpperBound car * While we're at it we count the number of values we fetch from source. 
*/ private CollectorSource createCollectorSource() { - Analyzer analyzer = context.getIndexAnalyzer(f -> { - throw new IllegalArgumentException("No analyzer configured for field " + f); - }); + Analyzer analyzer = context.getIndexAnalyzer(f -> { throw new IllegalArgumentException("No analyzer configured for field " + f); }); if (context.profiling()) { return new ProfilingSignificantTextCollectorSource( context.lookup().source(), @@ -233,8 +235,7 @@ public String describe() { } @Override - public void collectDebugInfo(BiConsumer add) { - } + public void collectDebugInfo(BiConsumer add) {} @Override public boolean needsScores() { @@ -272,17 +273,15 @@ private void collectFromSource(int doc, long owningBucketOrd, DuplicateByteSeque try { for (String sourceField : sourceFieldNames) { - Iterator itr = extractRawValues(sourceField).stream() - .map(obj -> { - if (obj == null) { - return null; - } - if (obj instanceof BytesRef) { - return fieldType.valueForDisplay(obj).toString(); - } - return obj.toString(); - }) - .iterator(); + Iterator itr = extractRawValues(sourceField).stream().map(obj -> { + if (obj == null) { + return null; + } + if (obj instanceof BytesRef) { + return fieldType.valueForDisplay(obj).toString(); + } + return obj.toString(); + }).iterator(); while (itr.hasNext()) { String text = itr.next(); TokenStream ts = analyzer.tokenStream(fieldType.name(), text); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/StringRareTerms.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/StringRareTerms.java index a11b4462c8389..3b9d886ad1070 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/StringRareTerms.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/StringRareTerms.java @@ -90,8 +90,15 @@ public int hashCode() { } } - StringRareTerms(String name, BucketOrder order, Map metadata, DocValueFormat format, - List buckets, long maxDocCount, SetBackedScalingCuckooFilter filter) { + StringRareTerms( + String name, + BucketOrder order, + Map metadata, + DocValueFormat format, + List buckets, + long maxDocCount, + SetBackedScalingCuckooFilter filter + ) { super(name, order, metadata, format, buckets, maxDocCount, filter); } @@ -118,8 +125,11 @@ public StringRareTerms.Bucket createBucket(InternalAggregations aggregations, St } @Override - protected StringRareTerms createWithFilter(String name, List buckets, - SetBackedScalingCuckooFilter filterFilter) { + protected StringRareTerms createWithFilter( + String name, + List buckets, + SetBackedScalingCuckooFilter filterFilter + ) { return new StringRareTerms(name, order, metadata, format, buckets, maxDocCount, filterFilter); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/StringRareTermsAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/StringRareTermsAggregator.java index ac90f9ddd6812..c564f117fe2fe 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/StringRareTermsAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/StringRareTermsAggregator.java @@ -10,9 +10,9 @@ import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRefBuilder; -import org.elasticsearch.core.Releasables; import org.elasticsearch.common.util.BytesRefHash; import org.elasticsearch.common.util.SetBackedScalingCuckooFilter; +import 
org.elasticsearch.core.Releasables; import org.elasticsearch.index.fielddata.SortedBinaryDocValues; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.Aggregator; @@ -55,24 +55,14 @@ public class StringRareTermsAggregator extends AbstractRareTermsAggregator { double precision, CardinalityUpperBound cardinality ) throws IOException { - super( - name, - factories, - context, - parent, - metadata, - maxDocCount, - precision, - format - ); + super(name, factories, context, parent, metadata, maxDocCount, precision, format); this.valuesSource = valuesSource; this.filter = filter; this.bucketOrds = BytesKeyedBucketOrds.build(bigArrays(), cardinality); } @Override - public LeafBucketCollector getLeafCollector(LeafReaderContext ctx, - final LeafBucketCollector sub) throws IOException { + public LeafBucketCollector getLeafCollector(LeafReaderContext ctx, final LeafBucketCollector sub) throws IOException { final SortedBinaryDocValues values = valuesSource.bytesValues(ctx); return new LeafBucketCollectorBase(sub, values) { final BytesRefBuilder previous = new BytesRefBuilder(); @@ -187,4 +177,3 @@ public void doClose() { Releasables.close(bucketOrds); } } - diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/StringTerms.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/StringTerms.java index 86d03c3c2d491..4fb5b055e325a 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/StringTerms.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/StringTerms.java @@ -25,11 +25,18 @@ */ public class StringTerms extends InternalMappedTerms { public static final String NAME = "sterms"; + public static class Bucket extends InternalTerms.Bucket { BytesRef termBytes; - public Bucket(BytesRef term, long docCount, InternalAggregations aggregations, boolean showDocCountError, long docCountError, - DocValueFormat format) { + public Bucket( + BytesRef term, + long docCount, + InternalAggregations aggregations, + boolean showDocCountError, + long docCountError, + DocValueFormat format + ) { super(docCount, aggregations, showDocCountError, docCountError, format); this.termBytes = term; } @@ -92,11 +99,34 @@ public int hashCode() { } } - public StringTerms(String name, BucketOrder reduceOrder, BucketOrder order, int requiredSize, long minDocCount, - Map metadata, DocValueFormat format, int shardSize, boolean showTermDocCountError, long otherDocCount, - List buckets, Long docCountError) { - super(name, reduceOrder, order, requiredSize, minDocCount, metadata, format, - shardSize, showTermDocCountError, otherDocCount, buckets, docCountError); + public StringTerms( + String name, + BucketOrder reduceOrder, + BucketOrder order, + int requiredSize, + long minDocCount, + Map metadata, + DocValueFormat format, + int shardSize, + boolean showTermDocCountError, + long otherDocCount, + List buckets, + Long docCountError + ) { + super( + name, + reduceOrder, + order, + requiredSize, + minDocCount, + metadata, + format, + shardSize, + showTermDocCountError, + otherDocCount, + buckets, + docCountError + ); } /** @@ -113,14 +143,32 @@ public String getWriteableName() { @Override public StringTerms create(List buckets) { - return new StringTerms(name, reduceOrder, order, requiredSize, minDocCount, metadata, format, shardSize, - showTermDocCountError, otherDocCount, buckets, docCountError); + return new StringTerms( + name, + reduceOrder, + order, + requiredSize, + minDocCount, + 
metadata, + format, + shardSize, + showTermDocCountError, + otherDocCount, + buckets, + docCountError + ); } @Override public Bucket createBucket(InternalAggregations aggregations, Bucket prototype) { - return new Bucket(prototype.termBytes, prototype.docCount, aggregations, prototype.showDocCountError, prototype.docCountError, - prototype.format); + return new Bucket( + prototype.termBytes, + prototype.docCount, + aggregations, + prototype.showDocCountError, + prototype.docCountError, + prototype.format + ); } @Override @@ -130,7 +178,19 @@ protected Bucket createBucket(long docCount, InternalAggregations aggs, long doc @Override protected StringTerms create(String name, List buckets, BucketOrder reduceOrder, long docCountError, long otherDocCount) { - return new StringTerms(name, reduceOrder, order, requiredSize, minDocCount, getMetadata(), format, shardSize, - showTermDocCountError, otherDocCount, buckets, docCountError); + return new StringTerms( + name, + reduceOrder, + order, + requiredSize, + minDocCount, + getMetadata(), + format, + shardSize, + showTermDocCountError, + otherDocCount, + buckets, + docCountError + ); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregationBuilder.java index 8dceb9c0881b7..4c1644ec7959d 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregationBuilder.java @@ -8,11 +8,11 @@ package org.elasticsearch.search.aggregations.bucket.terms; import org.elasticsearch.Version; -import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; import org.elasticsearch.common.xcontent.ObjectParser; +import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.Aggregator.SubAggCollectionMode; @@ -48,18 +48,20 @@ public class TermsAggregationBuilder extends ValuesSourceAggregationBuilder PARSER = - ObjectParser.fromBuilder(NAME, TermsAggregationBuilder::new); + public static final ObjectParser PARSER = ObjectParser.fromBuilder(NAME, TermsAggregationBuilder::new); static { ValuesSourceAggregationBuilder.declareFields(PARSER, true, true, false); - PARSER.declareBoolean(TermsAggregationBuilder::showTermDocCountError, - TermsAggregationBuilder.SHOW_TERM_DOC_COUNT_ERROR); + PARSER.declareBoolean(TermsAggregationBuilder::showTermDocCountError, TermsAggregationBuilder.SHOW_TERM_DOC_COUNT_ERROR); PARSER.declareInt(TermsAggregationBuilder::shardSize, SHARD_SIZE_FIELD_NAME); @@ -71,18 +73,32 @@ public class TermsAggregationBuilder extends ValuesSourceAggregationBuilder SubAggCollectionMode.parse(p.text(), LoggingDeprecationHandler.INSTANCE), - SubAggCollectionMode.KEY, ObjectParser.ValueType.STRING); - - PARSER.declareObjectArray(TermsAggregationBuilder::order, (p, c) -> InternalOrder.Parser.parseOrderParam(p), - TermsAggregationBuilder.ORDER_FIELD); - - PARSER.declareField((b, v) -> b.includeExclude(IncludeExclude.merge(v, b.includeExclude())), - IncludeExclude::parseInclude, IncludeExclude.INCLUDE_FIELD, ObjectParser.ValueType.OBJECT_ARRAY_OR_STRING); - - 
PARSER.declareField((b, v) -> b.includeExclude(IncludeExclude.merge(b.includeExclude(), v)), - IncludeExclude::parseExclude, IncludeExclude.EXCLUDE_FIELD, ObjectParser.ValueType.STRING_ARRAY); + PARSER.declareField( + TermsAggregationBuilder::collectMode, + (p, c) -> SubAggCollectionMode.parse(p.text(), LoggingDeprecationHandler.INSTANCE), + SubAggCollectionMode.KEY, + ObjectParser.ValueType.STRING + ); + + PARSER.declareObjectArray( + TermsAggregationBuilder::order, + (p, c) -> InternalOrder.Parser.parseOrderParam(p), + TermsAggregationBuilder.ORDER_FIELD + ); + + PARSER.declareField( + (b, v) -> b.includeExclude(IncludeExclude.merge(v, b.includeExclude())), + IncludeExclude::parseInclude, + IncludeExclude.INCLUDE_FIELD, + ObjectParser.ValueType.OBJECT_ARRAY_OR_STRING + ); + + PARSER.declareField( + (b, v) -> b.includeExclude(IncludeExclude.merge(b.includeExclude(), v)), + IncludeExclude::parseExclude, + IncludeExclude.EXCLUDE_FIELD, + ObjectParser.ValueType.STRING_ARRAY + ); } public static void registerAggregators(ValuesSourceRegistry.Builder builder) { @@ -94,16 +110,19 @@ public static void registerAggregators(ValuesSourceRegistry.Builder builder) { private String executionHint = null; private SubAggCollectionMode collectMode = null; private TermsAggregator.BucketCountThresholds bucketCountThresholds = new TermsAggregator.BucketCountThresholds( - DEFAULT_BUCKET_COUNT_THRESHOLDS); + DEFAULT_BUCKET_COUNT_THRESHOLDS + ); private boolean showTermDocCountError = false; public TermsAggregationBuilder(String name) { super(name); } - protected TermsAggregationBuilder(TermsAggregationBuilder clone, - AggregatorFactories.Builder factoriesBuilder, - Map metadata) { + protected TermsAggregationBuilder( + TermsAggregationBuilder clone, + AggregatorFactories.Builder factoriesBuilder, + Map metadata + ) { super(clone, factoriesBuilder, metadata); this.order = clone.order; this.executionHint = clone.executionHint; @@ -178,8 +197,7 @@ public int size() { */ public TermsAggregationBuilder shardSize(int shardSize) { if (shardSize <= 0) { - throw new IllegalArgumentException( - "[shardSize] must be greater than 0. Found [" + shardSize + "] in [" + name + "]"); + throw new IllegalArgumentException("[shardSize] must be greater than 0. Found [" + shardSize + "] in [" + name + "]"); } bucketCountThresholds.setShardSize(shardSize); return this; @@ -199,7 +217,8 @@ public int shardSize() { public TermsAggregationBuilder minDocCount(long minDocCount) { if (minDocCount < 0) { throw new IllegalArgumentException( - "[minDocCount] must be greater than or equal to 0. Found [" + minDocCount + "] in [" + name + "]"); + "[minDocCount] must be greater than or equal to 0. Found [" + minDocCount + "] in [" + name + "]" + ); } bucketCountThresholds.setMinDocCount(minDocCount); return this; @@ -219,7 +238,8 @@ public long minDocCount() { public TermsAggregationBuilder shardMinDocCount(long shardMinDocCount) { if (shardMinDocCount < 0) { throw new IllegalArgumentException( - "[shardMinDocCount] must be greater than or equal to 0. Found [" + shardMinDocCount + "] in [" + name + "]"); + "[shardMinDocCount] must be greater than or equal to 0. 
Found [" + shardMinDocCount + "] in [" + name + "]" + ); } bucketCountThresholds.setShardMinDocCount(shardMinDocCount); return this; @@ -238,7 +258,7 @@ public TermsAggregationBuilder order(BucketOrder order) { if (order == null) { throw new IllegalArgumentException("[order] must not be null: [" + name + "]"); } - if(order instanceof CompoundOrder || InternalOrder.isKeyOrder(order)) { + if (order instanceof CompoundOrder || InternalOrder.isKeyOrder(order)) { this.order = order; // if order already contains a tie-breaker we are good to go } else { // otherwise add a tie-breaker by using a compound order this.order = BucketOrder.compound(order); @@ -335,15 +355,28 @@ public BucketCardinality bucketCardinality() { } @Override - protected ValuesSourceAggregatorFactory innerBuild(AggregationContext context, - ValuesSourceConfig config, - AggregatorFactory parent, - AggregatorFactories.Builder subFactoriesBuilder) throws IOException { - TermsAggregatorSupplier aggregatorSupplier = - context.getValuesSourceRegistry().getAggregator(REGISTRY_KEY, config); - return new TermsAggregatorFactory(name, config, order, includeExclude, executionHint, collectMode, - bucketCountThresholds, showTermDocCountError, context, parent, subFactoriesBuilder, metadata, - aggregatorSupplier); + protected ValuesSourceAggregatorFactory innerBuild( + AggregationContext context, + ValuesSourceConfig config, + AggregatorFactory parent, + AggregatorFactories.Builder subFactoriesBuilder + ) throws IOException { + TermsAggregatorSupplier aggregatorSupplier = context.getValuesSourceRegistry().getAggregator(REGISTRY_KEY, config); + return new TermsAggregatorFactory( + name, + config, + order, + includeExclude, + executionHint, + collectMode, + bucketCountThresholds, + showTermDocCountError, + context, + parent, + subFactoriesBuilder, + metadata, + aggregatorSupplier + ); } @Override @@ -366,8 +399,15 @@ protected XContentBuilder doXContentBody(XContentBuilder builder, Params params) @Override public int hashCode() { - return Objects.hash(super.hashCode(), bucketCountThresholds, collectMode, - executionHint, includeExclude, order, showTermDocCountError); + return Objects.hash( + super.hashCode(), + bucketCountThresholds, + collectMode, + executionHint, + includeExclude, + order, + showTermDocCountError + ); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregator.java index 8031d57ab3430..2be52e16fcfd9 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregator.java @@ -6,7 +6,6 @@ * Side Public License, v 1. 
*/ - package org.elasticsearch.search.aggregations.bucket.terms; import org.elasticsearch.ElasticsearchException; @@ -67,8 +66,12 @@ public void writeTo(StreamOutput out) throws IOException { } public BucketCountThresholds(BucketCountThresholds bucketCountThresholds) { - this(bucketCountThresholds.minDocCount, bucketCountThresholds.shardMinDocCount, bucketCountThresholds.requiredSize, - bucketCountThresholds.shardSize); + this( + bucketCountThresholds.minDocCount, + bucketCountThresholds.shardMinDocCount, + bucketCountThresholds.requiredSize, + bucketCountThresholds.shardSize + ); } public void ensureValidity() { @@ -163,9 +166,9 @@ public boolean equals(Object obj) { } BucketCountThresholds other = (BucketCountThresholds) obj; return Objects.equals(requiredSize, other.requiredSize) - && Objects.equals(shardSize, other.shardSize) - && Objects.equals(minDocCount, other.minDocCount) - && Objects.equals(shardMinDocCount, other.shardMinDocCount); + && Objects.equals(shardSize, other.shardSize) + && Objects.equals(minDocCount, other.minDocCount) + && Objects.equals(shardMinDocCount, other.shardMinDocCount); } } @@ -176,9 +179,17 @@ public boolean equals(Object obj) { protected final Set aggsUsedForSorting; protected final SubAggCollectionMode collectMode; - public TermsAggregator(String name, AggregatorFactories factories, AggregationContext context, Aggregator parent, - BucketCountThresholds bucketCountThresholds, BucketOrder order, DocValueFormat format, SubAggCollectionMode collectMode, - Map metadata) throws IOException { + public TermsAggregator( + String name, + AggregatorFactories factories, + AggregationContext context, + Aggregator parent, + BucketCountThresholds bucketCountThresholds, + BucketOrder order, + DocValueFormat format, + SubAggCollectionMode collectMode, + Map metadata + ) throws IOException { super(name, factories, context, parent, metadata); this.bucketCountThresholds = bucketCountThresholds; this.order = order; @@ -239,7 +250,6 @@ private boolean subAggsNeedScore() { @Override protected boolean shouldDefer(Aggregator aggregator) { - return collectMode == SubAggCollectionMode.BREADTH_FIRST - && aggsUsedForSorting.contains(aggregator) == false; + return collectMode == SubAggCollectionMode.BREADTH_FIRST && aggsUsedForSorting.contains(aggregator) == false; } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorFactory.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorFactory.java index 09845c6aad031..c6c1b194c7289 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorFactory.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorFactory.java @@ -45,13 +45,19 @@ public class TermsAggregatorFactory extends ValuesSourceAggregatorFactory { static Boolean REMAP_GLOBAL_ORDS, COLLECT_SEGMENT_ORDS; static void registerAggregators(ValuesSourceRegistry.Builder builder) { - builder.register(TermsAggregationBuilder.REGISTRY_KEY, + builder.register( + TermsAggregationBuilder.REGISTRY_KEY, List.of(CoreValuesSourceType.KEYWORD, CoreValuesSourceType.IP), - TermsAggregatorFactory.bytesSupplier(), true); + TermsAggregatorFactory.bytesSupplier(), + true + ); - builder.register(TermsAggregationBuilder.REGISTRY_KEY, + builder.register( + TermsAggregationBuilder.REGISTRY_KEY, List.of(CoreValuesSourceType.DATE, CoreValuesSourceType.BOOLEAN, CoreValuesSourceType.NUMERIC), - TermsAggregatorFactory.numericSupplier(), 
true); + TermsAggregatorFactory.numericSupplier(), + true + ); } /** @@ -77,19 +83,21 @@ static void registerAggregators(ValuesSourceRegistry.Builder builder) { private static TermsAggregatorSupplier bytesSupplier() { return new TermsAggregatorSupplier() { @Override - public Aggregator build(String name, - AggregatorFactories factories, - ValuesSourceConfig valuesSourceConfig, - BucketOrder order, - TermsAggregator.BucketCountThresholds bucketCountThresholds, - IncludeExclude includeExclude, - String executionHint, - AggregationContext context, - Aggregator parent, - SubAggCollectionMode subAggCollectMode, - boolean showTermDocCountError, - CardinalityUpperBound cardinality, - Map metadata) throws IOException { + public Aggregator build( + String name, + AggregatorFactories factories, + ValuesSourceConfig valuesSourceConfig, + BucketOrder order, + TermsAggregator.BucketCountThresholds bucketCountThresholds, + IncludeExclude includeExclude, + String executionHint, + AggregationContext context, + Aggregator parent, + SubAggCollectionMode subAggCollectMode, + boolean showTermDocCountError, + CardinalityUpperBound cardinality, + Map metadata + ) throws IOException { ValuesSource valuesSource = valuesSourceConfig.getValuesSource(); ExecutionMode execution = null; if (executionHint != null) { @@ -108,15 +116,31 @@ public Aggregator build(String name, } if ((includeExclude != null) && (includeExclude.isRegexBased()) && valuesSourceConfig.format() != DocValueFormat.RAW) { - // TODO this exception message is not really accurate for the string case. It's really disallowing regex + formatter - throw new AggregationExecutionException("Aggregation [" + name + "] cannot support regular expression style " - + "include/exclude settings as they can only be applied to string fields. Use an array of values for " - + "include/exclude clauses"); + // TODO this exception message is not really accurate for the string case. It's really disallowing regex + formatter + throw new AggregationExecutionException( + "Aggregation [" + + name + + "] cannot support regular expression style " + + "include/exclude settings as they can only be applied to string fields. 
Use an array of values for " + + "include/exclude clauses" + ); } // TODO: [Zach] we might want refactor and remove ExecutionMode#create(), moving that logic outside the enum - return execution.create(name, factories, valuesSourceConfig, order, bucketCountThresholds, includeExclude, - context, parent, subAggCollectMode, showTermDocCountError, cardinality, metadata); + return execution.create( + name, + factories, + valuesSourceConfig, + order, + bucketCountThresholds, + includeExclude, + context, + parent, + subAggCollectMode, + showTermDocCountError, + cardinality, + metadata + ); } }; @@ -129,24 +153,30 @@ public Aggregator build(String name, private static TermsAggregatorSupplier numericSupplier() { return new TermsAggregatorSupplier() { @Override - public Aggregator build(String name, - AggregatorFactories factories, - ValuesSourceConfig valuesSourceConfig, - BucketOrder order, - TermsAggregator.BucketCountThresholds bucketCountThresholds, - IncludeExclude includeExclude, - String executionHint, - AggregationContext context, - Aggregator parent, - SubAggCollectionMode subAggCollectMode, - boolean showTermDocCountError, - CardinalityUpperBound cardinality, - Map metadata) throws IOException { + public Aggregator build( + String name, + AggregatorFactories factories, + ValuesSourceConfig valuesSourceConfig, + BucketOrder order, + TermsAggregator.BucketCountThresholds bucketCountThresholds, + IncludeExclude includeExclude, + String executionHint, + AggregationContext context, + Aggregator parent, + SubAggCollectionMode subAggCollectMode, + boolean showTermDocCountError, + CardinalityUpperBound cardinality, + Map metadata + ) throws IOException { if ((includeExclude != null) && (includeExclude.isRegexBased())) { - throw new AggregationExecutionException("Aggregation [" + name + "] cannot support regular expression style " - + "include/exclude settings as they can only be applied to string fields. Use an array of numeric values for " - + "include/exclude clauses used to filter numeric fields"); + throw new AggregationExecutionException( + "Aggregation [" + + name + + "] cannot support regular expression style " + + "include/exclude settings as they can only be applied to string fields. 
Use an array of numeric values for " + + "include/exclude clauses used to filter numeric fields" + ); } if (subAggCollectMode == null) { @@ -167,8 +197,21 @@ public Aggregator build(String name, } resultStrategy = agg -> agg.new LongTermsResults(showTermDocCountError); } - return new NumericTermsAggregator(name, factories, resultStrategy, numericValuesSource, valuesSourceConfig.format(), order, - bucketCountThresholds, context, parent, subAggCollectMode, longFilter, cardinality, metadata); + return new NumericTermsAggregator( + name, + factories, + resultStrategy, + numericValuesSource, + valuesSourceConfig.format(), + order, + bucketCountThresholds, + context, + parent, + subAggCollectMode, + longFilter, + cardinality, + metadata + ); } }; } @@ -181,19 +224,21 @@ public Aggregator build(String name, private final TermsAggregator.BucketCountThresholds bucketCountThresholds; private final boolean showTermDocCountError; - TermsAggregatorFactory(String name, - ValuesSourceConfig config, - BucketOrder order, - IncludeExclude includeExclude, - String executionHint, - SubAggCollectionMode collectMode, - TermsAggregator.BucketCountThresholds bucketCountThresholds, - boolean showTermDocCountError, - AggregationContext context, - AggregatorFactory parent, - AggregatorFactories.Builder subFactoriesBuilder, - Map metadata, - TermsAggregatorSupplier aggregatorSupplier) throws IOException { + TermsAggregatorFactory( + String name, + ValuesSourceConfig config, + BucketOrder order, + IncludeExclude includeExclude, + String executionHint, + SubAggCollectionMode collectMode, + TermsAggregator.BucketCountThresholds bucketCountThresholds, + boolean showTermDocCountError, + AggregationContext context, + AggregatorFactory parent, + AggregatorFactories.Builder subFactoriesBuilder, + Map metadata, + TermsAggregatorSupplier aggregatorSupplier + ) throws IOException { super(name, config, context, parent, subFactoriesBuilder, metadata); this.aggregatorSupplier = aggregatorSupplier; this.order = order; @@ -206,8 +251,13 @@ public Aggregator build(String name, @Override protected Aggregator createUnmapped(Aggregator parent, Map metadata) throws IOException { - final InternalAggregation aggregation = new UnmappedTerms(name, order, bucketCountThresholds.getRequiredSize(), - bucketCountThresholds.getMinDocCount(), metadata); + final InternalAggregation aggregation = new UnmappedTerms( + name, + order, + bucketCountThresholds.getRequiredSize(), + bucketCountThresholds.getMinDocCount(), + metadata + ); Aggregator agg = new NonCollectingAggregator(name, context, parent, factories, metadata) { @Override public InternalAggregation buildEmptyAggregation() { @@ -231,11 +281,8 @@ private static boolean isAggregationSort(BucketOrder order) { } @Override - protected Aggregator doCreateInternal( - Aggregator parent, - CardinalityUpperBound cardinality, - Map metadata - ) throws IOException { + protected Aggregator doCreateInternal(Aggregator parent, CardinalityUpperBound cardinality, Map metadata) + throws IOException { BucketCountThresholds bucketCountThresholds = new BucketCountThresholds(this.bucketCountThresholds); if (InternalOrder.isKeyOrder(order) == false && bucketCountThresholds.getShardSize() == TermsAggregationBuilder.DEFAULT_BUCKET_COUNT_THRESHOLDS.getShardSize()) { @@ -307,18 +354,20 @@ public enum ExecutionMode { MAP(new ParseField("map")) { @Override - Aggregator create(String name, - AggregatorFactories factories, - ValuesSourceConfig valuesSourceConfig, - BucketOrder order, - TermsAggregator.BucketCountThresholds 
bucketCountThresholds, - IncludeExclude includeExclude, - AggregationContext context, - Aggregator parent, - SubAggCollectionMode subAggCollectMode, - boolean showTermDocCountError, - CardinalityUpperBound cardinality, - Map metadata) throws IOException { + Aggregator create( + String name, + AggregatorFactories factories, + ValuesSourceConfig valuesSourceConfig, + BucketOrder order, + TermsAggregator.BucketCountThresholds bucketCountThresholds, + IncludeExclude includeExclude, + AggregationContext context, + Aggregator parent, + SubAggCollectionMode subAggCollectMode, + boolean showTermDocCountError, + CardinalityUpperBound cardinality, + Map metadata + ) throws IOException { IncludeExclude.StringFilter filter = includeExclude == null ? null : includeExclude.convertToStringFilter(valuesSourceConfig.format()); @@ -343,17 +392,20 @@ Aggregator create(String name, GLOBAL_ORDINALS(new ParseField("global_ordinals")) { @Override - Aggregator create(String name, - AggregatorFactories factories, - ValuesSourceConfig valuesSourceConfig, - BucketOrder order, - TermsAggregator.BucketCountThresholds bucketCountThresholds, - IncludeExclude includeExclude, - AggregationContext context, Aggregator parent, - SubAggCollectionMode subAggCollectMode, - boolean showTermDocCountError, - CardinalityUpperBound cardinality, - Map metadata) throws IOException { + Aggregator create( + String name, + AggregatorFactories factories, + ValuesSourceConfig valuesSourceConfig, + BucketOrder order, + TermsAggregator.BucketCountThresholds bucketCountThresholds, + IncludeExclude includeExclude, + AggregationContext context, + Aggregator parent, + SubAggCollectionMode subAggCollectMode, + boolean showTermDocCountError, + CardinalityUpperBound cardinality, + Map metadata + ) throws IOException { assert valuesSourceConfig.getValuesSource() instanceof ValuesSource.Bytes.WithOrdinals; ValuesSource.Bytes.WithOrdinals ordinalsValuesSource = (ValuesSource.Bytes.WithOrdinals) valuesSourceConfig @@ -396,12 +448,13 @@ Aggregator create(String name, } final double ratio = maxOrd / ((double) context.searcher().getIndexReader().numDocs()); - if (factories == AggregatorFactories.EMPTY && - includeExclude == null && - cardinality == CardinalityUpperBound.ONE && - ordinalsValuesSource.supportsGlobalOrdinalsMapping() && - // we use the static COLLECT_SEGMENT_ORDS to allow tests to force specific optimizations - (COLLECT_SEGMENT_ORDS!= null ? COLLECT_SEGMENT_ORDS.booleanValue() : ratio <= 0.5 && maxOrd <= 2048)) { + if (factories == AggregatorFactories.EMPTY + && includeExclude == null + && cardinality == CardinalityUpperBound.ONE + && ordinalsValuesSource.supportsGlobalOrdinalsMapping() + && + // we use the static COLLECT_SEGMENT_ORDS to allow tests to force specific optimizations + (COLLECT_SEGMENT_ORDS != null ? 
COLLECT_SEGMENT_ORDS.booleanValue() : ratio <= 0.5 && maxOrd <= 2048)) { /* * We can use the low cardinality execution mode iff this aggregator: * - has no sub-aggregator AND @@ -440,10 +493,10 @@ Aggregator create(String name, remapGlobalOrds = REMAP_GLOBAL_ORDS.booleanValue(); } else { remapGlobalOrds = true; - if (includeExclude == null && - cardinality == CardinalityUpperBound.ONE && - (factories == AggregatorFactories.EMPTY || - (isAggregationSort(order) == false && subAggCollectMode == SubAggCollectionMode.BREADTH_FIRST))) { + if (includeExclude == null + && cardinality == CardinalityUpperBound.ONE + && (factories == AggregatorFactories.EMPTY + || (isAggregationSort(order) == false && subAggCollectMode == SubAggCollectionMode.BREADTH_FIRST))) { /* * We don't need to remap global ords iff this aggregator: * - has no include/exclude rules AND @@ -451,7 +504,7 @@ Aggregator create(String name, * - has no sub-aggregator or only sub-aggregator that can be deferred * ({@link SubAggCollectionMode#BREADTH_FIRST}). */ - remapGlobalOrds = false; + remapGlobalOrds = false; } } return new GlobalOrdinalsStringTermsAggregator( @@ -492,18 +545,20 @@ public static ExecutionMode fromString(String value) { this.parseField = parseField; } - abstract Aggregator create(String name, - AggregatorFactories factories, - ValuesSourceConfig valuesSourceConfig, - BucketOrder order, - TermsAggregator.BucketCountThresholds bucketCountThresholds, - IncludeExclude includeExclude, - AggregationContext context, - Aggregator parent, - SubAggCollectionMode subAggCollectMode, - boolean showTermDocCountError, - CardinalityUpperBound cardinality, - Map metadata) throws IOException; + abstract Aggregator create( + String name, + AggregatorFactories factories, + ValuesSourceConfig valuesSourceConfig, + BucketOrder order, + TermsAggregator.BucketCountThresholds bucketCountThresholds, + IncludeExclude includeExclude, + AggregationContext context, + Aggregator parent, + SubAggCollectionMode subAggCollectMode, + boolean showTermDocCountError, + CardinalityUpperBound cardinality, + Map metadata + ) throws IOException; @Override public String toString() { @@ -520,11 +575,8 @@ public static SortedSetDocValues globalOrdsValues(AggregationContext context, Va return valuesSource.globalOrdinalsValues(reader.leaves().get(0)); } - public static LongPredicate gloabalOrdsFilter( - IncludeExclude includeExclude, - DocValueFormat format, - SortedSetDocValues values - ) throws IOException { + public static LongPredicate gloabalOrdsFilter(IncludeExclude includeExclude, DocValueFormat format, SortedSetDocValues values) + throws IOException { if (includeExclude == null) { return GlobalOrdinalsStringTermsAggregator.ALWAYS_TRUE; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorSupplier.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorSupplier.java index bd51f942bfd5e..0d8d1d7a19045 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorSupplier.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorSupplier.java @@ -18,17 +18,19 @@ import java.util.Map; interface TermsAggregatorSupplier { - Aggregator build(String name, - AggregatorFactories factories, - ValuesSourceConfig valuesSourceConfig, - BucketOrder order, - TermsAggregator.BucketCountThresholds bucketCountThresholds, - IncludeExclude includeExclude, - String executionHint, - AggregationContext context, - 
Aggregator parent, - Aggregator.SubAggCollectionMode subAggCollectMode, - boolean showTermDocCountError, - CardinalityUpperBound cardinality, - Map metadata) throws IOException; + Aggregator build( + String name, + AggregatorFactories factories, + ValuesSourceConfig valuesSourceConfig, + BucketOrder order, + TermsAggregator.BucketCountThresholds bucketCountThresholds, + IncludeExclude includeExclude, + String executionHint, + AggregationContext context, + Aggregator parent, + Aggregator.SubAggCollectionMode subAggCollectMode, + boolean showTermDocCountError, + CardinalityUpperBound cardinality, + Map metadata + ) throws IOException; } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/UnmappedSignificantTerms.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/UnmappedSignificantTerms.java index 6584a9e3bc0d6..8ba548e1c147b 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/UnmappedSignificantTerms.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/UnmappedSignificantTerms.java @@ -36,8 +36,15 @@ public class UnmappedSignificantTerms extends InternalSignificantTerms { - private Bucket(BytesRef term, long subsetDf, long subsetSize, long supersetDf, long supersetSize, InternalAggregations aggregations, - DocValueFormat format) { + private Bucket( + BytesRef term, + long subsetDf, + long subsetSize, + long supersetDf, + long supersetSize, + InternalAggregations aggregations, + DocValueFormat format + ) { super(subsetDf, subsetSize, supersetDf, supersetSize, aggregations, format); } } @@ -84,8 +91,14 @@ protected UnmappedSignificantTerms create(long subsetSize, long supersetSize, Li } @Override - Bucket createBucket(long subsetDf, long subsetSize, long supersetDf, long supersetSize, - InternalAggregations aggregations, Bucket prototype) { + Bucket createBucket( + long subsetDf, + long subsetSize, + long supersetDf, + long supersetSize, + InternalAggregations aggregations, + Bucket prototype + ) { throw new UnsupportedOperationException("not supported for UnmappedSignificantTerms"); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/UnmappedTerms.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/UnmappedTerms.java index a87680c215629..ed16a441a2f79 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/UnmappedTerms.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/UnmappedTerms.java @@ -33,8 +33,13 @@ public class UnmappedTerms extends InternalTerms { - private Bucket(long docCount, InternalAggregations aggregations, boolean showDocCountError, long docCountError, - DocValueFormat formatter) { + private Bucket( + long docCount, + InternalAggregations aggregations, + boolean showDocCountError, + long docCountError, + DocValueFormat formatter + ) { super(docCount, aggregations, showDocCountError, docCountError, formatter); } } @@ -101,8 +106,7 @@ public final XContentBuilder doXContentBody(XContentBuilder builder, Params para } @Override - protected void setDocCountError(long docCountError) { - } + protected void setDocCountError(long docCountError) {} @Override protected int getShardSize() { diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/heuristic/ChiSquare.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/heuristic/ChiSquare.java index 41f24303c19ac..11b404de6de2e 100644 --- 
a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/heuristic/ChiSquare.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/heuristic/ChiSquare.java @@ -6,10 +6,8 @@ * Side Public License, v 1. */ - package org.elasticsearch.search.aggregations.bucket.terms.heuristic; - import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.xcontent.ConstructingObjectParser; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -19,7 +17,9 @@ public class ChiSquare extends NXYSignificanceHeuristic { public static final String NAME = "chi_square"; public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - NAME, buildFromParsedArgs(ChiSquare::new)); + NAME, + buildFromParsedArgs(ChiSquare::new) + ); static { NXYSignificanceHeuristic.declareParseFields(PARSER); } @@ -62,8 +62,8 @@ public double getScore(long subsetFreq, long subsetSize, long supersetFreq, long if (includeNegatives == false && frequencies.N11 / frequencies.N_1 < frequencies.N10 / frequencies.N_0) { return Double.NEGATIVE_INFINITY; } - return (frequencies.N * Math.pow((frequencies.N11 * frequencies.N00 - frequencies.N01 * frequencies.N10), 2.0) / - ((frequencies.N_1) * (frequencies.N1_) * (frequencies.N0_) * (frequencies.N_0))); + return (frequencies.N * Math.pow((frequencies.N11 * frequencies.N00 - frequencies.N01 * frequencies.N10), 2.0) / ((frequencies.N_1) + * (frequencies.N1_) * (frequencies.N0_) * (frequencies.N_0))); } @Override @@ -93,4 +93,3 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws } } } - diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/heuristic/GND.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/heuristic/GND.java index 2d3aeb0d2ff40..86b1548b2a761 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/heuristic/GND.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/heuristic/GND.java @@ -6,10 +6,8 @@ * Side Public License, v 1. 
*/ - package org.elasticsearch.search.aggregations.bucket.terms.heuristic; - import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.ConstructingObjectParser; @@ -80,10 +78,9 @@ public double getScore(long subsetFreq, long subsetSize, long supersetFreq, long // perfect co-occurrence return 1.0; } - double score = (Math.max(Math.log(fx), Math.log(fy)) - Math.log(fxy)) / - (Math.log(N) - Math.min(Math.log(fx), Math.log(fy))); + double score = (Math.max(Math.log(fx), Math.log(fy)) - Math.log(fxy)) / (Math.log(N) - Math.min(Math.log(fx), Math.log(fy))); - //we must invert the order of terms because GND scores relevant terms low + // we must invert the order of terms because GND scores relevant terms low score = Math.exp(-1.0d * score); return score; } @@ -115,4 +112,3 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws } } } - diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/heuristic/JLHScore.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/heuristic/JLHScore.java index 3d85a2834af38..6230963c50976 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/heuristic/JLHScore.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/heuristic/JLHScore.java @@ -6,23 +6,20 @@ * Side Public License, v 1. */ - package org.elasticsearch.search.aggregations.bucket.terms.heuristic; - -import java.io.IOException; - import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.XContentBuilder; +import java.io.IOException; + public class JLHScore extends SignificanceHeuristic { public static final String NAME = "jlh"; public static final ObjectParser PARSER = new ObjectParser<>(NAME, JLHScore::new); - public JLHScore() { - } + public JLHScore() {} /** * Read from a stream. @@ -32,8 +29,7 @@ public JLHScore(StreamInput in) { } @Override - public void writeTo(StreamOutput out) throws IOException { - } + public void writeTo(StreamOutput out) throws IOException {} @Override public String getWriteableName() { @@ -113,4 +109,3 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws } } } - diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/heuristic/MutualInformation.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/heuristic/MutualInformation.java index 662cf9cd4a3ea..257f019014e87 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/heuristic/MutualInformation.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/heuristic/MutualInformation.java @@ -6,10 +6,8 @@ * Side Public License, v 1. 
*/ - package org.elasticsearch.search.aggregations.bucket.terms.heuristic; - import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.xcontent.ConstructingObjectParser; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -19,7 +17,9 @@ public class MutualInformation extends NXYSignificanceHeuristic { public static final String NAME = "mutual_information"; public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - NAME, buildFromParsedArgs(MutualInformation::new)); + NAME, + buildFromParsedArgs(MutualInformation::new) + ); static { NXYSignificanceHeuristic.declareParseFields(PARSER); } @@ -37,7 +37,6 @@ public MutualInformation(StreamInput in) throws IOException { super(in); } - @Override public boolean equals(Object other) { if ((other instanceof MutualInformation) == false) { @@ -61,11 +60,17 @@ public int hashCode() { public double getScore(long subsetFreq, long subsetSize, long supersetFreq, long supersetSize) { Frequencies frequencies = computeNxys(subsetFreq, subsetSize, supersetFreq, supersetSize, "MutualInformation"); - double score = (getMITerm(frequencies.N00, frequencies.N0_, frequencies.N_0, frequencies.N) + - getMITerm(frequencies.N01, frequencies.N0_, frequencies.N_1, frequencies.N) + - getMITerm(frequencies.N10, frequencies.N1_, frequencies.N_0, frequencies.N) + - getMITerm(frequencies.N11, frequencies.N1_, frequencies.N_1, frequencies.N)) - / log2; + double score = (getMITerm(frequencies.N00, frequencies.N0_, frequencies.N_0, frequencies.N) + getMITerm( + frequencies.N01, + frequencies.N0_, + frequencies.N_1, + frequencies.N + ) + getMITerm(frequencies.N10, frequencies.N1_, frequencies.N_0, frequencies.N) + getMITerm( + frequencies.N11, + frequencies.N1_, + frequencies.N_1, + frequencies.N + )) / log2; if (Double.isNaN(score)) { score = Double.NEGATIVE_INFINITY; @@ -127,4 +132,3 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws } } } - diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/heuristic/NXYSignificanceHeuristic.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/heuristic/NXYSignificanceHeuristic.java index 7647b1f68b7f8..bdfa72813ecef 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/heuristic/NXYSignificanceHeuristic.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/heuristic/NXYSignificanceHeuristic.java @@ -6,14 +6,12 @@ * Side Public License, v 1. */ - package org.elasticsearch.search.aggregations.bucket.terms.heuristic; - -import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.ConstructingObjectParser; +import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.xcontent.XContentBuilder; import java.io.IOException; @@ -28,8 +26,10 @@ public abstract class NXYSignificanceHeuristic extends SignificanceHeuristic { protected static final ParseField INCLUDE_NEGATIVES_FIELD = new ParseField("include_negatives"); - protected static final String SCORE_ERROR_MESSAGE = ", does your background filter not include all documents in the bucket? " + - "If so and it is intentional, set \"" + BACKGROUND_IS_SUPERSET.getPreferredName() + "\": false"; + protected static final String SCORE_ERROR_MESSAGE = ", does your background filter not include all documents in the bucket? 
" + + "If so and it is intentional, set \"" + + BACKGROUND_IS_SUPERSET.getPreferredName() + + "\": false"; protected final boolean backgroundIsSuperset; @@ -61,17 +61,12 @@ public void writeTo(StreamOutput out) throws IOException { @Override public boolean equals(Object obj) { - if (this == obj) - return true; - if (obj == null) - return false; - if (getClass() != obj.getClass()) - return false; + if (this == obj) return true; + if (obj == null) return false; + if (getClass() != obj.getClass()) return false; NXYSignificanceHeuristic other = (NXYSignificanceHeuristic) obj; - if (backgroundIsSuperset != other.backgroundIsSuperset) - return false; - if (includeNegatives != other.includeNegatives) - return false; + if (backgroundIsSuperset != other.backgroundIsSuperset) return false; + if (includeNegatives != other.includeNegatives) return false; return true; } @@ -90,42 +85,42 @@ protected Frequencies computeNxys(long subsetFreq, long subsetSize, long superse checkFrequencies(subsetFreq, subsetSize, supersetFreq, supersetSize, scoreFunctionName); Frequencies frequencies = new Frequencies(); if (backgroundIsSuperset) { - //documents not in class and do not contain term + // documents not in class and do not contain term frequencies.N00 = supersetSize - supersetFreq - (subsetSize - subsetFreq); - //documents in class and do not contain term + // documents in class and do not contain term frequencies.N01 = (subsetSize - subsetFreq); // documents not in class and do contain term frequencies.N10 = supersetFreq - subsetFreq; // documents in class and do contain term frequencies.N11 = subsetFreq; - //documents that do not contain term + // documents that do not contain term frequencies.N0_ = supersetSize - supersetFreq; - //documents that contain term + // documents that contain term frequencies.N1_ = supersetFreq; - //documents that are not in class + // documents that are not in class frequencies.N_0 = supersetSize - subsetSize; - //documents that are in class + // documents that are in class frequencies.N_1 = subsetSize; - //all docs + // all docs frequencies.N = supersetSize; } else { - //documents not in class and do not contain term + // documents not in class and do not contain term frequencies.N00 = supersetSize - supersetFreq; - //documents in class and do not contain term + // documents in class and do not contain term frequencies.N01 = subsetSize - subsetFreq; // documents not in class and do contain term frequencies.N10 = supersetFreq; // documents in class and do contain term frequencies.N11 = subsetFreq; - //documents that do not contain term + // documents that do not contain term frequencies.N0_ = supersetSize - supersetFreq + subsetSize - subsetFreq; - //documents that contain term + // documents that contain term frequencies.N1_ = supersetFreq + subsetFreq; - //documents that are not in class + // documents that are not in class frequencies.N_0 = supersetSize; - //documents that are in class + // documents that are in class frequencies.N_1 = subsetSize; - //all docs + // all docs frequencies.N = supersetSize + subsetSize; } return frequencies; @@ -147,8 +142,8 @@ protected void checkFrequencies(long subsetFreq, long subsetSize, long supersetF } protected void build(XContentBuilder builder) throws IOException { - builder.field(INCLUDE_NEGATIVES_FIELD.getPreferredName(), includeNegatives).field(BACKGROUND_IS_SUPERSET.getPreferredName(), - backgroundIsSuperset); + builder.field(INCLUDE_NEGATIVES_FIELD.getPreferredName(), includeNegatives) + 
.field(BACKGROUND_IS_SUPERSET.getPreferredName(), backgroundIsSuperset); } /** @@ -181,8 +176,7 @@ public NXYBuilder(boolean includeNegatives, boolean backgroundIsSuperset) { protected void build(XContentBuilder builder) throws IOException { builder.field(INCLUDE_NEGATIVES_FIELD.getPreferredName(), includeNegatives) - .field(BACKGROUND_IS_SUPERSET.getPreferredName(), backgroundIsSuperset); + .field(BACKGROUND_IS_SUPERSET.getPreferredName(), backgroundIsSuperset); } } } - diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/heuristic/PercentageScore.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/heuristic/PercentageScore.java index 5b2b83e0db108..dc22a8e49e8f1 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/heuristic/PercentageScore.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/heuristic/PercentageScore.java @@ -6,10 +6,8 @@ * Side Public License, v 1. */ - package org.elasticsearch.search.aggregations.bucket.terms.heuristic; - import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -24,16 +22,14 @@ public class PercentageScore extends SignificanceHeuristic { public static final String NAME = "percentage"; public static final ObjectParser PARSER = new ObjectParser<>(NAME, PercentageScore::new); - public PercentageScore() { - } + public PercentageScore() {} public PercentageScore(StreamInput in) { // Nothing to read. } @Override - public void writeTo(StreamOutput out) throws IOException { - } + public void writeTo(StreamOutput out) throws IOException {} @Override public String getWriteableName() { @@ -46,12 +42,13 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws return builder; } - public static SignificanceHeuristic parse(XContentParser parser) - throws IOException, QueryShardException { + public static SignificanceHeuristic parse(XContentParser parser) throws IOException, QueryShardException { // move to the closing bracket if (parser.nextToken().equals(XContentParser.Token.END_OBJECT) == false) { - throw new ElasticsearchParseException("failed to parse [percentage] significance heuristic. expected an empty object, " + - "but got [{}] instead", parser.currentToken()); + throw new ElasticsearchParseException( + "failed to parse [percentage] significance heuristic. expected an empty object, " + "but got [{}] instead", + parser.currentToken() + ); } return new PercentageScore(); } @@ -92,4 +89,3 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws } } } - diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/heuristic/ScriptHeuristic.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/heuristic/ScriptHeuristic.java index 0887ee869c5f3..89728ae7816ba 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/heuristic/ScriptHeuristic.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/heuristic/ScriptHeuristic.java @@ -6,10 +6,8 @@ * Side Public License, v 1. 
*/ - package org.elasticsearch.search.aggregations.bucket.terms.heuristic; - import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.ConstructingObjectParser; @@ -28,8 +26,10 @@ public class ScriptHeuristic extends SignificanceHeuristic { public static final String NAME = "script_heuristic"; - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>(NAME, args -> - new ScriptHeuristic((Script) args[0])); + public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + NAME, + args -> new ScriptHeuristic((Script) args[0]) + ); static { Script.declareScript(PARSER, constructorArg()); } @@ -67,7 +67,7 @@ public double getScore(long subsetFreq, long subsetSize, long supersetFreq, long subsetDfHolder.value = subsetFreq; supersetDfHolder.value = supersetFreq; return executableScript.execute(params); - } + } } public ScriptHeuristic(Script script) { @@ -88,8 +88,8 @@ public void writeTo(StreamOutput out) throws IOException { @Override public SignificanceHeuristic rewrite(InternalAggregation.ReduceContext context) { - SignificantTermsHeuristicScoreScript.Factory factory = context.scriptService().compile(script, - SignificantTermsHeuristicScoreScript.CONTEXT); + SignificantTermsHeuristicScoreScript.Factory factory = context.scriptService() + .compile(script, SignificantTermsHeuristicScoreScript.CONTEXT); return new ExecutableScriptHeuristic(script, factory.newInstance()); } @@ -99,7 +99,6 @@ public SignificanceHeuristic rewrite(AggregationContext context) { return new ExecutableScriptHeuristic(script, compiledScript.newInstance()); } - /** * Calculates score with a script * @@ -111,8 +110,9 @@ public SignificanceHeuristic rewrite(AggregationContext context) { */ @Override public double getScore(long subsetFreq, long subsetSize, long supersetFreq, long supersetSize) { - throw new UnsupportedOperationException("This scoring heuristic must have 'rewrite' called on it to provide a version ready " + - "for use"); + throw new UnsupportedOperationException( + "This scoring heuristic must have 'rewrite' called on it to provide a version ready " + "for use" + ); } @Override @@ -148,10 +148,12 @@ public boolean equals(Object obj) { public final class LongAccessor extends Number { public long value; + @Override public int intValue() { - return (int)value; + return (int) value; } + @Override public long longValue() { return value; @@ -173,4 +175,3 @@ public String toString() { } } } - diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/heuristic/SignificanceHeuristic.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/heuristic/SignificanceHeuristic.java index 3119300fc89ab..cfc14069f082e 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/heuristic/SignificanceHeuristic.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/heuristic/SignificanceHeuristic.java @@ -27,11 +27,17 @@ public abstract class SignificanceHeuristic implements NamedWriteable, ToXConten */ public abstract double getScore(long subsetFreq, long subsetSize, long supersetFreq, long supersetSize); - protected void checkFrequencyValidity(long subsetFreq, long subsetSize, long supersetFreq, long supersetSize, - String scoreFunctionName) { + protected void checkFrequencyValidity( + long subsetFreq, + long subsetSize, + long supersetFreq, + long supersetSize, + String 
scoreFunctionName + ) { if (subsetFreq < 0 || subsetSize < 0 || supersetFreq < 0 || supersetSize < 0) { - throw new IllegalArgumentException("Frequencies of subset and superset must be positive in " + scoreFunctionName + - ".getScore()"); + throw new IllegalArgumentException( + "Frequencies of subset and superset must be positive in " + scoreFunctionName + ".getScore()" + ); } if (subsetFreq > subsetSize) { throw new IllegalArgumentException("subsetFreq > subsetSize, in " + scoreFunctionName); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/heuristic/SignificanceHeuristicBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/heuristic/SignificanceHeuristicBuilder.java index 17dd7cfc27af5..02dcc9cbbbada 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/heuristic/SignificanceHeuristicBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/heuristic/SignificanceHeuristicBuilder.java @@ -6,11 +6,8 @@ * Side Public License, v 1. */ - package org.elasticsearch.search.aggregations.bucket.terms.heuristic; import org.elasticsearch.common.xcontent.ToXContentFragment; - -public interface SignificanceHeuristicBuilder extends ToXContentFragment { -} +public interface SignificanceHeuristicBuilder extends ToXContentFragment {} diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractHDRPercentilesAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractHDRPercentilesAggregator.java index 5a8568960a018..1f8de8d246e82 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractHDRPercentilesAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractHDRPercentilesAggregator.java @@ -11,10 +11,10 @@ import org.HdrHistogram.DoubleHistogram; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.ScoreMode; -import org.elasticsearch.core.Releasables; import org.elasticsearch.common.util.ArrayUtils; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.ObjectArray; +import org.elasticsearch.core.Releasables; import org.elasticsearch.index.fielddata.SortedNumericDoubleValues; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.Aggregator; @@ -39,9 +39,17 @@ private static int indexOfKey(double[] keys, double key) { protected final int numberOfSignificantValueDigits; protected final boolean keyed; - AbstractHDRPercentilesAggregator(String name, ValuesSource valuesSource, AggregationContext context, Aggregator parent, - double[] keys, int numberOfSignificantValueDigits, boolean keyed, DocValueFormat formatter, - Map metadata) throws IOException { + AbstractHDRPercentilesAggregator( + String name, + ValuesSource valuesSource, + AggregationContext context, + Aggregator parent, + double[] keys, + int numberOfSignificantValueDigits, + boolean keyed, + DocValueFormat formatter, + Map metadata + ) throws IOException { super(name, context, parent, metadata); this.valuesSource = valuesSource; this.keyed = keyed; @@ -57,12 +65,11 @@ public ScoreMode scoreMode() { } @Override - public LeafBucketCollector getLeafCollector(LeafReaderContext ctx, - final LeafBucketCollector sub) throws IOException { + public LeafBucketCollector getLeafCollector(LeafReaderContext ctx, final LeafBucketCollector sub) throws IOException { if (valuesSource == null) { return 
LeafBucketCollector.NO_OP_COLLECTOR; } - final SortedNumericDoubleValues values = ((ValuesSource.Numeric)valuesSource).doubleValues(ctx); + final SortedNumericDoubleValues values = ((ValuesSource.Numeric) valuesSource).doubleValues(ctx); return new LeafBucketCollectorBase(sub, values) { @Override public void collect(int doc, long bucket) throws IOException { diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractHyperLogLog.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractHyperLogLog.java index 04fb0cf2d8bb7..b4fc6843fd16c 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractHyperLogLog.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractHyperLogLog.java @@ -21,6 +21,7 @@ public abstract class AbstractHyperLogLog extends AbstractCardinalityAlgorithm { private static final int BIAS_K = 6; // these static tables come from the appendix of the paper + // @formatter:off private static final double[][] RAW_ESTIMATE_DATA = { // precision 4 { 11, 11.717, 12.207, 12.7896, 13.2882, 13.8204, 14.3772, 14.9342, 15.5202, 16.161, 16.7722, 17.4636, 18.0396, 18.6766, 19.3566, @@ -688,6 +689,7 @@ public abstract class AbstractHyperLogLog extends AbstractCardinalityAlgorithm { -404.317000000039, -528.898999999976, -506.621000000043, -513.205000000075, -479.351000000024, -596.139999999898, -527.016999999993, -664.681000000099, -680.306000000099, -704.050000000047, -850.486000000034, -757.43200000003, -713.308999999892, } }; + // @formatter:on private static final long[] THRESHOLDS = new long[] { 10, diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractHyperLogLogPlusPlus.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractHyperLogLogPlusPlus.java index 2394ff7939e9f..edd1b42668697 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractHyperLogLogPlusPlus.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractHyperLogLogPlusPlus.java @@ -10,8 +10,8 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.core.Releasable; import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.core.Releasable; import java.io.IOException; import java.util.HashMap; @@ -49,7 +49,6 @@ public AbstractHyperLogLogPlusPlus(int precision) { /** Collect a value in the given bucket */ public abstract void collect(long bucketOrd, long hash); - /** Clone the data structure at the given bucket */ public AbstractHyperLogLogPlusPlus clone(long bucketOrd, BigArrays bigArrays) { if (getAlgorithm(bucketOrd) == LINEAR_COUNTING) { @@ -105,7 +104,7 @@ public void writeTo(long bucket, StreamOutput out) throws IOException { } else { out.writeBoolean(HYPERLOGLOG); AbstractHyperLogLog.RunLenIterator iterator = getHyperLogLog(bucket); - while (iterator.next()){ + while (iterator.next()) { out.writeByte(iterator.value()); } } @@ -117,7 +116,7 @@ public static AbstractHyperLogLogPlusPlus readFrom(StreamInput in, BigArrays big if (algorithm == LINEAR_COUNTING) { // we use a sparse structure for linear counting final long size = in.readVLong(); - HyperLogLogPlusPlusSparse counts = new HyperLogLogPlusPlusSparse(precision, bigArrays, 1); + HyperLogLogPlusPlusSparse counts = new HyperLogLogPlusPlusSparse(precision, bigArrays, 1); counts.ensureCapacity(0, size); for (long i = 0; i < size; ++i) { 
counts.addEncoded(0, in.readInt()); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractInternalHDRPercentiles.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractInternalHDRPercentiles.java index 6ae2e45244173..bf45092acd2ed 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractInternalHDRPercentiles.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractInternalHDRPercentiles.java @@ -30,8 +30,14 @@ abstract class AbstractInternalHDRPercentiles extends InternalNumericMetricsAggr protected final DoubleHistogram state; protected final boolean keyed; - AbstractInternalHDRPercentiles(String name, double[] keys, DoubleHistogram state, boolean keyed, DocValueFormat format, - Map metadata) { + AbstractInternalHDRPercentiles( + String name, + double[] keys, + DoubleHistogram state, + boolean keyed, + DocValueFormat format, + Map metadata + ) { super(name, metadata); this.keys = keys; this.state = state; @@ -126,19 +132,24 @@ public AbstractInternalHDRPercentiles reduce(List aggregati return createReduced(getName(), keys, merged, keyed, getMetadata()); } - protected abstract AbstractInternalHDRPercentiles createReduced(String name, double[] keys, DoubleHistogram merged, boolean keyed, - Map metadata); + protected abstract AbstractInternalHDRPercentiles createReduced( + String name, + double[] keys, + DoubleHistogram merged, + boolean keyed, + Map metadata + ); @Override public XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException { if (keyed) { builder.startObject(CommonFields.VALUES.getPreferredName()); - for(int i = 0; i < keys.length; ++i) { + for (int i = 0; i < keys.length; ++i) { String key = String.valueOf(keys[i]); double value = value(keys[i]); builder.field(key, state.getTotalCount() == 0 ? 
null : value); if (format != DocValueFormat.RAW && state.getTotalCount() > 0) { - builder.field(key + "_as_string", format.format(value).toString()); + builder.field(key + "_as_string", format.format(value).toString()); } } builder.endObject(); @@ -166,9 +177,7 @@ public boolean equals(Object obj) { if (super.equals(obj) == false) return false; AbstractInternalHDRPercentiles that = (AbstractInternalHDRPercentiles) obj; - return keyed == that.keyed - && Arrays.equals(keys, that.keys) - && Objects.equals(state, that.state); + return keyed == that.keyed && Arrays.equals(keys, that.keys) && Objects.equals(state, that.state); } @Override @@ -176,10 +185,12 @@ public int hashCode() { // we cannot use state.hashCode at the moment because of: // https://github.com/HdrHistogram/HdrHistogram/issues/81 // TODO: upgrade the HDRHistogram library - return Objects.hash(super.hashCode(), + return Objects.hash( + super.hashCode(), keyed, Arrays.hashCode(keys), state.getIntegerToDoubleValueConversionRatio(), - state.getTotalCount()); + state.getTotalCount() + ); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractInternalTDigestPercentiles.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractInternalTDigestPercentiles.java index d6030d403aa0d..a4fd0a5d37009 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractInternalTDigestPercentiles.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractInternalTDigestPercentiles.java @@ -27,8 +27,14 @@ abstract class AbstractInternalTDigestPercentiles extends InternalNumericMetrics protected final TDigestState state; final boolean keyed; - AbstractInternalTDigestPercentiles(String name, double[] keys, TDigestState state, boolean keyed, DocValueFormat formatter, - Map metadata) { + AbstractInternalTDigestPercentiles( + String name, + double[] keys, + TDigestState state, + boolean keyed, + DocValueFormat formatter, + Map metadata + ) { super(name, metadata); this.keys = keys; this.state = state; @@ -109,14 +115,19 @@ public AbstractInternalTDigestPercentiles reduce(List aggre return createReduced(getName(), keys, merged, keyed, getMetadata()); } - protected abstract AbstractInternalTDigestPercentiles createReduced(String name, double[] keys, TDigestState merged, boolean keyed, - Map metadata); + protected abstract AbstractInternalTDigestPercentiles createReduced( + String name, + double[] keys, + TDigestState merged, + boolean keyed, + Map metadata + ); @Override public XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException { if (keyed) { builder.startObject(CommonFields.VALUES.getPreferredName()); - for(int i = 0; i < keys.length; ++i) { + for (int i = 0; i < keys.length; ++i) { String key = String.valueOf(keys[i]); double value = value(keys[i]); builder.field(key, state.size() == 0 ? 
null : value); @@ -149,9 +160,7 @@ public boolean equals(Object obj) { if (super.equals(obj) == false) return false; AbstractInternalTDigestPercentiles that = (AbstractInternalTDigestPercentiles) obj; - return keyed == that.keyed - && Arrays.equals(keys, that.keys) - && Objects.equals(state, that.state); + return keyed == that.keyed && Arrays.equals(keys, that.keys) && Objects.equals(state, that.state); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractPercentilesAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractPercentilesAggregationBuilder.java index f33a74e48a803..d66ea0d8d713e 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractPercentilesAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractPercentilesAggregationBuilder.java @@ -8,14 +8,14 @@ package org.elasticsearch.search.aggregations.metrics; import org.elasticsearch.Version; -import org.elasticsearch.core.Nullable; -import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.TriFunction; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.xcontent.ConstructingObjectParser; +import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.core.Nullable; import org.elasticsearch.search.aggregations.AggregatorFactories; import org.elasticsearch.search.aggregations.support.ValuesSource; import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder; @@ -32,8 +32,8 @@ * It provides a set of common fields/functionality for setting the available algorithms (TDigest and HDRHistogram), * as well as algorithm-specific settings via a {@link PercentilesConfig} object */ -public abstract class AbstractPercentilesAggregationBuilder> - extends ValuesSourceAggregationBuilder.LeafOnly { +public abstract class AbstractPercentilesAggregationBuilder> extends + ValuesSourceAggregationBuilder.LeafOnly { public static final ParseField KEYED_FIELD = new ParseField("keyed"); private final ParseField valuesField; @@ -41,10 +41,12 @@ public abstract class AbstractPercentilesAggregationBuilder> ConstructingObjectParser createParser(String aggName, - TriFunction ctor, - Supplier defaultConfig, - ParseField valuesField) { + public static > ConstructingObjectParser createParser( + String aggName, + TriFunction ctor, + Supplier defaultConfig, + ParseField valuesField + ) { /** * This is a non-ideal ConstructingObjectParser, because it is a compromise between Percentiles and Ranks. 
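Note on the reduce logic reformatted above: each shard response carries its own histogram (a DoubleHistogram for the HDR variant, a TDigestState for the t-digest variant), and reduce merges them before the percentile keys are read from the merged state. A minimal standalone sketch of that merge step against the HdrHistogram library; the three-significant-digit setting and the sample values are illustrative and not taken from this patch:

    import org.HdrHistogram.DoubleHistogram;

    public class HdrReduceSketch {
        public static void main(String[] args) {
            // Stand-ins for the per-shard states held by AbstractInternalHDRPercentiles.
            DoubleHistogram shard1 = new DoubleHistogram(3);
            DoubleHistogram shard2 = new DoubleHistogram(3);
            for (double v : new double[] { 1.0, 2.0, 5.0 }) shard1.recordValue(v);
            for (double v : new double[] { 3.0, 8.0, 13.0 }) shard2.recordValue(v);

            // The reduce phase: fold every shard histogram into one merged state.
            DoubleHistogram merged = new DoubleHistogram(3);
            merged.add(shard1);
            merged.add(shard2);

            // Percentile keys are then evaluated against the merged state.
            System.out.println("count = " + merged.getTotalCount());
            System.out.println("p50   = " + merged.getValueAtPercentile(50.0));
            System.out.println("p95   = " + merged.getValueAtPercentile(95.0));
        }
    }
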
@@ -97,16 +99,21 @@ public static > ConstructingO ValuesSourceAggregationBuilder.declareFields(parser, true, true, false); parser.declareDoubleArray(ConstructingObjectParser.optionalConstructorArg(), valuesField); parser.declareBoolean(T::keyed, KEYED_FIELD); - parser.declareObject(ConstructingObjectParser.optionalConstructorArg(), PercentilesMethod.TDIGEST_PARSER, - PercentilesMethod.TDIGEST.getParseField()); - parser.declareObject(ConstructingObjectParser.optionalConstructorArg(), PercentilesMethod.HDR_PARSER, - PercentilesMethod.HDR.getParseField()); + parser.declareObject( + ConstructingObjectParser.optionalConstructorArg(), + PercentilesMethod.TDIGEST_PARSER, + PercentilesMethod.TDIGEST.getParseField() + ); + parser.declareObject( + ConstructingObjectParser.optionalConstructorArg(), + PercentilesMethod.HDR_PARSER, + PercentilesMethod.HDR.getParseField() + ); return parser; } - AbstractPercentilesAggregationBuilder(String name, double[] values, PercentilesConfig percentilesConfig, - ParseField valuesField) { + AbstractPercentilesAggregationBuilder(String name, double[] values, PercentilesConfig percentilesConfig, ParseField valuesField) { super(name); if (values == null) { throw new IllegalArgumentException("[" + valuesField.getPreferredName() + "] must not be null: [" + name + "]"); @@ -121,8 +128,11 @@ public static > ConstructingO this.valuesField = valuesField; } - AbstractPercentilesAggregationBuilder(AbstractPercentilesAggregationBuilder clone, - AggregatorFactories.Builder factoriesBuilder, Map metadata) { + AbstractPercentilesAggregationBuilder( + AbstractPercentilesAggregationBuilder clone, + AggregatorFactories.Builder factoriesBuilder, + Map metadata + ) { super(clone, factoriesBuilder, metadata); this.percentilesConfig = clone.percentilesConfig; this.keyed = clone.keyed; @@ -136,8 +146,7 @@ public static > ConstructingO values = in.readDoubleArray(); keyed = in.readBoolean(); if (in.getVersion().onOrAfter(Version.V_7_8_0)) { - percentilesConfig - = (PercentilesConfig) in.readOptionalWriteable((Reader) PercentilesConfig::fromStream); + percentilesConfig = (PercentilesConfig) in.readOptionalWriteable((Reader) PercentilesConfig::fromStream); } else { int numberOfSignificantValueDigits = in.readVInt(); double compression = in.readDouble(); @@ -153,14 +162,14 @@ protected void innerWriteTo(StreamOutput out) throws IOException { if (out.getVersion().onOrAfter(Version.V_7_8_0)) { out.writeOptionalWriteable(percentilesConfig); } else { - // Legacy method serialized both SigFigs and compression, even though we only need one. So we need + // Legacy method serialized both SigFigs and compression, even though we only need one. So we need // to serialize the default for the unused method int numberOfSignificantValueDigits = percentilesConfig.getMethod().equals(PercentilesMethod.HDR) - ? ((PercentilesConfig.Hdr)percentilesConfig).getNumberOfSignificantValueDigits() + ? ((PercentilesConfig.Hdr) percentilesConfig).getNumberOfSignificantValueDigits() : PercentilesConfig.Hdr.DEFAULT_NUMBER_SIG_FIGS; double compression = percentilesConfig.getMethod().equals(PercentilesMethod.TDIGEST) - ? ((PercentilesConfig.TDigest)percentilesConfig).getCompression() + ? 
((PercentilesConfig.TDigest) percentilesConfig).getCompression() : PercentilesConfig.TDigest.DEFAULT_COMPRESSION; out.writeVInt(numberOfSignificantValueDigits); @@ -198,8 +207,9 @@ public T numberOfSignificantValueDigits(int numberOfSignificantValueDigits) { if (percentilesConfig == null || percentilesConfig.getMethod().equals(PercentilesMethod.HDR)) { percentilesConfig = new PercentilesConfig.Hdr(numberOfSignificantValueDigits); } else { - throw new IllegalArgumentException("Cannot set [numberOfSignificantValueDigits] because the method " + - "has already been configured for TDigest"); + throw new IllegalArgumentException( + "Cannot set [numberOfSignificantValueDigits] because the method " + "has already been configured for TDigest" + ); } return (T) this; @@ -215,7 +225,7 @@ public T numberOfSignificantValueDigits(int numberOfSignificantValueDigits) { @Deprecated public int numberOfSignificantValueDigits() { if (percentilesConfig != null && percentilesConfig.getMethod().equals(PercentilesMethod.HDR)) { - return ((PercentilesConfig.Hdr)percentilesConfig).getNumberOfSignificantValueDigits(); + return ((PercentilesConfig.Hdr) percentilesConfig).getNumberOfSignificantValueDigits(); } throw new IllegalStateException("Percentiles [method] has not been configured yet, or is a TDigest"); } @@ -248,12 +258,11 @@ public T compression(double compression) { @Deprecated public double compression() { if (percentilesConfig != null && percentilesConfig.getMethod().equals(PercentilesMethod.TDIGEST)) { - return ((PercentilesConfig.TDigest)percentilesConfig).getCompression(); + return ((PercentilesConfig.TDigest) percentilesConfig).getCompression(); } throw new IllegalStateException("Percentiles [method] has not been configured yet, or is a HdrHistogram"); } - /** * Deprecated: set method by configuring a {@link PercentilesConfig} instead * and set via {@link PercentilesAggregationBuilder#percentilesConfig(PercentilesConfig)} @@ -265,7 +274,7 @@ public T method(PercentilesMethod method) { throw new IllegalArgumentException("[method] must not be null: [" + name + "]"); } if (percentilesConfig == null) { - if (method.equals(PercentilesMethod.TDIGEST) ) { + if (method.equals(PercentilesMethod.TDIGEST)) { this.percentilesConfig = new PercentilesConfig.TDigest(); } else { this.percentilesConfig = new PercentilesConfig.Hdr(); @@ -273,7 +282,7 @@ public T method(PercentilesMethod method) { } else if (percentilesConfig.getMethod().equals(method) == false) { // we already have an algo configured, but it's different from the requested method // reset to default for the requested method - if (method.equals(PercentilesMethod.TDIGEST) ) { + if (method.equals(PercentilesMethod.TDIGEST)) { this.percentilesConfig = new PercentilesConfig.TDigest(); } else { this.percentilesConfig = new PercentilesConfig.Hdr(); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractTDigestPercentilesAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractTDigestPercentilesAggregator.java index 0c9b5aca79e0c..40e32ce7199d2 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractTDigestPercentilesAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractTDigestPercentilesAggregator.java @@ -10,10 +10,10 @@ import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.ScoreMode; -import org.elasticsearch.core.Releasables; import org.elasticsearch.common.util.ArrayUtils; import 
org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.ObjectArray; +import org.elasticsearch.core.Releasables; import org.elasticsearch.index.fielddata.SortedNumericDoubleValues; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.Aggregator; @@ -38,9 +38,17 @@ private static int indexOfKey(double[] keys, double key) { protected final double compression; protected final boolean keyed; - AbstractTDigestPercentilesAggregator(String name, ValuesSource valuesSource, AggregationContext context, Aggregator parent, - double[] keys, double compression, boolean keyed, DocValueFormat formatter, - Map metadata) throws IOException { + AbstractTDigestPercentilesAggregator( + String name, + ValuesSource valuesSource, + AggregationContext context, + Aggregator parent, + double[] keys, + double compression, + boolean keyed, + DocValueFormat formatter, + Map metadata + ) throws IOException { super(name, context, parent, metadata); this.valuesSource = valuesSource; this.keyed = keyed; @@ -56,12 +64,11 @@ public ScoreMode scoreMode() { } @Override - public LeafBucketCollector getLeafCollector(LeafReaderContext ctx, - final LeafBucketCollector sub) throws IOException { + public LeafBucketCollector getLeafCollector(LeafReaderContext ctx, final LeafBucketCollector sub) throws IOException { if (valuesSource == null) { return LeafBucketCollector.NO_OP_COLLECTOR; } - final SortedNumericDoubleValues values = ((ValuesSource.Numeric)valuesSource).doubleValues(ctx); + final SortedNumericDoubleValues values = ((ValuesSource.Numeric) valuesSource).doubleValues(ctx); return new LeafBucketCollectorBase(sub, values) { @Override public void collect(int doc, long bucket) throws IOException { diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/AvgAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/AvgAggregationBuilder.java index 10d430ac75411..7b578298eacb4 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/AvgAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/AvgAggregationBuilder.java @@ -73,11 +73,13 @@ protected void innerWriteTo(StreamOutput out) { } @Override - protected AvgAggregatorFactory innerBuild(AggregationContext context, ValuesSourceConfig config, - AggregatorFactory parent, - AggregatorFactories.Builder subFactoriesBuilder) throws IOException { - MetricAggregatorSupplier aggregatorSupplier = - context.getValuesSourceRegistry().getAggregator(REGISTRY_KEY, config); + protected AvgAggregatorFactory innerBuild( + AggregationContext context, + ValuesSourceConfig config, + AggregatorFactory parent, + AggregatorFactories.Builder subFactoriesBuilder + ) throws IOException { + MetricAggregatorSupplier aggregatorSupplier = context.getValuesSourceRegistry().getAggregator(REGISTRY_KEY, config); return new AvgAggregatorFactory(name, config, context, parent, subFactoriesBuilder, metadata, aggregatorSupplier); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/AvgAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/AvgAggregator.java index b18b3da8296a9..fd1e9733febc9 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/AvgAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/AvgAggregator.java @@ -9,10 +9,10 @@ import org.apache.lucene.index.LeafReaderContext; import 
org.apache.lucene.search.ScoreMode; -import org.elasticsearch.core.Releasables; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.DoubleArray; import org.elasticsearch.common.util.LongArray; +import org.elasticsearch.core.Releasables; import org.elasticsearch.index.fielddata.SortedNumericDoubleValues; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.Aggregator; @@ -35,8 +35,13 @@ class AvgAggregator extends NumericMetricsAggregator.SingleValue { DoubleArray compensations; DocValueFormat format; - AvgAggregator(String name, ValuesSourceConfig valuesSourceConfig, AggregationContext context, - Aggregator parent, Map metadata) throws IOException { + AvgAggregator( + String name, + ValuesSourceConfig valuesSourceConfig, + AggregationContext context, + Aggregator parent, + Map metadata + ) throws IOException { super(name, context, parent, metadata); // TODO Stop expecting nulls here this.valuesSource = valuesSourceConfig.hasValues() ? (ValuesSource.Numeric) valuesSourceConfig.getValuesSource() : null; @@ -55,8 +60,7 @@ public ScoreMode scoreMode() { } @Override - public LeafBucketCollector getLeafCollector(LeafReaderContext ctx, - final LeafBucketCollector sub) throws IOException { + public LeafBucketCollector getLeafCollector(LeafReaderContext ctx, final LeafBucketCollector sub) throws IOException { if (valuesSource == null) { return LeafBucketCollector.NO_OP_COLLECTOR; } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/AvgAggregatorFactory.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/AvgAggregatorFactory.java index 925bf0fe85fac..964b7eac98776 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/AvgAggregatorFactory.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/AvgAggregatorFactory.java @@ -25,20 +25,26 @@ class AvgAggregatorFactory extends ValuesSourceAggregatorFactory { private final MetricAggregatorSupplier aggregatorSupplier; - AvgAggregatorFactory(String name, ValuesSourceConfig config, AggregationContext context, - AggregatorFactory parent, AggregatorFactories.Builder subFactoriesBuilder, - Map metadata, MetricAggregatorSupplier aggregatorSupplier) throws IOException { + AvgAggregatorFactory( + String name, + ValuesSourceConfig config, + AggregationContext context, + AggregatorFactory parent, + AggregatorFactories.Builder subFactoriesBuilder, + Map metadata, + MetricAggregatorSupplier aggregatorSupplier + ) throws IOException { super(name, config, context, parent, subFactoriesBuilder, metadata); this.aggregatorSupplier = aggregatorSupplier; } - static void registerAggregators(ValuesSourceRegistry.Builder builder) { builder.register( AvgAggregationBuilder.REGISTRY_KEY, List.of(CoreValuesSourceType.NUMERIC, CoreValuesSourceType.DATE, CoreValuesSourceType.BOOLEAN), AvgAggregator::new, - true); + true + ); } @Override @@ -47,11 +53,8 @@ protected Aggregator createUnmapped(Aggregator parent, Map metad } @Override - protected Aggregator doCreateInternal( - Aggregator parent, - CardinalityUpperBound cardinality, - Map metadata - ) throws IOException { + protected Aggregator doCreateInternal(Aggregator parent, CardinalityUpperBound cardinality, Map metadata) + throws IOException { return aggregatorSupplier.build(name, config, context, parent, metadata); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/CardinalityAggregationBuilder.java 
b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/CardinalityAggregationBuilder.java index d45f4276d6e56..c492ed5f43930 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/CardinalityAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/CardinalityAggregationBuilder.java @@ -9,10 +9,10 @@ package org.elasticsearch.search.aggregations.metrics; import org.elasticsearch.Version; -import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.ObjectParser; +import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregatorFactories; @@ -29,8 +29,9 @@ import java.util.Map; import java.util.Objects; -public final class CardinalityAggregationBuilder - extends ValuesSourceAggregationBuilder.LeafOnly { +public final class CardinalityAggregationBuilder extends ValuesSourceAggregationBuilder.LeafOnly< + ValuesSource, + CardinalityAggregationBuilder> { public static final String NAME = "cardinality"; public static final ValuesSourceRegistry.RegistryKey REGISTRY_KEY = @@ -39,8 +40,10 @@ public final class CardinalityAggregationBuilder private static final ParseField REHASH = new ParseField("rehash").withAllDeprecated("no replacement - values will always be rehashed"); public static final ParseField PRECISION_THRESHOLD_FIELD = new ParseField("precision_threshold"); - public static final ObjectParser PARSER = - ObjectParser.fromBuilder(NAME, CardinalityAggregationBuilder::new); + public static final ObjectParser PARSER = ObjectParser.fromBuilder( + NAME, + CardinalityAggregationBuilder::new + ); static { ValuesSourceAggregationBuilder.declareFields(PARSER, true, false, false); PARSER.declareLong(CardinalityAggregationBuilder::precisionThreshold, CardinalityAggregationBuilder.PRECISION_THRESHOLD_FIELD); @@ -57,9 +60,11 @@ public CardinalityAggregationBuilder(String name) { super(name); } - public CardinalityAggregationBuilder(CardinalityAggregationBuilder clone, - AggregatorFactories.Builder factoriesBuilder, - Map metadata) { + public CardinalityAggregationBuilder( + CardinalityAggregationBuilder clone, + AggregatorFactories.Builder factoriesBuilder, + Map metadata + ) { super(clone, factoriesBuilder, metadata); this.precisionThreshold = clone.precisionThreshold; } @@ -105,7 +110,8 @@ protected boolean serializeTargetValueType(Version version) { public CardinalityAggregationBuilder precisionThreshold(long precisionThreshold) { if (precisionThreshold < 0) { throw new IllegalArgumentException( - "[precisionThreshold] must be greater than or equal to 0. Found [" + precisionThreshold + "] in [" + name + "]"); + "[precisionThreshold] must be greater than or equal to 0. 
Found [" + precisionThreshold + "] in [" + name + "]" + ); } this.precisionThreshold = precisionThreshold; return this; @@ -121,14 +127,24 @@ public Long precisionThreshold() { } @Override - protected CardinalityAggregatorFactory innerBuild(AggregationContext context, ValuesSourceConfig config, - AggregatorFactory parent, - AggregatorFactories.Builder subFactoriesBuilder) throws IOException { - CardinalityAggregatorSupplier aggregatorSupplier = - context.getValuesSourceRegistry().getAggregator(REGISTRY_KEY, config); - - return new CardinalityAggregatorFactory(name, config, precisionThreshold, context, parent, - subFactoriesBuilder, metadata, aggregatorSupplier); + protected CardinalityAggregatorFactory innerBuild( + AggregationContext context, + ValuesSourceConfig config, + AggregatorFactory parent, + AggregatorFactories.Builder subFactoriesBuilder + ) throws IOException { + CardinalityAggregatorSupplier aggregatorSupplier = context.getValuesSourceRegistry().getAggregator(REGISTRY_KEY, config); + + return new CardinalityAggregatorFactory( + name, + config, + precisionThreshold, + context, + parent, + subFactoriesBuilder, + metadata, + aggregatorSupplier + ); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/CardinalityAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/CardinalityAggregator.java index 072e67922a602..dfcd924bf5a5f 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/CardinalityAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/CardinalityAggregator.java @@ -8,9 +8,7 @@ package org.elasticsearch.search.aggregations.metrics; -import java.io.IOException; -import java.util.Map; -import java.util.function.BiConsumer; +import com.carrotsearch.hppc.BitMixer; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.SortedNumericDocValues; @@ -19,14 +17,14 @@ import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.FixedBitSet; import org.apache.lucene.util.RamUsageEstimator; -import org.elasticsearch.core.Nullable; import org.elasticsearch.common.hash.MurmurHash3; -import org.elasticsearch.core.Releasable; -import org.elasticsearch.core.Releasables; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.BitArray; import org.elasticsearch.common.util.LongArray; import org.elasticsearch.common.util.ObjectArray; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.core.Releasable; +import org.elasticsearch.core.Releasables; import org.elasticsearch.index.fielddata.SortedBinaryDocValues; import org.elasticsearch.index.fielddata.SortedNumericDoubleValues; import org.elasticsearch.search.aggregations.Aggregator; @@ -36,7 +34,9 @@ import org.elasticsearch.search.aggregations.support.ValuesSource; import org.elasticsearch.search.aggregations.support.ValuesSourceConfig; -import com.carrotsearch.hppc.BitMixer; +import java.io.IOException; +import java.util.Map; +import java.util.function.BiConsumer; /** * An aggregator that computes approximate counts of unique values. 
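The cardinality builder and factory hunks above are formatting-only; for orientation, this is roughly how the aggregation they build is assembled on the client side. A minimal usage sketch; the index name, field name and threshold value are illustrative, not taken from this patch:

    import org.elasticsearch.action.search.SearchRequest;
    import org.elasticsearch.search.aggregations.AggregationBuilders;
    import org.elasticsearch.search.aggregations.metrics.CardinalityAggregationBuilder;
    import org.elasticsearch.search.builder.SearchSourceBuilder;

    public class CardinalityRequestSketch {
        public static SearchRequest build() {
            // precisionThreshold is the knob validated above (must be >= 0); the factory later
            // maps it to an HLL++ precision via HyperLogLogPlusPlus.precisionFromThreshold.
            CardinalityAggregationBuilder uniqueUsers = AggregationBuilders.cardinality("unique_users")
                .field("user_id")
                .precisionThreshold(3000);
            return new SearchRequest("events").source(new SearchSourceBuilder().size(0).aggregation(uniqueUsers));
        }
    }
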
@@ -59,12 +59,13 @@ public class CardinalityAggregator extends NumericMetricsAggregator.SingleValue private int stringHashingCollectorsUsed; public CardinalityAggregator( - String name, - ValuesSourceConfig valuesSourceConfig, - int precision, - AggregationContext context, - Aggregator parent, - Map metadata) throws IOException { + String name, + ValuesSourceConfig valuesSourceConfig, + int precision, + AggregationContext context, + Aggregator parent, + Map metadata + ) throws IOException { super(name, context, parent, metadata); // TODO: Stop using nulls here this.valuesSource = valuesSourceConfig.hasValues() ? valuesSourceConfig.getValuesSource() : null; @@ -85,8 +86,9 @@ private Collector pickCollector(LeafReaderContext ctx) throws IOException { if (valuesSource instanceof ValuesSource.Numeric) { ValuesSource.Numeric source = (ValuesSource.Numeric) valuesSource; - MurmurHash3Values hashValues = source.isFloatingPoint() ? - MurmurHash3Values.hash(source.doubleValues(ctx)) : MurmurHash3Values.hash(source.longValues(ctx)); + MurmurHash3Values hashValues = source.isFloatingPoint() + ? MurmurHash3Values.hash(source.doubleValues(ctx)) + : MurmurHash3Values.hash(source.longValues(ctx)); numericCollectorsUsed++; return new DirectCollector(counts, hashValues); } @@ -115,8 +117,7 @@ private Collector pickCollector(LeafReaderContext ctx) throws IOException { } @Override - public LeafBucketCollector getLeafCollector(LeafReaderContext ctx, - final LeafBucketCollector sub) throws IOException { + public LeafBucketCollector getLeafCollector(LeafReaderContext ctx, final LeafBucketCollector sub) throws IOException { postCollectLastCollector(); collector = pickCollector(ctx); @@ -248,8 +249,7 @@ public static long memoryOverhead(long maxOrd) { private final HyperLogLogPlusPlus counts; private ObjectArray visitedOrds; - OrdinalsCollector(HyperLogLogPlusPlus counts, SortedSetDocValues values, - BigArrays bigArrays) { + OrdinalsCollector(HyperLogLogPlusPlus counts, SortedSetDocValues values, BigArrays bigArrays) { if (values.getValueCount() > Integer.MAX_VALUE) { throw new IllegalArgumentException(); } @@ -287,8 +287,9 @@ public void postCollect() throws IOException { try (LongArray hashes = bigArrays.newLongArray(maxOrd, false)) { final MurmurHash3.Hash128 hash = new MurmurHash3.Hash128(); - for (long ord = allVisitedOrds.nextSetBit(0); ord < Long.MAX_VALUE; - ord = ord + 1 < maxOrd ? allVisitedOrds.nextSetBit(ord + 1) : Long.MAX_VALUE) { + for (long ord = allVisitedOrds.nextSetBit(0); ord < Long.MAX_VALUE; ord = ord + 1 < maxOrd + ? allVisitedOrds.nextSetBit(ord + 1) + : Long.MAX_VALUE) { final BytesRef value = values.lookupOrd(ord); MurmurHash3.hash128(value.bytes, value.offset, value.length, 0, hash); hashes.set(ord, hash.h1); @@ -297,8 +298,9 @@ public void postCollect() throws IOException { for (long bucket = visitedOrds.size() - 1; bucket >= 0; --bucket) { final BitArray bits = visitedOrds.get(bucket); if (bits != null) { - for (long ord = bits.nextSetBit(0); ord < Long.MAX_VALUE; - ord = ord + 1 < maxOrd ? bits.nextSetBit(ord + 1) : Long.MAX_VALUE) { + for (long ord = bits.nextSetBit(0); ord < Long.MAX_VALUE; ord = ord + 1 < maxOrd + ? 
bits.nextSetBit(ord + 1) + : Long.MAX_VALUE) { counts.collect(bucket, hashes.get(ord)); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/CardinalityAggregatorFactory.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/CardinalityAggregatorFactory.java index 0623a9f6133d5..a2ead2bfb1b60 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/CardinalityAggregatorFactory.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/CardinalityAggregatorFactory.java @@ -29,13 +29,16 @@ class CardinalityAggregatorFactory extends ValuesSourceAggregatorFactory { private final Long precisionThreshold; private final CardinalityAggregatorSupplier aggregatorSupplier; - CardinalityAggregatorFactory(String name, ValuesSourceConfig config, - Long precisionThreshold, - AggregationContext context, - AggregatorFactory parent, - AggregatorFactories.Builder subFactoriesBuilder, - Map metadata, - CardinalityAggregatorSupplier aggregatorSupplier) throws IOException { + CardinalityAggregatorFactory( + String name, + ValuesSourceConfig config, + Long precisionThreshold, + AggregationContext context, + AggregatorFactory parent, + AggregatorFactories.Builder subFactoriesBuilder, + Map metadata, + CardinalityAggregatorSupplier aggregatorSupplier + ) throws IOException { super(name, config, context, parent, subFactoriesBuilder, metadata); this.aggregatorSupplier = aggregatorSupplier; @@ -43,7 +46,8 @@ class CardinalityAggregatorFactory extends ValuesSourceAggregatorFactory { } public static void registerAggregators(ValuesSourceRegistry.Builder builder) { - builder.register(CardinalityAggregationBuilder.REGISTRY_KEY, + builder.register( + CardinalityAggregationBuilder.REGISTRY_KEY, CoreValuesSourceType.ALL_CORE, (name, valuesSourceConfig, precision, context, parent, metadata) -> { // check global ords @@ -53,19 +57,27 @@ public static void registerAggregators(ValuesSourceRegistry.Builder builder) { final ValuesSource.Bytes.WithOrdinals source = (ValuesSource.Bytes.WithOrdinals) valuesSource; if (useGlobalOrds(context, source, precision)) { final long maxOrd = source.globalMaxOrd(context.searcher()); - return new GlobalOrdCardinalityAggregator(name, source, precision, Math.toIntExact(maxOrd), - context, parent, metadata); + return new GlobalOrdCardinalityAggregator( + name, + source, + precision, + Math.toIntExact(maxOrd), + context, + parent, + metadata + ); } } } // fallback in the default aggregator return new CardinalityAggregator(name, valuesSourceConfig, precision, context, parent, metadata); - }, true); + }, + true + ); } - private static boolean useGlobalOrds(AggregationContext context, - ValuesSource.Bytes.WithOrdinals source, - int precision) throws IOException { + private static boolean useGlobalOrds(AggregationContext context, ValuesSource.Bytes.WithOrdinals source, int precision) + throws IOException { final List leaves = context.searcher().getIndexReader().leaves(); // we compute the total number of terms across all segments long total = 0; @@ -85,17 +97,14 @@ protected Aggregator createUnmapped(Aggregator parent, Map metad } @Override - protected Aggregator doCreateInternal( - Aggregator parent, - CardinalityUpperBound cardinality, - Map metadata - ) throws IOException { + protected Aggregator doCreateInternal(Aggregator parent, CardinalityUpperBound cardinality, Map metadata) + throws IOException { return aggregatorSupplier.build(name, config, precision(), context, parent, metadata); } private int 
precision() { return precisionThreshold == null - ? HyperLogLogPlusPlus.DEFAULT_PRECISION - : HyperLogLogPlusPlus.precisionFromThreshold(precisionThreshold); + ? HyperLogLogPlusPlus.DEFAULT_PRECISION + : HyperLogLogPlusPlus.precisionFromThreshold(precisionThreshold); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/CardinalityAggregatorSupplier.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/CardinalityAggregatorSupplier.java index d8cf11ffcef16..c664ad8ae7e29 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/CardinalityAggregatorSupplier.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/CardinalityAggregatorSupplier.java @@ -16,11 +16,13 @@ import java.util.Map; public interface CardinalityAggregatorSupplier { - Aggregator build(String name, - ValuesSourceConfig valuesSourceConfig, - int precision, - AggregationContext context, - Aggregator parent, - Map metadata) throws IOException; + Aggregator build( + String name, + ValuesSourceConfig valuesSourceConfig, + int precision, + AggregationContext context, + Aggregator parent, + Map metadata + ) throws IOException; } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/CompensatedSum.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/CompensatedSum.java index fb3f3cd9764b5..4058a5290fc4d 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/CompensatedSum.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/CompensatedSum.java @@ -8,7 +8,6 @@ package org.elasticsearch.search.aggregations.metrics; - /** * Used to calculate sums using the Kahan summation algorithm. * @@ -85,6 +84,4 @@ public CompensatedSum add(double value, double delta) { return this; } - } - diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ExtendedStats.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ExtendedStats.java index f3fb9b937bcbb..cbfef411954d6 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ExtendedStats.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ExtendedStats.java @@ -72,7 +72,6 @@ public interface ExtendedStats extends Stats { */ String getStdDeviationBoundAsString(Bounds bound); - /** * The sum of the squares of the collected values as a String. 
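CompensatedSum above implements Kahan summation: the rounding error of each addition is carried in a separate compensation term so that long runs of small doubles do not drift. A minimal sketch of the scheme, independent of the class in this patch; the loop count and increment are illustrative:

    public class KahanSummationSketch {
        public static void main(String[] args) {
            double naive = 0.0;
            for (int i = 0; i < 10_000_000; i++) {
                naive += 0.1; // low-order bits are lost as the running sum grows
            }

            double sum = 0.0;
            double compensation = 0.0; // running estimate of the accumulated rounding error
            for (int i = 0; i < 10_000_000; i++) {
                double corrected = 0.1 - compensation;
                double t = sum + corrected;
                compensation = (t - sum) - corrected; // error introduced by this addition
                sum = t;
            }

            System.out.println("naive = " + naive); // drifts away from 1,000,000
            System.out.println("kahan = " + sum);   // stays much closer to 1,000,000
        }
    }
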
*/ @@ -94,7 +93,12 @@ public interface ExtendedStats extends Stats { String getVarianceSamplingAsString(); enum Bounds { - UPPER, LOWER, UPPER_POPULATION, LOWER_POPULATION, UPPER_SAMPLING, LOWER_SAMPLING + UPPER, + LOWER, + UPPER_POPULATION, + LOWER_POPULATION, + UPPER_SAMPLING, + LOWER_SAMPLING } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsAggregationBuilder.java index be04cc86bab12..e2730b8192b1c 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsAggregationBuilder.java @@ -27,14 +27,17 @@ import java.util.Map; import java.util.Objects; -public class ExtendedStatsAggregationBuilder - extends ValuesSourceAggregationBuilder.LeafOnly { +public class ExtendedStatsAggregationBuilder extends ValuesSourceAggregationBuilder.LeafOnly< + ValuesSource.Numeric, + ExtendedStatsAggregationBuilder> { public static final String NAME = "extended_stats"; public static final ValuesSourceRegistry.RegistryKey REGISTRY_KEY = new ValuesSourceRegistry.RegistryKey<>(NAME, ExtendedStatsAggregatorProvider.class); - public static final ObjectParser PARSER = - ObjectParser.fromBuilder(NAME, ExtendedStatsAggregationBuilder::new); + public static final ObjectParser PARSER = ObjectParser.fromBuilder( + NAME, + ExtendedStatsAggregationBuilder::new + ); static { ValuesSourceAggregationBuilder.declareFields(PARSER, true, true, false); PARSER.declareDouble(ExtendedStatsAggregationBuilder::sigma, ExtendedStatsAggregator.SIGMA_FIELD); @@ -43,14 +46,18 @@ public class ExtendedStatsAggregationBuilder public static void registerAggregators(ValuesSourceRegistry.Builder builder) { ExtendedStatsAggregatorFactory.registerAggregators(builder); } + private double sigma = 2.0; public ExtendedStatsAggregationBuilder(String name) { super(name); } - protected ExtendedStatsAggregationBuilder(ExtendedStatsAggregationBuilder clone, - AggregatorFactories.Builder factoriesBuilder, Map metadata) { + protected ExtendedStatsAggregationBuilder( + ExtendedStatsAggregationBuilder clone, + AggregatorFactories.Builder factoriesBuilder, + Map metadata + ) { super(clone, factoriesBuilder, metadata); this.sigma = clone.sigma; } @@ -91,13 +98,14 @@ public double sigma() { } @Override - protected ExtendedStatsAggregatorFactory innerBuild(AggregationContext context, ValuesSourceConfig config, - AggregatorFactory parent, - AggregatorFactories.Builder subFactoriesBuilder) throws IOException { - ExtendedStatsAggregatorProvider aggregatorSupplier = - context.getValuesSourceRegistry().getAggregator(REGISTRY_KEY, config); - return new ExtendedStatsAggregatorFactory(name, config, sigma, context, parent, - subFactoriesBuilder, metadata, aggregatorSupplier); + protected ExtendedStatsAggregatorFactory innerBuild( + AggregationContext context, + ValuesSourceConfig config, + AggregatorFactory parent, + AggregatorFactories.Builder subFactoriesBuilder + ) throws IOException { + ExtendedStatsAggregatorProvider aggregatorSupplier = context.getValuesSourceRegistry().getAggregator(REGISTRY_KEY, config); + return new ExtendedStatsAggregatorFactory(name, config, sigma, context, parent, subFactoriesBuilder, metadata, aggregatorSupplier); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsAggregator.java 
b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsAggregator.java index 90a93dd5d5e4a..b746ab44fd0ce 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsAggregator.java @@ -9,11 +9,11 @@ import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.ScoreMode; -import org.elasticsearch.common.xcontent.ParseField; -import org.elasticsearch.core.Releasables; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.DoubleArray; import org.elasticsearch.common.util.LongArray; +import org.elasticsearch.common.xcontent.ParseField; +import org.elasticsearch.core.Releasables; import org.elasticsearch.index.fielddata.SortedNumericDoubleValues; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.Aggregator; @@ -76,8 +76,7 @@ public ScoreMode scoreMode() { } @Override - public LeafBucketCollector getLeafCollector(LeafReaderContext ctx, - final LeafBucketCollector sub) throws IOException { + public LeafBucketCollector getLeafCollector(LeafReaderContext ctx, final LeafBucketCollector sub) throws IOException { if (valuesSource == null) { return LeafBucketCollector.NO_OP_COLLECTOR; } @@ -150,38 +149,64 @@ public boolean hasMetric(String name) { @Override public double metric(String name, long owningBucketOrd) { if (valuesSource == null || owningBucketOrd >= counts.size()) { - switch(InternalExtendedStats.Metrics.resolve(name)) { - case count: return 0; - case sum: return 0; - case min: return Double.POSITIVE_INFINITY; - case max: return Double.NEGATIVE_INFINITY; - case avg: return Double.NaN; - case sum_of_squares: return 0; - case variance: return Double.NaN; - case variance_population: return Double.NaN; - case variance_sampling: return Double.NaN; - case std_deviation: return Double.NaN; - case std_deviation_population: return Double.NaN; - case std_deviation_sampling: return Double.NaN; - case std_upper: return Double.NaN; - case std_lower: return Double.NaN; + switch (InternalExtendedStats.Metrics.resolve(name)) { + case count: + return 0; + case sum: + return 0; + case min: + return Double.POSITIVE_INFINITY; + case max: + return Double.NEGATIVE_INFINITY; + case avg: + return Double.NaN; + case sum_of_squares: + return 0; + case variance: + return Double.NaN; + case variance_population: + return Double.NaN; + case variance_sampling: + return Double.NaN; + case std_deviation: + return Double.NaN; + case std_deviation_population: + return Double.NaN; + case std_deviation_sampling: + return Double.NaN; + case std_upper: + return Double.NaN; + case std_lower: + return Double.NaN; default: throw new IllegalArgumentException("Unknown value [" + name + "] in common stats aggregation"); } } - switch(InternalExtendedStats.Metrics.resolve(name)) { - case count: return counts.get(owningBucketOrd); - case sum: return sums.get(owningBucketOrd); - case min: return mins.get(owningBucketOrd); - case max: return maxes.get(owningBucketOrd); - case avg: return sums.get(owningBucketOrd) / counts.get(owningBucketOrd); - case sum_of_squares: return sumOfSqrs.get(owningBucketOrd); - case variance: return variance(owningBucketOrd); - case variance_population: return variancePopulation(owningBucketOrd); - case variance_sampling: return varianceSampling(owningBucketOrd); - case std_deviation: return Math.sqrt(variance(owningBucketOrd)); - case 
std_deviation_population: return Math.sqrt(variance(owningBucketOrd)); - case std_deviation_sampling: return Math.sqrt(varianceSampling(owningBucketOrd)); + switch (InternalExtendedStats.Metrics.resolve(name)) { + case count: + return counts.get(owningBucketOrd); + case sum: + return sums.get(owningBucketOrd); + case min: + return mins.get(owningBucketOrd); + case max: + return maxes.get(owningBucketOrd); + case avg: + return sums.get(owningBucketOrd) / counts.get(owningBucketOrd); + case sum_of_squares: + return sumOfSqrs.get(owningBucketOrd); + case variance: + return variance(owningBucketOrd); + case variance_population: + return variancePopulation(owningBucketOrd); + case variance_sampling: + return varianceSampling(owningBucketOrd); + case std_deviation: + return Math.sqrt(variance(owningBucketOrd)); + case std_deviation_population: + return Math.sqrt(variance(owningBucketOrd)); + case std_deviation_sampling: + return Math.sqrt(varianceSampling(owningBucketOrd)); case std_upper: return (sums.get(owningBucketOrd) / counts.get(owningBucketOrd)) + (Math.sqrt(variance(owningBucketOrd)) * this.sigma); case std_lower: @@ -199,14 +224,14 @@ private double variancePopulation(long owningBucketOrd) { double sum = sums.get(owningBucketOrd); long count = counts.get(owningBucketOrd); double variance = (sumOfSqrs.get(owningBucketOrd) - ((sum * sum) / count)) / count; - return variance < 0 ? 0 : variance; + return variance < 0 ? 0 : variance; } private double varianceSampling(long owningBucketOrd) { double sum = sums.get(owningBucketOrd); long count = counts.get(owningBucketOrd); double variance = (sumOfSqrs.get(owningBucketOrd) - ((sum * sum) / count)) / (count - 1); - return variance < 0 ? 0 : variance; + return variance < 0 ? 0 : variance; } @Override @@ -214,9 +239,17 @@ public InternalAggregation buildAggregation(long bucket) { if (valuesSource == null || bucket >= counts.size()) { return buildEmptyAggregation(); } - return new InternalExtendedStats(name, counts.get(bucket), sums.get(bucket), - mins.get(bucket), maxes.get(bucket), sumOfSqrs.get(bucket), sigma, format, - metadata()); + return new InternalExtendedStats( + name, + counts.get(bucket), + sums.get(bucket), + mins.get(bucket), + maxes.get(bucket), + sumOfSqrs.get(bucket), + sigma, + format, + metadata() + ); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsAggregatorFactory.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsAggregatorFactory.java index b33ca91a71c69..23292bf67db16 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsAggregatorFactory.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsAggregatorFactory.java @@ -27,14 +27,16 @@ class ExtendedStatsAggregatorFactory extends ValuesSourceAggregatorFactory { private final ExtendedStatsAggregatorProvider aggregatorSupplier; private final double sigma; - ExtendedStatsAggregatorFactory(String name, - ValuesSourceConfig config, - double sigma, - AggregationContext context, - AggregatorFactory parent, - AggregatorFactories.Builder subFactoriesBuilder, - Map metadata, - ExtendedStatsAggregatorProvider aggregatorSupplier) throws IOException { + ExtendedStatsAggregatorFactory( + String name, + ValuesSourceConfig config, + double sigma, + AggregationContext context, + AggregatorFactory parent, + AggregatorFactories.Builder subFactoriesBuilder, + Map metadata, + ExtendedStatsAggregatorProvider aggregatorSupplier + ) 
throws IOException { super(name, config, context, parent, subFactoriesBuilder, metadata); this.sigma = sigma; this.aggregatorSupplier = aggregatorSupplier; @@ -45,7 +47,8 @@ static void registerAggregators(ValuesSourceRegistry.Builder builder) { ExtendedStatsAggregationBuilder.REGISTRY_KEY, List.of(CoreValuesSourceType.NUMERIC, CoreValuesSourceType.DATE, CoreValuesSourceType.BOOLEAN), ExtendedStatsAggregator::new, - true); + true + ); } @Override @@ -54,11 +57,8 @@ protected Aggregator createUnmapped(Aggregator parent, Map metad } @Override - protected Aggregator doCreateInternal( - Aggregator parent, - CardinalityUpperBound cardinality, - Map metadata - ) throws IOException { + protected Aggregator doCreateInternal(Aggregator parent, CardinalityUpperBound cardinality, Map metadata) + throws IOException { return aggregatorSupplier.build(name, config, context, parent, sigma, metadata); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsAggregatorProvider.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsAggregatorProvider.java index 80c4fe5ba34c1..b9a6655fff4d0 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsAggregatorProvider.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsAggregatorProvider.java @@ -16,10 +16,12 @@ public interface ExtendedStatsAggregatorProvider { - Aggregator build(String name, - ValuesSourceConfig valuesSourceConfig, - AggregationContext context, - Aggregator parent, - double sigma, - Map metadata) throws IOException; + Aggregator build( + String name, + ValuesSourceConfig valuesSourceConfig, + AggregationContext context, + Aggregator parent, + double sigma, + Map metadata + ) throws IOException; } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/GeoBoundsAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/GeoBoundsAggregationBuilder.java index 96af8d9cb3ba2..f8b5f80cd934a 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/GeoBoundsAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/GeoBoundsAggregationBuilder.java @@ -28,11 +28,15 @@ public class GeoBoundsAggregationBuilder extends ValuesSourceAggregationBuilder { public static final String NAME = "geo_bounds"; - public static final ValuesSourceRegistry.RegistryKey REGISTRY_KEY = - new ValuesSourceRegistry.RegistryKey<>(NAME, GeoBoundsAggregatorSupplier.class); - - public static final ObjectParser PARSER = - ObjectParser.fromBuilder(NAME, GeoBoundsAggregationBuilder::new); + public static final ValuesSourceRegistry.RegistryKey REGISTRY_KEY = new ValuesSourceRegistry.RegistryKey<>( + NAME, + GeoBoundsAggregatorSupplier.class + ); + + public static final ObjectParser PARSER = ObjectParser.fromBuilder( + NAME, + GeoBoundsAggregationBuilder::new + ); static { ValuesSourceAggregationBuilder.declareFields(PARSER, false, false, false); PARSER.declareBoolean(GeoBoundsAggregationBuilder::wrapLongitude, GeoBoundsAggregator.WRAP_LONGITUDE_FIELD); @@ -48,9 +52,11 @@ public GeoBoundsAggregationBuilder(String name) { super(name); } - protected GeoBoundsAggregationBuilder(GeoBoundsAggregationBuilder clone, - AggregatorFactories.Builder factoriesBuilder, - Map metadata) { + protected GeoBoundsAggregationBuilder( + GeoBoundsAggregationBuilder clone, + AggregatorFactories.Builder factoriesBuilder, + Map metadata + ) { 
super(clone, factoriesBuilder, metadata); this.wrapLongitude = clone.wrapLongitude; } @@ -99,13 +105,23 @@ public BucketCardinality bucketCardinality() { } @Override - protected GeoBoundsAggregatorFactory innerBuild(AggregationContext context, ValuesSourceConfig config, - AggregatorFactory parent, - AggregatorFactories.Builder subFactoriesBuilder) throws IOException { - GeoBoundsAggregatorSupplier aggregatorSupplier = - context.getValuesSourceRegistry().getAggregator(REGISTRY_KEY, config); - return new GeoBoundsAggregatorFactory(name, config, wrapLongitude, context, parent, - subFactoriesBuilder, metadata, aggregatorSupplier); + protected GeoBoundsAggregatorFactory innerBuild( + AggregationContext context, + ValuesSourceConfig config, + AggregatorFactory parent, + AggregatorFactories.Builder subFactoriesBuilder + ) throws IOException { + GeoBoundsAggregatorSupplier aggregatorSupplier = context.getValuesSourceRegistry().getAggregator(REGISTRY_KEY, config); + return new GeoBoundsAggregatorFactory( + name, + config, + wrapLongitude, + context, + parent, + subFactoriesBuilder, + metadata, + aggregatorSupplier + ); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/GeoBoundsAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/GeoBoundsAggregator.java index 5811d36c0c4cb..6cf7c5f3dd856 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/GeoBoundsAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/GeoBoundsAggregator.java @@ -9,10 +9,10 @@ package org.elasticsearch.search.aggregations.metrics; import org.apache.lucene.index.LeafReaderContext; -import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.geo.GeoPoint; -import org.elasticsearch.core.Releasables; import org.elasticsearch.common.util.DoubleArray; +import org.elasticsearch.common.xcontent.ParseField; +import org.elasticsearch.core.Releasables; import org.elasticsearch.index.fielddata.MultiGeoPointValues; import org.elasticsearch.search.aggregations.Aggregator; import org.elasticsearch.search.aggregations.InternalAggregation; @@ -67,8 +67,7 @@ final class GeoBoundsAggregator extends MetricsAggregator { } @Override - public LeafBucketCollector getLeafCollector(LeafReaderContext ctx, - LeafBucketCollector sub) { + public LeafBucketCollector getLeafCollector(LeafReaderContext ctx, LeafBucketCollector sub) { if (valuesSource == null) { return LeafBucketCollector.NO_OP_COLLECTOR; } @@ -149,8 +148,17 @@ public InternalAggregation buildAggregation(long owningBucketOrdinal) { @Override public InternalAggregation buildEmptyAggregation() { - return new InternalGeoBounds(name, Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY, Double.POSITIVE_INFINITY, - Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY, Double.NEGATIVE_INFINITY, wrapLongitude, metadata()); + return new InternalGeoBounds( + name, + Double.NEGATIVE_INFINITY, + Double.POSITIVE_INFINITY, + Double.POSITIVE_INFINITY, + Double.NEGATIVE_INFINITY, + Double.POSITIVE_INFINITY, + Double.NEGATIVE_INFINITY, + wrapLongitude, + metadata() + ); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/GeoBoundsAggregatorFactory.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/GeoBoundsAggregatorFactory.java index 20439a09d554b..0dc3948c3cd7f 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/GeoBoundsAggregatorFactory.java +++ 
b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/GeoBoundsAggregatorFactory.java @@ -26,14 +26,16 @@ class GeoBoundsAggregatorFactory extends ValuesSourceAggregatorFactory { private final GeoBoundsAggregatorSupplier aggregatorSupplier; private final boolean wrapLongitude; - GeoBoundsAggregatorFactory(String name, - ValuesSourceConfig config, - boolean wrapLongitude, - AggregationContext context, - AggregatorFactory parent, - AggregatorFactories.Builder subFactoriesBuilder, - Map metadata, - GeoBoundsAggregatorSupplier aggregatorSupplier) throws IOException { + GeoBoundsAggregatorFactory( + String name, + ValuesSourceConfig config, + boolean wrapLongitude, + AggregationContext context, + AggregatorFactory parent, + AggregatorFactories.Builder subFactoriesBuilder, + Map metadata, + GeoBoundsAggregatorSupplier aggregatorSupplier + ) throws IOException { super(name, config, context, parent, subFactoriesBuilder, metadata); this.wrapLongitude = wrapLongitude; this.aggregatorSupplier = aggregatorSupplier; @@ -45,11 +47,8 @@ protected Aggregator createUnmapped(Aggregator parent, Map metad } @Override - protected Aggregator doCreateInternal( - Aggregator parent, - CardinalityUpperBound cardinality, - Map metadata - ) throws IOException { + protected Aggregator doCreateInternal(Aggregator parent, CardinalityUpperBound cardinality, Map metadata) + throws IOException { return aggregatorSupplier.build(name, context, parent, config, wrapLongitude, metadata); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/GeoCentroid.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/GeoCentroid.java index b7eb33b513781..b5b5d2f186462 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/GeoCentroid.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/GeoCentroid.java @@ -16,5 +16,6 @@ */ public interface GeoCentroid extends Aggregation { GeoPoint centroid(); + long count(); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/GeoCentroidAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/GeoCentroidAggregationBuilder.java index 7e75171e51479..e30b36bad52da 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/GeoCentroidAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/GeoCentroidAggregationBuilder.java @@ -26,16 +26,19 @@ import java.io.IOException; import java.util.Map; -public class GeoCentroidAggregationBuilder - extends ValuesSourceAggregationBuilder.LeafOnly { +public class GeoCentroidAggregationBuilder extends ValuesSourceAggregationBuilder.LeafOnly< + ValuesSource.GeoPoint, + GeoCentroidAggregationBuilder> { public static final String NAME = "geo_centroid"; public static final ValuesSourceRegistry.RegistryKey REGISTRY_KEY = new ValuesSourceRegistry.RegistryKey<>( NAME, MetricAggregatorSupplier.class ); - public static final ObjectParser PARSER = - ObjectParser.fromBuilder(NAME, GeoCentroidAggregationBuilder::new); + public static final ObjectParser PARSER = ObjectParser.fromBuilder( + NAME, + GeoCentroidAggregationBuilder::new + ); static { ValuesSourceAggregationBuilder.declareFields(PARSER, true, false, false); } @@ -48,9 +51,11 @@ public GeoCentroidAggregationBuilder(String name) { super(name); } - protected GeoCentroidAggregationBuilder(GeoCentroidAggregationBuilder clone, - AggregatorFactories.Builder factoriesBuilder, - Map metadata) { + 
protected GeoCentroidAggregationBuilder( + GeoCentroidAggregationBuilder clone, + AggregatorFactories.Builder factoriesBuilder, + Map metadata + ) { super(clone, factoriesBuilder, metadata); } @@ -77,13 +82,14 @@ protected void innerWriteTo(StreamOutput out) { } @Override - protected GeoCentroidAggregatorFactory innerBuild(AggregationContext context, ValuesSourceConfig config, - AggregatorFactory parent, - AggregatorFactories.Builder subFactoriesBuilder) throws IOException { - MetricAggregatorSupplier aggregatorSupplier = - context.getValuesSourceRegistry().getAggregator(REGISTRY_KEY, config); - return new GeoCentroidAggregatorFactory(name, config, context, parent, - subFactoriesBuilder, metadata, aggregatorSupplier); + protected GeoCentroidAggregatorFactory innerBuild( + AggregationContext context, + ValuesSourceConfig config, + AggregatorFactory parent, + AggregatorFactories.Builder subFactoriesBuilder + ) throws IOException { + MetricAggregatorSupplier aggregatorSupplier = context.getValuesSourceRegistry().getAggregator(REGISTRY_KEY, config); + return new GeoCentroidAggregatorFactory(name, config, context, parent, subFactoriesBuilder, metadata, aggregatorSupplier); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/GeoCentroidAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/GeoCentroidAggregator.java index ce958b5b67e99..7182271612fd4 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/GeoCentroidAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/GeoCentroidAggregator.java @@ -10,9 +10,9 @@ import org.apache.lucene.index.LeafReaderContext; import org.elasticsearch.common.geo.GeoPoint; -import org.elasticsearch.core.Releasables; import org.elasticsearch.common.util.DoubleArray; import org.elasticsearch.common.util.LongArray; +import org.elasticsearch.core.Releasables; import org.elasticsearch.index.fielddata.MultiGeoPointValues; import org.elasticsearch.search.aggregations.Aggregator; import org.elasticsearch.search.aggregations.InternalAggregation; @@ -87,9 +87,9 @@ public void collect(int doc, long bucket) throws IOException { // update the sum for (int i = 0; i < valueCount; ++i) { GeoPoint value = values.nextValue(); - //latitude + // latitude compensatedSumLat.add(value.getLat()); - //longitude + // longitude compensatedSumLon.add(value.getLon()); } lonSum.set(bucket, compensatedSumLon.value()); @@ -108,9 +108,9 @@ public InternalAggregation buildAggregation(long bucket) { } final long bucketCount = counts.get(bucket); final GeoPoint bucketCentroid = (bucketCount > 0) - ? new GeoPoint(latSum.get(bucket) / bucketCount, lonSum.get(bucket) / bucketCount) - : null; - return new InternalGeoCentroid(name, bucketCentroid , bucketCount, metadata()); + ? 
new GeoPoint(latSum.get(bucket) / bucketCount, lonSum.get(bucket) / bucketCount) + : null; + return new InternalGeoCentroid(name, bucketCentroid, bucketCount, metadata()); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/GeoCentroidAggregatorFactory.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/GeoCentroidAggregatorFactory.java index 44249ffb94af1..0b4fbe433a788 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/GeoCentroidAggregatorFactory.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/GeoCentroidAggregatorFactory.java @@ -24,13 +24,15 @@ class GeoCentroidAggregatorFactory extends ValuesSourceAggregatorFactory { private final MetricAggregatorSupplier aggregatorSupplier; - GeoCentroidAggregatorFactory(String name, - ValuesSourceConfig config, - AggregationContext context, - AggregatorFactory parent, - AggregatorFactories.Builder subFactoriesBuilder, - Map metadata, - MetricAggregatorSupplier aggregatorSupplier) throws IOException { + GeoCentroidAggregatorFactory( + String name, + ValuesSourceConfig config, + AggregationContext context, + AggregatorFactory parent, + AggregatorFactories.Builder subFactoriesBuilder, + Map metadata, + MetricAggregatorSupplier aggregatorSupplier + ) throws IOException { super(name, config, context, parent, subFactoriesBuilder, metadata); this.aggregatorSupplier = aggregatorSupplier; } @@ -41,11 +43,8 @@ protected Aggregator createUnmapped(Aggregator parent, Map metad } @Override - protected Aggregator doCreateInternal( - Aggregator parent, - CardinalityUpperBound cardinality, - Map metadata - ) throws IOException { + protected Aggregator doCreateInternal(Aggregator parent, CardinalityUpperBound cardinality, Map metadata) + throws IOException { return aggregatorSupplier.build(name, config, context, parent, metadata); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/GlobalOrdCardinalityAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/GlobalOrdCardinalityAggregator.java index fd5981cabeb54..889734f1d3fd0 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/GlobalOrdCardinalityAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/GlobalOrdCardinalityAggregator.java @@ -8,26 +8,26 @@ package org.elasticsearch.search.aggregations.metrics; -import java.io.IOException; -import java.util.Map; - import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.SortedSetDocValues; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.core.Nullable; import org.elasticsearch.common.hash.MurmurHash3; -import org.elasticsearch.core.Releasables; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.BitArray; import org.elasticsearch.common.util.LongArray; import org.elasticsearch.common.util.ObjectArray; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.core.Releasables; import org.elasticsearch.search.aggregations.Aggregator; import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.LeafBucketCollector; import org.elasticsearch.search.aggregations.support.AggregationContext; import org.elasticsearch.search.aggregations.support.ValuesSource; +import java.io.IOException; +import java.util.Map; + /** * An aggregator that computes approximate counts of 
unique values * using global ords. @@ -45,15 +45,15 @@ public class GlobalOrdCardinalityAggregator extends NumericMetricsAggregator.Sin private SortedSetDocValues values; private ObjectArray visitedOrds; - public GlobalOrdCardinalityAggregator( - String name, - ValuesSource.Bytes.WithOrdinals valuesSource, - int precision, - int maxOrd, - AggregationContext context, - Aggregator parent, - Map metadata) throws IOException { + String name, + ValuesSource.Bytes.WithOrdinals valuesSource, + int precision, + int maxOrd, + AggregationContext context, + Aggregator parent, + Map metadata + ) throws IOException { super(name, context, parent, metadata); this.valuesSource = valuesSource; this.precision = precision; @@ -68,8 +68,7 @@ public ScoreMode scoreMode() { } @Override - public LeafBucketCollector getLeafCollector(LeafReaderContext ctx, - final LeafBucketCollector sub) throws IOException { + public LeafBucketCollector getLeafCollector(LeafReaderContext ctx, final LeafBucketCollector sub) throws IOException { values = valuesSource.globalOrdinalsValues(ctx); return new LeafBucketCollector() { @Override @@ -101,8 +100,9 @@ protected void doPostCollection() throws IOException { } final MurmurHash3.Hash128 hash = new MurmurHash3.Hash128(); - for (long ord = allVisitedOrds.nextSetBit(0); ord < Long.MAX_VALUE; - ord = ord + 1 < maxOrd ? allVisitedOrds.nextSetBit(ord + 1) : Long.MAX_VALUE) { + for (long ord = allVisitedOrds.nextSetBit(0); ord < Long.MAX_VALUE; ord = ord + 1 < maxOrd + ? allVisitedOrds.nextSetBit(ord + 1) + : Long.MAX_VALUE) { final BytesRef value = values.lookupOrd(ord); MurmurHash3.hash128(value.bytes, value.offset, value.length, 0, hash); hashes.set(ord, hash.h1); @@ -113,8 +113,9 @@ protected void doPostCollection() throws IOException { if (bits != null) { visitedOrds.set(bucket, null); // remove bitset from array counts.ensureCapacity(bucket, bits.cardinality()); - for (long ord = bits.nextSetBit(0); ord < Long.MAX_VALUE; - ord = ord + 1 < maxOrd ? bits.nextSetBit(ord + 1) : Long.MAX_VALUE) { + for (long ord = bits.nextSetBit(0); ord < Long.MAX_VALUE; ord = ord + 1 < maxOrd + ? 
bits.nextSetBit(ord + 1) + : Long.MAX_VALUE) { counts.collect(bucket, hashes.get(ord)); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/HDRPercentileRanksAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/HDRPercentileRanksAggregator.java index 32dacf2d5b2eb..5f6c68dc024b2 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/HDRPercentileRanksAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/HDRPercentileRanksAggregator.java @@ -19,9 +19,17 @@ class HDRPercentileRanksAggregator extends AbstractHDRPercentilesAggregator { - HDRPercentileRanksAggregator(String name, ValuesSource valuesSource, AggregationContext context, Aggregator parent, - double[] percents, int numberOfSignificantValueDigits, boolean keyed, DocValueFormat format, - Map metadata) throws IOException { + HDRPercentileRanksAggregator( + String name, + ValuesSource valuesSource, + AggregationContext context, + Aggregator parent, + double[] percents, + int numberOfSignificantValueDigits, + boolean keyed, + DocValueFormat format, + Map metadata + ) throws IOException { super(name, valuesSource, context, parent, percents, numberOfSignificantValueDigits, keyed, format, metadata); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/HDRPercentilesAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/HDRPercentilesAggregator.java index 5dbb4c1f2a324..fe0eee731e662 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/HDRPercentilesAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/HDRPercentilesAggregator.java @@ -19,8 +19,17 @@ class HDRPercentilesAggregator extends AbstractHDRPercentilesAggregator { - HDRPercentilesAggregator(String name, ValuesSource valuesSource, AggregationContext context, Aggregator parent, double[] percents, - int numberOfSignificantValueDigits, boolean keyed, DocValueFormat formatter, Map metadata) throws IOException { + HDRPercentilesAggregator( + String name, + ValuesSource valuesSource, + AggregationContext context, + Aggregator parent, + double[] percents, + int numberOfSignificantValueDigits, + boolean keyed, + DocValueFormat formatter, + Map metadata + ) throws IOException { super(name, valuesSource, context, parent, percents, numberOfSignificantValueDigits, keyed, formatter, metadata); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/HyperLogLogPlusPlus.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/HyperLogLogPlusPlus.java index 1d385dc1ef864..5f05080f6c04d 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/HyperLogLogPlusPlus.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/HyperLogLogPlusPlus.java @@ -10,13 +10,13 @@ import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.packed.PackedInts; -import org.elasticsearch.core.Releasable; -import org.elasticsearch.core.Releasables; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.BitArray; import org.elasticsearch.common.util.ByteArray; import org.elasticsearch.common.util.ByteUtils; import org.elasticsearch.common.util.IntArray; +import org.elasticsearch.core.Releasable; +import org.elasticsearch.core.Releasables; import java.nio.ByteBuffer; import java.nio.ByteOrder; @@ -205,10 +205,9 @@ private static class HyperLogLog extends 
AbstractHyperLogLog implements Releasab // array for holding the runlens. private ByteArray runLens; - HyperLogLog(BigArrays bigArrays, long initialBucketCount, int precision) { super(precision); - this.runLens = bigArrays.newByteArray(initialBucketCount << precision); + this.runLens = bigArrays.newByteArray(initialBucketCount << precision); this.bigArrays = bigArrays; this.iterator = new HyperLogLogIterator(this, precision, m); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/HyperLogLogPlusPlusSparse.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/HyperLogLogPlusPlusSparse.java index e4e183b80ba66..8cbca678bb633 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/HyperLogLogPlusPlusSparse.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/HyperLogLogPlusPlusSparse.java @@ -8,11 +8,11 @@ package org.elasticsearch.search.aggregations.metrics; -import org.elasticsearch.core.Releasable; -import org.elasticsearch.core.Releasables; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.IntArray; import org.elasticsearch.common.util.ObjectArray; +import org.elasticsearch.core.Releasable; +import org.elasticsearch.core.Releasables; /** * AbstractHyperLogLogPlusPlus instance that only supports linear counting. The maximum number of hashes supported @@ -180,8 +180,7 @@ private static class LinearCountingIterator implements AbstractLinearCounting.Ha int size, value; private long pos; - LinearCountingIterator() { - } + LinearCountingIterator() {} void reset(IntArray values, int size) { this.values = values; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalAvg.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalAvg.java index 52350cec6fb58..c59b49b42f2dc 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalAvg.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalAvg.java @@ -107,8 +107,8 @@ public boolean equals(Object obj) { if (obj == null || getClass() != obj.getClass()) return false; if (super.equals(obj) == false) return false; InternalAvg other = (InternalAvg) obj; - return Objects.equals(sum, other.sum) && - Objects.equals(count, other.count) && - Objects.equals(format.getWriteableName(), other.format.getWriteableName()); + return Objects.equals(sum, other.sum) + && Objects.equals(count, other.count) + && Objects.equals(format.getWriteableName(), other.format.getWriteableName()); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalCardinality.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalCardinality.java index 68975c9082946..9d29b584bc9ce 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalCardinality.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalCardinality.java @@ -78,8 +78,7 @@ public InternalAggregation reduce(List aggregations, Reduce final InternalCardinality cardinality = (InternalCardinality) aggregation; if (cardinality.counts != null) { if (reduced == null) { - reduced = new HyperLogLogPlusPlus(cardinality.counts.precision(), - BigArrays.NON_RECYCLING_INSTANCE, 1); + reduced = new HyperLogLogPlusPlus(cardinality.counts.precision(), BigArrays.NON_RECYCLING_INSTANCE, 1); } reduced.merge(0, cardinality.counts, 0); } @@ -119,4 +118,3 @@ 
AbstractHyperLogLogPlusPlus getState() { return counts; } } - diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalExtendedStats.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalExtendedStats.java index 70235c2499068..f839df8e29055 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalExtendedStats.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalExtendedStats.java @@ -25,9 +25,24 @@ public class InternalExtendedStats extends InternalStats implements ExtendedStats { enum Metrics { - count, sum, min, max, avg, sum_of_squares, variance, variance_population, variance_sampling, - std_deviation, std_deviation_population, std_deviation_sampling, std_upper, std_lower, std_upper_population, std_lower_population, - std_upper_sampling, std_lower_sampling; + count, + sum, + min, + max, + avg, + sum_of_squares, + variance, + variance_population, + variance_sampling, + std_deviation, + std_deviation_population, + std_deviation_sampling, + std_upper, + std_lower, + std_upper_population, + std_lower_population, + std_upper_sampling, + std_lower_sampling; public static Metrics resolve(String name) { return Metrics.valueOf(name); @@ -41,8 +56,17 @@ public static Metrics resolve(String name) { private final double sumOfSqrs; private final double sigma; - public InternalExtendedStats(String name, long count, double sum, double min, double max, double sumOfSqrs, double sigma, - DocValueFormat formatter, Map metadata) { + public InternalExtendedStats( + String name, + long count, + double sum, + double min, + double max, + double sumOfSqrs, + double sigma, + DocValueFormat formatter, + Map metadata + ) { super(name, count, sum, min, max, formatter, metadata); this.sumOfSqrs = sumOfSqrs; this.sigma = sigma; @@ -133,14 +157,14 @@ public double getVariance() { @Override public double getVariancePopulation() { - double variance = (sumOfSqrs - ((sum * sum) / count)) / count; - return variance < 0 ? 0 : variance; + double variance = (sumOfSqrs - ((sum * sum) / count)) / count; + return variance < 0 ? 0 : variance; } @Override public double getVarianceSampling() { - double variance = (sumOfSqrs - ((sum * sum) / count)) / (count - 1); - return variance < 0 ? 0 : variance; + double variance = (sumOfSqrs - ((sum * sum) / count)) / (count - 1); + return variance < 0 ? 
0 : variance; } @Override @@ -251,8 +275,17 @@ public InternalExtendedStats reduce(List aggregations, Redu } } final InternalStats stats = super.reduce(aggregations, reduceContext); - return new InternalExtendedStats(name, stats.getCount(), stats.getSum(), stats.getMin(), stats.getMax(), sumOfSqrs, sigma, - format, getMetadata()); + return new InternalExtendedStats( + name, + stats.getCount(), + stats.getSum(), + stats.getMin(), + stats.getMax(), + sumOfSqrs, + sigma, + format, + getMetadata() + ); } static class Fields { @@ -353,7 +386,6 @@ public boolean equals(Object obj) { if (super.equals(obj) == false) return false; InternalExtendedStats other = (InternalExtendedStats) obj; - return Double.compare(sumOfSqrs, other.sumOfSqrs) == 0 && - Double.compare(sigma, other.sigma) == 0; + return Double.compare(sumOfSqrs, other.sumOfSqrs) == 0 && Double.compare(sigma, other.sigma) == 0; } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalGeoBounds.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalGeoBounds.java index 3f96a266a16a6..088d326ec1a11 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalGeoBounds.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalGeoBounds.java @@ -29,8 +29,17 @@ public class InternalGeoBounds extends InternalAggregation implements GeoBounds public final double negRight; public final boolean wrapLongitude; - public InternalGeoBounds(String name, double top, double bottom, double posLeft, double posRight, - double negLeft, double negRight, boolean wrapLongitude, Map metadata) { + public InternalGeoBounds( + String name, + double top, + double bottom, + double posLeft, + double posRight, + double negLeft, + double negRight, + boolean wrapLongitude, + Map metadata + ) { super(name, metadata); this.top = top; this.bottom = bottom; @@ -118,39 +127,39 @@ public Object getProperty(List path) { GeoBoundingBox geoBoundingBox = resolveGeoBoundingBox(); String bBoxSide = path.get(0); switch (bBoxSide) { - case "top": - return geoBoundingBox.top(); - case "left": - return geoBoundingBox.left(); - case "bottom": - return geoBoundingBox.bottom(); - case "right": - return geoBoundingBox.right(); - default: - throw new IllegalArgumentException("Found unknown path element [" + bBoxSide + "] in [" + getName() + "]"); + case "top": + return geoBoundingBox.top(); + case "left": + return geoBoundingBox.left(); + case "bottom": + return geoBoundingBox.bottom(); + case "right": + return geoBoundingBox.right(); + default: + throw new IllegalArgumentException("Found unknown path element [" + bBoxSide + "] in [" + getName() + "]"); } } else if (path.size() == 2) { GeoBoundingBox geoBoundingBox = resolveGeoBoundingBox(); GeoPoint cornerPoint = null; String cornerString = path.get(0); switch (cornerString) { - case "top_left": - cornerPoint = geoBoundingBox.topLeft(); - break; - case "bottom_right": - cornerPoint = geoBoundingBox.bottomRight(); - break; - default: - throw new IllegalArgumentException("Found unknown path element [" + cornerString + "] in [" + getName() + "]"); + case "top_left": + cornerPoint = geoBoundingBox.topLeft(); + break; + case "bottom_right": + cornerPoint = geoBoundingBox.bottomRight(); + break; + default: + throw new IllegalArgumentException("Found unknown path element [" + cornerString + "] in [" + getName() + "]"); } String latLonString = path.get(1); switch (latLonString) { - case "lat": - return cornerPoint.lat(); - case 
"lon": - return cornerPoint.lon(); - default: - throw new IllegalArgumentException("Found unknown path element [" + latLonString + "] in [" + getName() + "]"); + case "lat": + return cornerPoint.lat(); + case "lon": + return cornerPoint.lon(); + default: + throw new IllegalArgumentException("Found unknown path element [" + latLonString + "] in [" + getName() + "]"); } } else { throw new IllegalArgumentException("path not supported for [" + getName() + "]: " + path); @@ -215,13 +224,13 @@ public boolean equals(Object obj) { if (super.equals(obj) == false) return false; InternalGeoBounds other = (InternalGeoBounds) obj; - return top == other.top && - bottom == other.bottom && - posLeft == other.posLeft && - posRight == other.posRight && - negLeft == other.negLeft && - negRight == other.negRight && - wrapLongitude == other.wrapLongitude; + return top == other.top + && bottom == other.bottom + && posLeft == other.posLeft + && posRight == other.posRight + && negLeft == other.negLeft + && negRight == other.negRight + && wrapLongitude == other.wrapLongitude; } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalGeoCentroid.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalGeoCentroid.java index ed75ea4bc75f9..736bd79c6e16c 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalGeoCentroid.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalGeoCentroid.java @@ -10,10 +10,10 @@ import org.apache.lucene.geo.GeoEncodingUtils; import org.elasticsearch.Version; -import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.search.aggregations.InternalAggregation; @@ -30,8 +30,9 @@ public class InternalGeoCentroid extends InternalAggregation implements GeoCentr private final long count; public static long encodeLatLon(double lat, double lon) { - return (Integer.toUnsignedLong(GeoEncodingUtils.encodeLatitude(lat)) << 32) | - Integer.toUnsignedLong(GeoEncodingUtils.encodeLongitude(lon)); + return (Integer.toUnsignedLong(GeoEncodingUtils.encodeLatitude(lat)) << 32) | Integer.toUnsignedLong( + GeoEncodingUtils.encodeLongitude(lon) + ); } public static double decodeLatitude(long encodedLatLon) { @@ -118,7 +119,7 @@ public InternalGeoCentroid reduce(List aggregations, Reduce } } } - final GeoPoint result = (Double.isNaN(lonSum)) ? null : new GeoPoint(latSum/totalCount, lonSum/totalCount); + final GeoPoint result = (Double.isNaN(lonSum)) ? 
null : new GeoPoint(latSum / totalCount, lonSum / totalCount); return new InternalGeoCentroid(name, result, totalCount, getMetadata()); } @@ -177,8 +178,7 @@ public boolean equals(Object obj) { if (obj == null || getClass() != obj.getClass()) return false; if (super.equals(obj) == false) return false; InternalGeoCentroid that = (InternalGeoCentroid) obj; - return count == that.count && - Objects.equals(centroid, that.centroid); + return count == that.count && Objects.equals(centroid, that.centroid); } @Override @@ -188,9 +188,6 @@ public int hashCode() { @Override public String toString() { - return "InternalGeoCentroid{" + - "centroid=" + centroid + - ", count=" + count + - '}'; + return "InternalGeoCentroid{" + "centroid=" + centroid + ", count=" + count + '}'; } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalHDRPercentileRanks.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalHDRPercentileRanks.java index c1acf0d83dac2..a307b67c77d97 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalHDRPercentileRanks.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalHDRPercentileRanks.java @@ -18,8 +18,14 @@ public class InternalHDRPercentileRanks extends AbstractInternalHDRPercentiles implements PercentileRanks { public static final String NAME = "hdr_percentile_ranks"; - public InternalHDRPercentileRanks(String name, double[] cdfValues, DoubleHistogram state, boolean keyed, DocValueFormat formatter, - Map metadata) { + public InternalHDRPercentileRanks( + String name, + double[] cdfValues, + DoubleHistogram state, + boolean keyed, + DocValueFormat formatter, + Map metadata + ) { super(name, cdfValues, state, keyed, formatter, metadata); } @@ -56,8 +62,13 @@ public double value(double key) { } @Override - protected AbstractInternalHDRPercentiles createReduced(String name, double[] keys, DoubleHistogram merged, boolean keyed, - Map metadata) { + protected AbstractInternalHDRPercentiles createReduced( + String name, + double[] keys, + DoubleHistogram merged, + boolean keyed, + Map metadata + ) { return new InternalHDRPercentileRanks(name, keys, merged, keyed, format, metadata); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalHDRPercentiles.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalHDRPercentiles.java index bda526391b501..83ba92991764c 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalHDRPercentiles.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalHDRPercentiles.java @@ -18,8 +18,14 @@ public class InternalHDRPercentiles extends AbstractInternalHDRPercentiles implements Percentiles { public static final String NAME = "hdr_percentiles"; - public InternalHDRPercentiles(String name, double[] percents, DoubleHistogram state, boolean keyed, DocValueFormat formatter, - Map metadata) { + public InternalHDRPercentiles( + String name, + double[] percents, + DoubleHistogram state, + boolean keyed, + DocValueFormat formatter, + Map metadata + ) { super(name, percents, state, keyed, formatter, metadata); } @@ -59,8 +65,13 @@ public double value(double key) { } @Override - protected AbstractInternalHDRPercentiles createReduced(String name, double[] keys, DoubleHistogram merged, boolean keyed, - Map metadata) { + protected AbstractInternalHDRPercentiles createReduced( + String name, + double[] keys, + DoubleHistogram 
merged, + boolean keyed, + Map metadata + ) { return new InternalHDRPercentiles(name, keys, merged, keyed, format, metadata); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalMedianAbsoluteDeviation.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalMedianAbsoluteDeviation.java index 9d38780c9213e..5494f8480ba40 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalMedianAbsoluteDeviation.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalMedianAbsoluteDeviation.java @@ -76,9 +76,7 @@ public InternalAggregation reduce(List aggregations, Reduce @Override public XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException { final boolean anyResults = valuesSketch.size() > 0; - final Double mad = anyResults - ? getMedianAbsoluteDeviation() - : null; + final Double mad = anyResults ? getMedianAbsoluteDeviation() : null; builder.field(CommonFields.VALUE.getPreferredName(), mad); if (format != DocValueFormat.RAW && anyResults) { diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalNumericMetricsAggregation.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalNumericMetricsAggregation.java index d4217c16f21b6..7376ca3fef242 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalNumericMetricsAggregation.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalNumericMetricsAggregation.java @@ -56,8 +56,12 @@ public Object getProperty(List path) { public final double sortValue(String key) { if (key != null && false == key.equals("value")) { throw new IllegalArgumentException( - "Unknown value key [" + key + "] for single-value metric aggregation [" + getName() + - "]. Either use [value] as key or drop the key all together"); + "Unknown value key [" + + key + + "] for single-value metric aggregation [" + + getName() + + "]. 
Either use [value] as key or drop the key all together" + ); } return value(); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalScriptedMetric.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalScriptedMetric.java index 3d867b28ac449..1ff7c926cd433 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalScriptedMetric.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalScriptedMetric.java @@ -101,15 +101,15 @@ public InternalAggregation reduce(List aggregations, Reduce params.putAll(firstAggregation.reduceScript.getParams()); } - ScriptedMetricAggContexts.ReduceScript.Factory factory = reduceContext.scriptService().compile( - firstAggregation.reduceScript, ScriptedMetricAggContexts.ReduceScript.CONTEXT); + ScriptedMetricAggContexts.ReduceScript.Factory factory = reduceContext.scriptService() + .compile(firstAggregation.reduceScript, ScriptedMetricAggContexts.ReduceScript.CONTEXT); ScriptedMetricAggContexts.ReduceScript script = factory.newInstance(params, aggregationObjects); Object scriptResult = script.execute(); CollectionUtils.ensureNoSelfReferences(scriptResult, "reduce script"); aggregation = Collections.singletonList(scriptResult); - } else if (reduceContext.isFinalReduce()) { + } else if (reduceContext.isFinalReduce()) { aggregation = Collections.singletonList(aggregationObjects); } else { // if we are not an final reduce we have to maintain all the aggs from all the incoming one @@ -147,8 +147,7 @@ public boolean equals(Object obj) { if (super.equals(obj) == false) return false; InternalScriptedMetric other = (InternalScriptedMetric) obj; - return Objects.equals(reduceScript, other.reduceScript) && - Objects.equals(aggregations, other.aggregations); + return Objects.equals(reduceScript, other.reduceScript) && Objects.equals(aggregations, other.aggregations); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalStats.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalStats.java index 2bbe0f41c5ea3..6b9a87d58e5e0 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalStats.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalStats.java @@ -25,7 +25,11 @@ public class InternalStats extends InternalNumericMetricsAggregation.MultiValue implements Stats { enum Metrics { - count, sum, min, max, avg; + count, + sum, + min, + max, + avg; public static Metrics resolve(String name) { return Metrics.valueOf(name); @@ -41,8 +45,15 @@ public static Metrics resolve(String name) { protected final double max; protected final double sum; - public InternalStats(String name, long count, double sum, double min, double max, DocValueFormat formatter, - Map metadata) { + public InternalStats( + String name, + long count, + double sum, + double min, + double max, + DocValueFormat formatter, + Map metadata + ) { super(name, metadata); this.count = count; this.sum = sum; @@ -73,8 +84,7 @@ protected final void doWriteTo(StreamOutput out) throws IOException { writeOtherStatsTo(out); } - protected void writeOtherStatsTo(StreamOutput out) throws IOException { - } + protected void writeOtherStatsTo(StreamOutput out) throws IOException {} @Override public String getWriteableName() { @@ -130,11 +140,16 @@ public String getSumAsString() { public double value(String name) { Metrics metrics = Metrics.valueOf(name); switch (metrics) { - case 
min: return this.min; - case max: return this.max; - case avg: return this.getAvg(); - case count: return this.count; - case sum: return this.sum; + case min: + return this.min; + case max: + return this.max; + case avg: + return this.getAvg(); + case count: + return this.count; + case sum: + return this.sum; default: throw new IllegalArgumentException("Unknown value [" + name + "] in common stats aggregation"); } @@ -216,9 +231,9 @@ public boolean equals(Object obj) { if (super.equals(obj) == false) return false; InternalStats other = (InternalStats) obj; - return count == other.count && - Double.compare(min, other.min) == 0 && - Double.compare(max, other.max) == 0 && - Double.compare(sum, other.sum) == 0; + return count == other.count + && Double.compare(min, other.min) == 0 + && Double.compare(max, other.max) == 0 + && Double.compare(sum, other.sum) == 0; } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalSum.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalSum.java index b1eaf28e63e89..37361a298dff6 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalSum.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalSum.java @@ -21,7 +21,7 @@ public class InternalSum extends InternalNumericMetricsAggregation.SingleValue implements Sum { private final double sum; - public InternalSum(String name, double sum, DocValueFormat formatter, Map metadata) { + public InternalSum(String name, double sum, DocValueFormat formatter, Map metadata) { super(name, metadata); this.sum = sum; this.format = formatter; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalTDigestPercentileRanks.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalTDigestPercentileRanks.java index 03e429ce8126b..c0c79afc494cf 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalTDigestPercentileRanks.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalTDigestPercentileRanks.java @@ -17,8 +17,14 @@ public class InternalTDigestPercentileRanks extends AbstractInternalTDigestPercentiles implements PercentileRanks { public static final String NAME = "tdigest_percentile_ranks"; - public InternalTDigestPercentileRanks(String name, double[] cdfValues, TDigestState state, boolean keyed, DocValueFormat formatter, - Map metadata) { + public InternalTDigestPercentileRanks( + String name, + double[] cdfValues, + TDigestState state, + boolean keyed, + DocValueFormat formatter, + Map metadata + ) { super(name, cdfValues, state, keyed, formatter, metadata); } @@ -55,8 +61,13 @@ public double value(double key) { } @Override - protected AbstractInternalTDigestPercentiles createReduced(String name, double[] keys, TDigestState merged, boolean keyed, - Map metadata) { + protected AbstractInternalTDigestPercentiles createReduced( + String name, + double[] keys, + TDigestState merged, + boolean keyed, + Map metadata + ) { return new InternalTDigestPercentileRanks(name, keys, merged, keyed, format, metadata); } @@ -64,8 +75,7 @@ public static double percentileRank(TDigestState state, double value) { double percentileRank = state.cdf(value); if (percentileRank < 0) { percentileRank = 0; - } - else if (percentileRank > 1) { + } else if (percentileRank > 1) { percentileRank = 1; } return percentileRank * 100; diff --git 
a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalTDigestPercentiles.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalTDigestPercentiles.java index b18208f4cf345..699cb665bf007 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalTDigestPercentiles.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalTDigestPercentiles.java @@ -17,8 +17,14 @@ public class InternalTDigestPercentiles extends AbstractInternalTDigestPercentiles implements Percentiles { public static final String NAME = "tdigest_percentiles"; - public InternalTDigestPercentiles(String name, double[] percents, TDigestState state, boolean keyed, DocValueFormat formatter, - Map metadata) { + public InternalTDigestPercentiles( + String name, + double[] percents, + TDigestState state, + boolean keyed, + DocValueFormat formatter, + Map metadata + ) { super(name, percents, state, keyed, formatter, metadata); } @@ -55,8 +61,13 @@ public double value(double key) { } @Override - protected AbstractInternalTDigestPercentiles createReduced(String name, double[] keys, TDigestState merged, boolean keyed, - Map metadata) { + protected AbstractInternalTDigestPercentiles createReduced( + String name, + double[] keys, + TDigestState merged, + boolean keyed, + Map metadata + ) { return new InternalTDigestPercentiles(name, keys, merged, keyed, format, metadata); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalTopHits.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalTopHits.java index 7d71fbc438f62..1e571c4db5dda 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalTopHits.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalTopHits.java @@ -30,15 +30,20 @@ /** * Results of the {@link TopHitsAggregator}. 
*/ -public class -InternalTopHits extends InternalAggregation implements TopHits { +public class InternalTopHits extends InternalAggregation implements TopHits { private int from; private int size; private TopDocsAndMaxScore topDocs; private SearchHits searchHits; - public InternalTopHits(String name, int from, int size, TopDocsAndMaxScore topDocs, SearchHits searchHits, - Map metadata) { + public InternalTopHits( + String name, + int from, + int size, + TopDocsAndMaxScore topDocs, + SearchHits searchHits, + Map metadata + ) { super(name, metadata); this.from = from; this.size = size; @@ -147,9 +152,14 @@ public InternalAggregation reduce(List aggregations, Reduce hits[i] = shardHits[scoreDoc.shardIndex].getAt(position); } assert reducedTopDocs.totalHits.relation == Relation.EQUAL_TO; - return new InternalTopHits(name, this.from, this.size, + return new InternalTopHits( + name, + this.from, + this.size, new TopDocsAndMaxScore(reducedTopDocs, maxScore), - new SearchHits(hits, reducedTopDocs.totalHits, maxScore), getMetadata()); + new SearchHits(hits, reducedTopDocs.totalHits, maxScore), + getMetadata() + ); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalWeightedAvg.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalWeightedAvg.java index f777431d2e1df..2f574f17e2992 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalWeightedAvg.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalWeightedAvg.java @@ -109,8 +109,8 @@ public boolean equals(Object obj) { if (obj == null || getClass() != obj.getClass()) return false; if (super.equals(obj) == false) return false; InternalWeightedAvg other = (InternalWeightedAvg) obj; - return Objects.equals(sum, other.sum) && - Objects.equals(weight, other.weight) && - Objects.equals(format.getWriteableName(), other.format.getWriteableName()); + return Objects.equals(sum, other.sum) + && Objects.equals(weight, other.weight) + && Objects.equals(format.getWriteableName(), other.format.getWriteableName()); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MaxAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MaxAggregationBuilder.java index e98f12ade0cc1..39f7d29386cb7 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MaxAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MaxAggregationBuilder.java @@ -46,9 +46,11 @@ public MaxAggregationBuilder(String name) { super(name); } - protected MaxAggregationBuilder(MaxAggregationBuilder clone, - AggregatorFactories.Builder factoriesBuilder, - Map metadata) { + protected MaxAggregationBuilder( + MaxAggregationBuilder clone, + AggregatorFactories.Builder factoriesBuilder, + Map metadata + ) { super(clone, factoriesBuilder, metadata); } @@ -75,13 +77,14 @@ protected void innerWriteTo(StreamOutput out) { } @Override - protected MaxAggregatorFactory innerBuild(AggregationContext context, ValuesSourceConfig config, - AggregatorFactory parent, - AggregatorFactories.Builder subFactoriesBuilder) throws IOException { - MetricAggregatorSupplier aggregatorSupplier = - context.getValuesSourceRegistry().getAggregator(REGISTRY_KEY, config); - return new MaxAggregatorFactory(name, config, context, - parent, subFactoriesBuilder, metadata, aggregatorSupplier); + protected MaxAggregatorFactory innerBuild( + AggregationContext context, + 
ValuesSourceConfig config, + AggregatorFactory parent, + AggregatorFactories.Builder subFactoriesBuilder + ) throws IOException { + MetricAggregatorSupplier aggregatorSupplier = context.getValuesSourceRegistry().getAggregator(REGISTRY_KEY, config); + return new MaxAggregatorFactory(name, config, context, parent, subFactoriesBuilder, metadata, aggregatorSupplier); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MaxAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MaxAggregator.java index f4de6bbd5f2b5..b3a60f379aa84 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MaxAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MaxAggregator.java @@ -13,8 +13,8 @@ import org.apache.lucene.search.ScoreMode; import org.apache.lucene.util.Bits; import org.apache.lucene.util.FutureArrays; -import org.elasticsearch.core.Releasables; import org.elasticsearch.common.util.DoubleArray; +import org.elasticsearch.core.Releasables; import org.elasticsearch.index.fielddata.NumericDoubleValues; import org.elasticsearch.index.fielddata.SortedNumericDoubleValues; import org.elasticsearch.search.DocValueFormat; @@ -41,10 +41,8 @@ class MaxAggregator extends NumericMetricsAggregator.SingleValue { DoubleArray maxes; - MaxAggregator(String name, - ValuesSourceConfig config, - AggregationContext context, - Aggregator parent, Map metadata) throws IOException { + MaxAggregator(String name, ValuesSourceConfig config, AggregationContext context, Aggregator parent, Map metadata) + throws IOException { super(name, context, parent, metadata); // TODO stop expecting nulls here this.valuesSource = config.hasValues() ? (ValuesSource.Numeric) config.getValuesSource() : null; @@ -67,8 +65,7 @@ public ScoreMode scoreMode() { } @Override - public LeafBucketCollector getLeafCollector(LeafReaderContext ctx, - final LeafBucketCollector sub) throws IOException { + public LeafBucketCollector getLeafCollector(LeafReaderContext ctx, final LeafBucketCollector sub) throws IOException { if (valuesSource == null) { return LeafBucketCollector.NO_OP_COLLECTOR; } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MaxAggregatorFactory.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MaxAggregatorFactory.java index daa016b8a5c80..4effad4e687d2 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MaxAggregatorFactory.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MaxAggregatorFactory.java @@ -31,13 +31,19 @@ static void registerAggregators(ValuesSourceRegistry.Builder builder) { MaxAggregationBuilder.REGISTRY_KEY, List.of(CoreValuesSourceType.NUMERIC, CoreValuesSourceType.DATE, CoreValuesSourceType.BOOLEAN), MaxAggregator::new, - true); + true + ); } - MaxAggregatorFactory(String name, ValuesSourceConfig config, AggregationContext context, - AggregatorFactory parent, AggregatorFactories.Builder subFactoriesBuilder, - Map metadata, - MetricAggregatorSupplier aggregatorSupplier) throws IOException { + MaxAggregatorFactory( + String name, + ValuesSourceConfig config, + AggregationContext context, + AggregatorFactory parent, + AggregatorFactories.Builder subFactoriesBuilder, + Map metadata, + MetricAggregatorSupplier aggregatorSupplier + ) throws IOException { super(name, config, context, parent, subFactoriesBuilder, metadata); this.aggregatorSupplier = aggregatorSupplier; } @@ -48,12 +54,8 @@ 
protected Aggregator createUnmapped(Aggregator parent, Map metad } @Override - protected Aggregator doCreateInternal( - Aggregator parent, - CardinalityUpperBound cardinality, - Map metadata - ) throws IOException { - return aggregatorSupplier - .build(name, config, context, parent, metadata); + protected Aggregator doCreateInternal(Aggregator parent, CardinalityUpperBound cardinality, Map metadata) + throws IOException { + return aggregatorSupplier.build(name, config, context, parent, metadata); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MedianAbsoluteDeviationAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MedianAbsoluteDeviationAggregationBuilder.java index 798e050932e84..e5fb1b6491d9d 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MedianAbsoluteDeviationAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MedianAbsoluteDeviationAggregationBuilder.java @@ -8,10 +8,10 @@ package org.elasticsearch.search.aggregations.metrics; -import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.ObjectParser; +import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregatorFactories; @@ -38,8 +38,10 @@ public class MedianAbsoluteDeviationAggregationBuilder extends LeafOnly PARSER = - ObjectParser.fromBuilder(NAME, MedianAbsoluteDeviationAggregationBuilder::new); + public static final ObjectParser PARSER = ObjectParser.fromBuilder( + NAME, + MedianAbsoluteDeviationAggregationBuilder::new + ); static { ValuesSourceAggregationBuilder.declareFields(PARSER, true, true, false); PARSER.declareDouble(MedianAbsoluteDeviationAggregationBuilder::compression, COMPRESSION_FIELD); @@ -60,9 +62,11 @@ public MedianAbsoluteDeviationAggregationBuilder(StreamInput in) throws IOExcept compression = in.readDouble(); } - protected MedianAbsoluteDeviationAggregationBuilder(MedianAbsoluteDeviationAggregationBuilder clone, - AggregatorFactories.Builder factoriesBuilder, - Map metadata) { + protected MedianAbsoluteDeviationAggregationBuilder( + MedianAbsoluteDeviationAggregationBuilder clone, + AggregatorFactories.Builder factoriesBuilder, + Map metadata + ) { super(clone, factoriesBuilder, metadata); this.compression = clone.compression; } @@ -80,7 +84,8 @@ public double compression() { public MedianAbsoluteDeviationAggregationBuilder compression(double compression) { if (compression <= 0d) { throw new IllegalArgumentException( - "[" + COMPRESSION_FIELD.getPreferredName() + "] must be greater than 0. Found [" + compression + "] in [" + name + "]"); + "[" + COMPRESSION_FIELD.getPreferredName() + "] must be greater than 0. 
Found [" + compression + "] in [" + name + "]" + ); } this.compression = compression; return this; @@ -102,17 +107,26 @@ protected void innerWriteTo(StreamOutput out) throws IOException { } @Override - protected ValuesSourceAggregatorFactory innerBuild(AggregationContext context, - ValuesSourceConfig config, - AggregatorFactory parent, - AggregatorFactories.Builder subFactoriesBuilder) - throws IOException { - - MedianAbsoluteDeviationAggregatorSupplier aggregatorSupplier = - context.getValuesSourceRegistry().getAggregator(REGISTRY_KEY, config); - - return new MedianAbsoluteDeviationAggregatorFactory(name, config, context, - parent, subFactoriesBuilder, metadata, compression, aggregatorSupplier); + protected ValuesSourceAggregatorFactory innerBuild( + AggregationContext context, + ValuesSourceConfig config, + AggregatorFactory parent, + AggregatorFactories.Builder subFactoriesBuilder + ) throws IOException { + + MedianAbsoluteDeviationAggregatorSupplier aggregatorSupplier = context.getValuesSourceRegistry() + .getAggregator(REGISTRY_KEY, config); + + return new MedianAbsoluteDeviationAggregatorFactory( + name, + config, + context, + parent, + subFactoriesBuilder, + metadata, + compression, + aggregatorSupplier + ); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MedianAbsoluteDeviationAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MedianAbsoluteDeviationAggregator.java index 18383bb357199..e052337eeeb2d 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MedianAbsoluteDeviationAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MedianAbsoluteDeviationAggregator.java @@ -10,9 +10,9 @@ import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.ScoreMode; +import org.elasticsearch.common.util.ObjectArray; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.Releasables; -import org.elasticsearch.common.util.ObjectArray; import org.elasticsearch.index.fielddata.SortedNumericDoubleValues; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.Aggregator; @@ -37,13 +37,15 @@ public class MedianAbsoluteDeviationAggregator extends NumericMetricsAggregator. 
private ObjectArray valueSketches; - MedianAbsoluteDeviationAggregator(String name, - @Nullable ValuesSource valuesSource, - DocValueFormat format, - AggregationContext context, - Aggregator parent, - Map metadata, - double compression) throws IOException { + MedianAbsoluteDeviationAggregator( + String name, + @Nullable ValuesSource valuesSource, + DocValueFormat format, + AggregationContext context, + Aggregator parent, + Map metadata, + double compression + ) throws IOException { super(name, context, parent, metadata); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MedianAbsoluteDeviationAggregatorFactory.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MedianAbsoluteDeviationAggregatorFactory.java index f70c1ed4d5872..80082d83b2c88 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MedianAbsoluteDeviationAggregatorFactory.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MedianAbsoluteDeviationAggregatorFactory.java @@ -26,14 +26,16 @@ public class MedianAbsoluteDeviationAggregatorFactory extends ValuesSourceAggreg private final MedianAbsoluteDeviationAggregatorSupplier aggregatorSupplier; private final double compression; - MedianAbsoluteDeviationAggregatorFactory(String name, - ValuesSourceConfig config, - AggregationContext context, - AggregatorFactory parent, - AggregatorFactories.Builder subFactoriesBuilder, - Map metadata, - double compression, - MedianAbsoluteDeviationAggregatorSupplier aggregatorSupplier) throws IOException { + MedianAbsoluteDeviationAggregatorFactory( + String name, + ValuesSourceConfig config, + AggregationContext context, + AggregatorFactory parent, + AggregatorFactories.Builder subFactoriesBuilder, + Map metadata, + double compression, + MedianAbsoluteDeviationAggregatorSupplier aggregatorSupplier + ) throws IOException { super(name, config, context, parent, subFactoriesBuilder, metadata); this.aggregatorSupplier = aggregatorSupplier; @@ -45,29 +47,18 @@ static void registerAggregators(ValuesSourceRegistry.Builder builder) { MedianAbsoluteDeviationAggregationBuilder.REGISTRY_KEY, CoreValuesSourceType.NUMERIC, MedianAbsoluteDeviationAggregator::new, - true); + true + ); } @Override protected Aggregator createUnmapped(Aggregator parent, Map metadata) throws IOException { - return new MedianAbsoluteDeviationAggregator( - name, - null, - config.format(), - context, - parent, - metadata, - compression - ); + return new MedianAbsoluteDeviationAggregator(name, null, config.format(), context, parent, metadata, compression); } @Override - protected Aggregator doCreateInternal( - Aggregator parent, - CardinalityUpperBound cardinality, - Map metadata - ) throws IOException { - return aggregatorSupplier - .build(name, config.getValuesSource(), config.format(), context, parent, metadata, compression); + protected Aggregator doCreateInternal(Aggregator parent, CardinalityUpperBound cardinality, Map metadata) + throws IOException { + return aggregatorSupplier.build(name, config.getValuesSource(), config.format(), context, parent, metadata, compression); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MedianAbsoluteDeviationAggregatorSupplier.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MedianAbsoluteDeviationAggregatorSupplier.java index ef7b956b89b13..ececd881f226f 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MedianAbsoluteDeviationAggregatorSupplier.java +++ 
b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MedianAbsoluteDeviationAggregatorSupplier.java @@ -16,11 +16,13 @@ import java.util.Map; public interface MedianAbsoluteDeviationAggregatorSupplier { - Aggregator build(String name, - ValuesSource valuesSource, - DocValueFormat format, - AggregationContext context, - Aggregator parent, - Map metadata, - double compression) throws IOException; + Aggregator build( + String name, + ValuesSource valuesSource, + DocValueFormat format, + AggregationContext context, + Aggregator parent, + Map metadata, + double compression + ) throws IOException; } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MetricAggregatorSupplier.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MetricAggregatorSupplier.java index 0c968a5f827e3..bc86381c5c793 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MetricAggregatorSupplier.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MetricAggregatorSupplier.java @@ -15,9 +15,11 @@ import java.util.Map; public interface MetricAggregatorSupplier { - Aggregator build(String name, - ValuesSourceConfig valuesSourceConfig, - AggregationContext context, - Aggregator parent, - Map metadata) throws IOException; + Aggregator build( + String name, + ValuesSourceConfig valuesSourceConfig, + AggregationContext context, + Aggregator parent, + Map metadata + ) throws IOException; } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MetricInspectionHelper.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MetricInspectionHelper.java index 50cde2bd552cf..1fbab64a17a36 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MetricInspectionHelper.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MetricInspectionHelper.java @@ -40,7 +40,7 @@ public static boolean hasValue(InternalMedianAbsoluteDeviation agg) { public static boolean hasValue(InternalScriptedMetric agg) { // TODO better way to know if the scripted metric received documents? // Could check for null too, but a script might return null on purpose... 
- return agg.aggregationsList().size() > 0 ; + return agg.aggregationsList().size() > 0; } public static boolean hasValue(InternalTDigestPercentileRanks agg) { diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MinAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MinAggregationBuilder.java index 25fa6d7c25ac3..e3a48c9592f84 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MinAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MinAggregationBuilder.java @@ -42,9 +42,11 @@ public MinAggregationBuilder(String name) { super(name); } - protected MinAggregationBuilder(MinAggregationBuilder clone, - AggregatorFactories.Builder factoriesBuilder, - Map metadata) { + protected MinAggregationBuilder( + MinAggregationBuilder clone, + AggregatorFactories.Builder factoriesBuilder, + Map metadata + ) { super(clone, factoriesBuilder, metadata); } @@ -75,15 +77,16 @@ protected void innerWriteTo(StreamOutput out) { } @Override - protected MinAggregatorFactory innerBuild(AggregationContext context, ValuesSourceConfig config, - AggregatorFactory parent, - AggregatorFactories.Builder subFactoriesBuilder) throws IOException { + protected MinAggregatorFactory innerBuild( + AggregationContext context, + ValuesSourceConfig config, + AggregatorFactory parent, + AggregatorFactories.Builder subFactoriesBuilder + ) throws IOException { - MetricAggregatorSupplier aggregatorSupplier = - context.getValuesSourceRegistry().getAggregator(REGISTRY_KEY, config); + MetricAggregatorSupplier aggregatorSupplier = context.getValuesSourceRegistry().getAggregator(REGISTRY_KEY, config); - return new MinAggregatorFactory(name, config, context, - parent, subFactoriesBuilder, metadata, aggregatorSupplier); + return new MinAggregatorFactory(name, config, context, parent, subFactoriesBuilder, metadata, aggregatorSupplier); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MinAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MinAggregator.java index b060313f178f1..bdd7b6f7d3648 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MinAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MinAggregator.java @@ -13,8 +13,8 @@ import org.apache.lucene.search.CollectionTerminatedException; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.util.Bits; -import org.elasticsearch.core.Releasables; import org.elasticsearch.common.util.DoubleArray; +import org.elasticsearch.core.Releasables; import org.elasticsearch.index.fielddata.NumericDoubleValues; import org.elasticsearch.index.fielddata.SortedNumericDoubleValues; import org.elasticsearch.search.DocValueFormat; @@ -42,11 +42,8 @@ public class MinAggregator extends NumericMetricsAggregator.SingleValue { DoubleArray mins; - MinAggregator(String name, - ValuesSourceConfig config, - AggregationContext context, - Aggregator parent, - Map metadata) throws IOException { + MinAggregator(String name, ValuesSourceConfig config, AggregationContext context, Aggregator parent, Map metadata) + throws IOException { super(name, context, parent, metadata); // TODO: Stop using nulls here this.valuesSource = config.hasValues() ? 
(ValuesSource.Numeric) config.getValuesSource() : null; @@ -69,8 +66,7 @@ public ScoreMode scoreMode() { } @Override - public LeafBucketCollector getLeafCollector(LeafReaderContext ctx, - final LeafBucketCollector sub) throws IOException { + public LeafBucketCollector getLeafCollector(LeafReaderContext ctx, final LeafBucketCollector sub) throws IOException { if (valuesSource == null) { return LeafBucketCollector.NO_OP_COLLECTOR; } @@ -135,7 +131,6 @@ public void doClose() { Releasables.close(mins); } - /** * Returns the minimum value indexed in the fieldName field or null * if the value cannot be inferred from the indexed {@link PointValues}. diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MinAggregatorFactory.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MinAggregatorFactory.java index 0a995aba92e00..c6e7f5eee0890 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MinAggregatorFactory.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MinAggregatorFactory.java @@ -31,13 +31,19 @@ static void registerAggregators(ValuesSourceRegistry.Builder builder) { MinAggregationBuilder.REGISTRY_KEY, List.of(CoreValuesSourceType.NUMERIC, CoreValuesSourceType.DATE, CoreValuesSourceType.BOOLEAN), MinAggregator::new, - true); + true + ); } - MinAggregatorFactory(String name, ValuesSourceConfig config, AggregationContext context, - AggregatorFactory parent, AggregatorFactories.Builder subFactoriesBuilder, - Map metadata, - MetricAggregatorSupplier aggregatorSupplier) throws IOException { + MinAggregatorFactory( + String name, + ValuesSourceConfig config, + AggregationContext context, + AggregatorFactory parent, + AggregatorFactories.Builder subFactoriesBuilder, + Map metadata, + MetricAggregatorSupplier aggregatorSupplier + ) throws IOException { super(name, config, context, parent, subFactoriesBuilder, metadata); this.aggregatorSupplier = aggregatorSupplier; } @@ -48,12 +54,8 @@ protected Aggregator createUnmapped(Aggregator parent, Map metad } @Override - protected Aggregator doCreateInternal( - Aggregator parent, - CardinalityUpperBound bucketCardinality, - Map metadata - ) throws IOException { - return aggregatorSupplier - .build(name, config, context, parent, metadata); + protected Aggregator doCreateInternal(Aggregator parent, CardinalityUpperBound bucketCardinality, Map metadata) + throws IOException { + return aggregatorSupplier.build(name, config, context, parent, metadata); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/NumericMetricsAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/NumericMetricsAggregator.java index d594cf3cf9e78..081a82690a5f8 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/NumericMetricsAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/NumericMetricsAggregator.java @@ -33,8 +33,14 @@ protected SingleValue(String name, AggregationContext context, Aggregator parent @Override public BucketComparator bucketComparator(String key, SortOrder order) { if (key != null && false == "value".equals(key)) { - throw new IllegalArgumentException("Ordering on a single-value metrics aggregation can only be done on its value. 
" + - "Either drop the key (a la \"" + name() + "\") or change it to \"value\" (a la \"" + name() + ".value\")"); + throw new IllegalArgumentException( + "Ordering on a single-value metrics aggregation can only be done on its value. " + + "Either drop the key (a la \"" + + name() + + "\") or change it to \"value\" (a la \"" + + name() + + ".value\")" + ); } return (lhs, rhs) -> Comparators.compareDiscardNaN(metric(lhs), metric(rhs), order == SortOrder.ASC); } @@ -56,8 +62,7 @@ public BucketComparator bucketComparator(String key, SortOrder order) { throw new IllegalArgumentException("When ordering on a multi-value metrics aggregation a metric name must be specified."); } if (false == hasMetric(key)) { - throw new IllegalArgumentException( - "Unknown metric name [" + key + "] on multi-value metrics aggregation [" + name() + "]"); + throw new IllegalArgumentException("Unknown metric name [" + key + "] on multi-value metrics aggregation [" + name() + "]"); } // TODO it'd be faster replace hasMetric and metric with something that returned a function from long to double. return (lhs, rhs) -> Comparators.compareDiscardNaN(metric(key, lhs), metric(key, rhs), order == SortOrder.ASC); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedCardinality.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedCardinality.java index e0e39ed44e47a..f6be0d4f6a12c 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedCardinality.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedCardinality.java @@ -40,7 +40,10 @@ public String getType() { } private static final ObjectParser PARSER = new ObjectParser<>( - ParsedCardinality.class.getSimpleName(), true, ParsedCardinality::new); + ParsedCardinality.class.getSimpleName(), + true, + ParsedCardinality::new + ); static { declareAggregationFields(PARSER); @@ -54,8 +57,7 @@ public static ParsedCardinality fromXContent(XContentParser parser, final String } @Override - protected XContentBuilder doXContentBody(XContentBuilder builder, Params params) - throws IOException { + protected XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException { builder.field(CommonFields.VALUE.getPreferredName(), cardinalityValue); return builder; } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedExtendedStats.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedExtendedStats.java index 8006c45f046fa..22bb3d2340a30 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedExtendedStats.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedExtendedStats.java @@ -8,10 +8,10 @@ package org.elasticsearch.search.aggregations.metrics; -import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.xcontent.ConstructingObjectParser; import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.ObjectParser.ValueType; +import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.search.aggregations.metrics.InternalExtendedStats.Fields; @@ -148,23 +148,35 @@ public String getStdDeviationSamplingAsString() { public String getStdDeviationBoundAsString(Bounds bound) { switch (bound) { case UPPER: - return 
valueAsString.getOrDefault(Fields.STD_DEVIATION_BOUNDS_AS_STRING + "_upper", - Double.toString(stdDeviationBoundUpper)); + return valueAsString.getOrDefault( + Fields.STD_DEVIATION_BOUNDS_AS_STRING + "_upper", + Double.toString(stdDeviationBoundUpper) + ); case UPPER_POPULATION: - return valueAsString.getOrDefault(Fields.STD_DEVIATION_BOUNDS_AS_STRING + "_upper_population", - Double.toString(stdDeviationBoundUpperPopulation)); + return valueAsString.getOrDefault( + Fields.STD_DEVIATION_BOUNDS_AS_STRING + "_upper_population", + Double.toString(stdDeviationBoundUpperPopulation) + ); case UPPER_SAMPLING: - return valueAsString.getOrDefault(Fields.STD_DEVIATION_BOUNDS_AS_STRING + "_upper_sampling", - Double.toString(stdDeviationBoundUpperSampling)); + return valueAsString.getOrDefault( + Fields.STD_DEVIATION_BOUNDS_AS_STRING + "_upper_sampling", + Double.toString(stdDeviationBoundUpperSampling) + ); case LOWER: - return valueAsString.getOrDefault(Fields.STD_DEVIATION_BOUNDS_AS_STRING + "_lower", - Double.toString(stdDeviationBoundLower)); + return valueAsString.getOrDefault( + Fields.STD_DEVIATION_BOUNDS_AS_STRING + "_lower", + Double.toString(stdDeviationBoundLower) + ); case LOWER_POPULATION: - return valueAsString.getOrDefault(Fields.STD_DEVIATION_BOUNDS_AS_STRING + "_lower_population", - Double.toString(stdDeviationBoundLowerPopulation)); + return valueAsString.getOrDefault( + Fields.STD_DEVIATION_BOUNDS_AS_STRING + "_lower_population", + Double.toString(stdDeviationBoundLowerPopulation) + ); case LOWER_SAMPLING: - return valueAsString.getOrDefault(Fields.STD_DEVIATION_BOUNDS_AS_STRING + "_lower_sampling", - Double.toString(stdDeviationBoundLowerSampling)); + return valueAsString.getOrDefault( + Fields.STD_DEVIATION_BOUNDS_AS_STRING + "_lower_sampling", + Double.toString(stdDeviationBoundLowerSampling) + ); default: throw new IllegalArgumentException("Unknown bounds type " + bound); } @@ -251,30 +263,61 @@ protected XContentBuilder otherStatsToXContent(XContentBuilder builder, Params p return builder; } - private static final ObjectParser PARSER = new ObjectParser<>(ParsedExtendedStats.class.getSimpleName(), - true, ParsedExtendedStats::new); + private static final ObjectParser PARSER = new ObjectParser<>( + ParsedExtendedStats.class.getSimpleName(), + true, + ParsedExtendedStats::new + ); private static final ConstructingObjectParser, Void> STD_BOUNDS_PARSER = new ConstructingObjectParser<>( - ParsedExtendedStats.class.getSimpleName() + "_STD_BOUNDS", true, args -> Arrays.stream(args).map(d -> (Double) d).collect( - Collectors.toList())); + ParsedExtendedStats.class.getSimpleName() + "_STD_BOUNDS", + true, + args -> Arrays.stream(args).map(d -> (Double) d).collect(Collectors.toList()) + ); private static final ConstructingObjectParser, Void> STD_BOUNDS_AS_STRING_PARSER = new ConstructingObjectParser<>( - ParsedExtendedStats.class.getSimpleName() + "_STD_BOUNDS_AS_STRING", true, args -> Arrays.stream(args).map(d -> (String) d).collect( - Collectors.toList())); + ParsedExtendedStats.class.getSimpleName() + "_STD_BOUNDS_AS_STRING", + true, + args -> Arrays.stream(args).map(d -> (String) d).collect(Collectors.toList()) + ); static { - STD_BOUNDS_PARSER.declareField(constructorArg(), (parser, context) -> parseDouble(parser, 0), - new ParseField(Fields.UPPER), ValueType.DOUBLE_OR_NULL); - STD_BOUNDS_PARSER.declareField(constructorArg(), (parser, context) -> parseDouble(parser, 0), - new ParseField(Fields.LOWER), ValueType.DOUBLE_OR_NULL); - 
STD_BOUNDS_PARSER.declareField(optionalConstructorArg(), (parser, context) -> parseDouble(parser, 0), - new ParseField(Fields.UPPER_POPULATION), ValueType.DOUBLE_OR_NULL); - STD_BOUNDS_PARSER.declareField(optionalConstructorArg(), (parser, context) -> parseDouble(parser, 0), - new ParseField(Fields.LOWER_POPULATION), ValueType.DOUBLE_OR_NULL); - STD_BOUNDS_PARSER.declareField(optionalConstructorArg(), (parser, context) -> parseDouble(parser, 0), - new ParseField(Fields.UPPER_SAMPLING), ValueType.DOUBLE_OR_NULL); - STD_BOUNDS_PARSER.declareField(optionalConstructorArg(), (parser, context) -> parseDouble(parser, 0), - new ParseField(Fields.LOWER_SAMPLING), ValueType.DOUBLE_OR_NULL); + STD_BOUNDS_PARSER.declareField( + constructorArg(), + (parser, context) -> parseDouble(parser, 0), + new ParseField(Fields.UPPER), + ValueType.DOUBLE_OR_NULL + ); + STD_BOUNDS_PARSER.declareField( + constructorArg(), + (parser, context) -> parseDouble(parser, 0), + new ParseField(Fields.LOWER), + ValueType.DOUBLE_OR_NULL + ); + STD_BOUNDS_PARSER.declareField( + optionalConstructorArg(), + (parser, context) -> parseDouble(parser, 0), + new ParseField(Fields.UPPER_POPULATION), + ValueType.DOUBLE_OR_NULL + ); + STD_BOUNDS_PARSER.declareField( + optionalConstructorArg(), + (parser, context) -> parseDouble(parser, 0), + new ParseField(Fields.LOWER_POPULATION), + ValueType.DOUBLE_OR_NULL + ); + STD_BOUNDS_PARSER.declareField( + optionalConstructorArg(), + (parser, context) -> parseDouble(parser, 0), + new ParseField(Fields.UPPER_SAMPLING), + ValueType.DOUBLE_OR_NULL + ); + STD_BOUNDS_PARSER.declareField( + optionalConstructorArg(), + (parser, context) -> parseDouble(parser, 0), + new ParseField(Fields.LOWER_SAMPLING), + ValueType.DOUBLE_OR_NULL + ); STD_BOUNDS_AS_STRING_PARSER.declareString(constructorArg(), new ParseField(Fields.UPPER)); STD_BOUNDS_AS_STRING_PARSER.declareString(constructorArg(), new ParseField(Fields.LOWER)); STD_BOUNDS_AS_STRING_PARSER.declareString(optionalConstructorArg(), new ParseField(Fields.UPPER_POPULATION)); @@ -286,38 +329,86 @@ protected XContentBuilder otherStatsToXContent(XContentBuilder builder, Params p protected static void declareExtendedStatsFields(ObjectParser objectParser) { declareStatsFields(objectParser); - objectParser.declareField((agg, value) -> agg.sumOfSquares = value, (parser, context) -> parseDouble(parser, 0), - new ParseField(Fields.SUM_OF_SQRS), ValueType.DOUBLE_OR_NULL); - objectParser.declareField((agg, value) -> agg.variance = value, (parser, context) -> parseDouble(parser, 0), - new ParseField(Fields.VARIANCE), ValueType.DOUBLE_OR_NULL); - objectParser.declareField((agg, value) -> agg.variancePopulation = value, (parser, context) -> parseDouble(parser, 0), - new ParseField(Fields.VARIANCE_POPULATION), ValueType.DOUBLE_OR_NULL); - objectParser.declareField((agg, value) -> agg.varianceSampling = value, (parser, context) -> parseDouble(parser, 0), - new ParseField(Fields.VARIANCE_SAMPLING), ValueType.DOUBLE_OR_NULL); - objectParser.declareField((agg, value) -> agg.stdDeviation = value, (parser, context) -> parseDouble(parser, 0), - new ParseField(Fields.STD_DEVIATION), ValueType.DOUBLE_OR_NULL); - objectParser.declareField((agg, value) -> agg.stdDeviationPopulation = value, (parser, context) -> parseDouble(parser, 0), - new ParseField(Fields.STD_DEVIATION_POPULATION), ValueType.DOUBLE_OR_NULL); - objectParser.declareField((agg, value) -> agg.stdDeviationSampling = value, (parser, context) -> parseDouble(parser, 0), - new ParseField(Fields.STD_DEVIATION_SAMPLING), 
ValueType.DOUBLE_OR_NULL); - objectParser.declareObject(ParsedExtendedStats::setStdDeviationBounds, STD_BOUNDS_PARSER, - new ParseField(Fields.STD_DEVIATION_BOUNDS)); - objectParser.declareString((agg, value) -> agg.valueAsString.put(Fields.SUM_OF_SQRS_AS_STRING, value), - new ParseField(Fields.SUM_OF_SQRS_AS_STRING)); - objectParser.declareString((agg, value) -> agg.valueAsString.put(Fields.VARIANCE_AS_STRING, value), - new ParseField(Fields.VARIANCE_AS_STRING)); - objectParser.declareString((agg, value) -> agg.valueAsString.put(Fields.VARIANCE_POPULATION_AS_STRING, value), - new ParseField(Fields.VARIANCE_POPULATION_AS_STRING)); - objectParser.declareString((agg, value) -> agg.valueAsString.put(Fields.VARIANCE_SAMPLING_AS_STRING, value), - new ParseField(Fields.VARIANCE_SAMPLING_AS_STRING)); - objectParser.declareString((agg, value) -> agg.valueAsString.put(Fields.STD_DEVIATION_AS_STRING, value), - new ParseField(Fields.STD_DEVIATION_AS_STRING)); - objectParser.declareString((agg, value) -> agg.valueAsString.put(Fields.STD_DEVIATION_POPULATION_AS_STRING, value), - new ParseField(Fields.STD_DEVIATION_POPULATION_AS_STRING)); - objectParser.declareString((agg, value) -> agg.valueAsString.put(Fields.STD_DEVIATION_SAMPLING_AS_STRING, value), - new ParseField(Fields.STD_DEVIATION_SAMPLING_AS_STRING)); - objectParser.declareObject(ParsedExtendedStats::setStdDeviationBoundsAsString, STD_BOUNDS_AS_STRING_PARSER, - new ParseField(Fields.STD_DEVIATION_BOUNDS_AS_STRING)); + objectParser.declareField( + (agg, value) -> agg.sumOfSquares = value, + (parser, context) -> parseDouble(parser, 0), + new ParseField(Fields.SUM_OF_SQRS), + ValueType.DOUBLE_OR_NULL + ); + objectParser.declareField( + (agg, value) -> agg.variance = value, + (parser, context) -> parseDouble(parser, 0), + new ParseField(Fields.VARIANCE), + ValueType.DOUBLE_OR_NULL + ); + objectParser.declareField( + (agg, value) -> agg.variancePopulation = value, + (parser, context) -> parseDouble(parser, 0), + new ParseField(Fields.VARIANCE_POPULATION), + ValueType.DOUBLE_OR_NULL + ); + objectParser.declareField( + (agg, value) -> agg.varianceSampling = value, + (parser, context) -> parseDouble(parser, 0), + new ParseField(Fields.VARIANCE_SAMPLING), + ValueType.DOUBLE_OR_NULL + ); + objectParser.declareField( + (agg, value) -> agg.stdDeviation = value, + (parser, context) -> parseDouble(parser, 0), + new ParseField(Fields.STD_DEVIATION), + ValueType.DOUBLE_OR_NULL + ); + objectParser.declareField( + (agg, value) -> agg.stdDeviationPopulation = value, + (parser, context) -> parseDouble(parser, 0), + new ParseField(Fields.STD_DEVIATION_POPULATION), + ValueType.DOUBLE_OR_NULL + ); + objectParser.declareField( + (agg, value) -> agg.stdDeviationSampling = value, + (parser, context) -> parseDouble(parser, 0), + new ParseField(Fields.STD_DEVIATION_SAMPLING), + ValueType.DOUBLE_OR_NULL + ); + objectParser.declareObject( + ParsedExtendedStats::setStdDeviationBounds, + STD_BOUNDS_PARSER, + new ParseField(Fields.STD_DEVIATION_BOUNDS) + ); + objectParser.declareString( + (agg, value) -> agg.valueAsString.put(Fields.SUM_OF_SQRS_AS_STRING, value), + new ParseField(Fields.SUM_OF_SQRS_AS_STRING) + ); + objectParser.declareString( + (agg, value) -> agg.valueAsString.put(Fields.VARIANCE_AS_STRING, value), + new ParseField(Fields.VARIANCE_AS_STRING) + ); + objectParser.declareString( + (agg, value) -> agg.valueAsString.put(Fields.VARIANCE_POPULATION_AS_STRING, value), + new ParseField(Fields.VARIANCE_POPULATION_AS_STRING) + ); + objectParser.declareString( + (agg, 
value) -> agg.valueAsString.put(Fields.VARIANCE_SAMPLING_AS_STRING, value), + new ParseField(Fields.VARIANCE_SAMPLING_AS_STRING) + ); + objectParser.declareString( + (agg, value) -> agg.valueAsString.put(Fields.STD_DEVIATION_AS_STRING, value), + new ParseField(Fields.STD_DEVIATION_AS_STRING) + ); + objectParser.declareString( + (agg, value) -> agg.valueAsString.put(Fields.STD_DEVIATION_POPULATION_AS_STRING, value), + new ParseField(Fields.STD_DEVIATION_POPULATION_AS_STRING) + ); + objectParser.declareString( + (agg, value) -> agg.valueAsString.put(Fields.STD_DEVIATION_SAMPLING_AS_STRING, value), + new ParseField(Fields.STD_DEVIATION_SAMPLING_AS_STRING) + ); + objectParser.declareObject( + ParsedExtendedStats::setStdDeviationBoundsAsString, + STD_BOUNDS_AS_STRING_PARSER, + new ParseField(Fields.STD_DEVIATION_BOUNDS_AS_STRING) + ); } public static ParsedExtendedStats fromXContent(XContentParser parser, final String name) { diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedGeoBounds.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedGeoBounds.java index 30586196e88a3..078624794e9cf 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedGeoBounds.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedGeoBounds.java @@ -8,14 +8,14 @@ package org.elasticsearch.search.aggregations.metrics; -import org.elasticsearch.core.Nullable; -import org.elasticsearch.core.Tuple; import org.elasticsearch.common.geo.GeoBoundingBox; import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.xcontent.ConstructingObjectParser; import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.core.Tuple; import org.elasticsearch.search.aggregations.ParsedAggregation; import java.io.IOException; @@ -60,21 +60,31 @@ public GeoPoint bottomRight() { return geoBoundingBox != null ? 
geoBoundingBox.bottomRight() : null; } - private static final ObjectParser PARSER = new ObjectParser<>(ParsedGeoBounds.class.getSimpleName(), true, - ParsedGeoBounds::new); + private static final ObjectParser PARSER = new ObjectParser<>( + ParsedGeoBounds.class.getSimpleName(), + true, + ParsedGeoBounds::new + ); - private static final ConstructingObjectParser, Void> BOUNDS_PARSER = - new ConstructingObjectParser<>(ParsedGeoBounds.class.getSimpleName() + "_BOUNDS", true, - args -> new Tuple<>((GeoPoint) args[0], (GeoPoint) args[1])); + private static final ConstructingObjectParser, Void> BOUNDS_PARSER = new ConstructingObjectParser<>( + ParsedGeoBounds.class.getSimpleName() + "_BOUNDS", + true, + args -> new Tuple<>((GeoPoint) args[0], (GeoPoint) args[1]) + ); private static final ObjectParser GEO_POINT_PARSER = new ObjectParser<>( - ParsedGeoBounds.class.getSimpleName() + "_POINT", true, GeoPoint::new); + ParsedGeoBounds.class.getSimpleName() + "_POINT", + true, + GeoPoint::new + ); static { declareAggregationFields(PARSER); - PARSER.declareObject((agg, bbox) -> { - agg.geoBoundingBox = new GeoBoundingBox(bbox.v1(), bbox.v2()); - }, BOUNDS_PARSER, BOUNDS_FIELD); + PARSER.declareObject( + (agg, bbox) -> { agg.geoBoundingBox = new GeoBoundingBox(bbox.v1(), bbox.v2()); }, + BOUNDS_PARSER, + BOUNDS_FIELD + ); BOUNDS_PARSER.declareObject(constructorArg(), GEO_POINT_PARSER, TOP_LEFT_FIELD); BOUNDS_PARSER.declareObject(constructorArg(), GEO_POINT_PARSER, BOTTOM_RIGHT_FIELD); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedGeoCentroid.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedGeoCentroid.java index 2278b969fa169..ac1f881cf4d8e 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedGeoCentroid.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedGeoCentroid.java @@ -53,11 +53,17 @@ public XContentBuilder doXContentBody(XContentBuilder builder, Params params) th return builder; } - private static final ObjectParser PARSER = new ObjectParser<>(ParsedGeoCentroid.class.getSimpleName(), true, - ParsedGeoCentroid::new); + private static final ObjectParser PARSER = new ObjectParser<>( + ParsedGeoCentroid.class.getSimpleName(), + true, + ParsedGeoCentroid::new + ); private static final ObjectParser GEO_POINT_PARSER = new ObjectParser<>( - ParsedGeoCentroid.class.getSimpleName() + "_POINT", true, GeoPoint::new); + ParsedGeoCentroid.class.getSimpleName() + "_POINT", + true, + GeoPoint::new + ); static { declareAggregationFields(PARSER); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedHDRPercentileRanks.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedHDRPercentileRanks.java index 332c60446303a..84a54b3295ce8 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedHDRPercentileRanks.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedHDRPercentileRanks.java @@ -38,8 +38,11 @@ public Percentile next() { }; } - private static final ObjectParser PARSER = - new ObjectParser<>(ParsedHDRPercentileRanks.class.getSimpleName(), true, ParsedHDRPercentileRanks::new); + private static final ObjectParser PARSER = new ObjectParser<>( + ParsedHDRPercentileRanks.class.getSimpleName(), + true, + ParsedHDRPercentileRanks::new + ); static { ParsedPercentiles.declarePercentilesFields(PARSER); } diff --git 
a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedHDRPercentiles.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedHDRPercentiles.java index d8e398ab64c9b..b56fc98b306ee 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedHDRPercentiles.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedHDRPercentiles.java @@ -31,8 +31,11 @@ public String percentileAsString(double percent) { return getPercentileAsString(percent); } - private static final ObjectParser PARSER = - new ObjectParser<>(ParsedHDRPercentiles.class.getSimpleName(), true, ParsedHDRPercentiles::new); + private static final ObjectParser PARSER = new ObjectParser<>( + ParsedHDRPercentiles.class.getSimpleName(), + true, + ParsedHDRPercentiles::new + ); static { ParsedPercentiles.declarePercentilesFields(PARSER); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedPercentiles.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedPercentiles.java index ae19266c30a61..e75026257a1cd 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedPercentiles.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedPercentiles.java @@ -19,7 +19,7 @@ import java.util.LinkedHashMap; import java.util.Map; -public abstract class ParsedPercentiles extends ParsedAggregation implements Iterable { +public abstract class ParsedPercentiles extends ParsedAggregation implements Iterable { protected final Map percentiles = new LinkedHashMap<>(); protected final Map percentilesAsString = new HashMap<>(); @@ -61,6 +61,7 @@ void setKeyed(boolean keyed) { public Iterator iterator() { return new Iterator() { final Iterator> iterator = percentiles.entrySet().iterator(); + @Override public boolean hasNext() { return iterator.hasNext(); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedScriptedMetric.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedScriptedMetric.java index 2cba686558eaf..4ee11228510cc 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedScriptedMetric.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedScriptedMetric.java @@ -39,13 +39,20 @@ public XContentBuilder doXContentBody(XContentBuilder builder, Params params) th return builder.field(CommonFields.VALUE.getPreferredName(), aggregation()); } - private static final ObjectParser PARSER = - new ObjectParser<>(ParsedScriptedMetric.class.getSimpleName(), true, ParsedScriptedMetric::new); + private static final ObjectParser PARSER = new ObjectParser<>( + ParsedScriptedMetric.class.getSimpleName(), + true, + ParsedScriptedMetric::new + ); static { declareAggregationFields(PARSER); - PARSER.declareField((agg, value) -> agg.aggregation = Collections.singletonList(value), - ParsedScriptedMetric::parseValue, CommonFields.VALUE, ValueType.VALUE_OBJECT_ARRAY); + PARSER.declareField( + (agg, value) -> agg.aggregation = Collections.singletonList(value), + ParsedScriptedMetric::parseValue, + CommonFields.VALUE, + ValueType.VALUE_OBJECT_ARRAY + ); } private static Object parseValue(XContentParser parser) throws IOException { @@ -55,14 +62,14 @@ private static Object parseValue(XContentParser parser) throws IOException { value = null; } else if (token.isValue()) { if (token == XContentParser.Token.VALUE_STRING) { - //binary values will be parsed 
back and returned as base64 strings when reading from json and yaml + // binary values will be parsed back and returned as base64 strings when reading from json and yaml value = parser.text(); } else if (token == XContentParser.Token.VALUE_NUMBER) { value = parser.numberValue(); } else if (token == XContentParser.Token.VALUE_BOOLEAN) { value = parser.booleanValue(); } else if (token == XContentParser.Token.VALUE_EMBEDDED_OBJECT) { - //binary values will be parsed back and returned as BytesArray when reading from cbor and smile + // binary values will be parsed back and returned as BytesArray when reading from cbor and smile value = new BytesArray(parser.binaryValue()); } } else if (token == XContentParser.Token.START_OBJECT) { diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedSingleValueNumericMetricsAggregation.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedSingleValueNumericMetricsAggregation.java index d25c009b9c755..e34e3631992e2 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedSingleValueNumericMetricsAggregation.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedSingleValueNumericMetricsAggregation.java @@ -12,7 +12,8 @@ import org.elasticsearch.search.aggregations.ParsedAggregation; public abstract class ParsedSingleValueNumericMetricsAggregation extends ParsedAggregation - implements NumericMetricsAggregation.SingleValue { + implements + NumericMetricsAggregation.SingleValue { protected double value; protected String valueAsString; @@ -39,11 +40,17 @@ protected void setValueAsString(String valueAsString) { this.valueAsString = valueAsString; } - protected static void declareSingleValueFields(ObjectParser objectParser, - double defaultNullValue) { + protected static void declareSingleValueFields( + ObjectParser objectParser, + double defaultNullValue + ) { declareAggregationFields(objectParser); - objectParser.declareField(ParsedSingleValueNumericMetricsAggregation::setValue, - (parser, context) -> parseDouble(parser, defaultNullValue), CommonFields.VALUE, ValueType.DOUBLE_OR_NULL); + objectParser.declareField( + ParsedSingleValueNumericMetricsAggregation::setValue, + (parser, context) -> parseDouble(parser, defaultNullValue), + CommonFields.VALUE, + ValueType.DOUBLE_OR_NULL + ); objectParser.declareString(ParsedSingleValueNumericMetricsAggregation::setValueAsString, CommonFields.VALUE_AS_STRING); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedStats.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedStats.java index 5971dafc85b1e..adfd0c1c67c2d 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedStats.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedStats.java @@ -8,9 +8,9 @@ package org.elasticsearch.search.aggregations.metrics; -import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.ObjectParser.ValueType; +import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.search.aggregations.ParsedAggregation; @@ -82,11 +82,16 @@ public String getSumAsString() { public double value(String name) { Metrics metrics = Metrics.valueOf(name); switch (metrics) { - case min: return min; - case 
max: return max; - case avg: return avg; - case count: return count; - case sum: return sum; + case min: + return min; + case max: + return max; + case avg: + return avg; + case count: + return count; + case sum: + return sum; default: throw new IllegalArgumentException("Unknown value [" + name + "] in common stats aggregation"); } @@ -126,8 +131,11 @@ protected XContentBuilder doXContentBody(XContentBuilder builder, Params params) return builder; } - private static final ObjectParser PARSER = new ObjectParser<>(ParsedStats.class.getSimpleName(), true, - ParsedStats::new); + private static final ObjectParser PARSER = new ObjectParser<>( + ParsedStats.class.getSimpleName(), + true, + ParsedStats::new + ); static { declareStatsFields(PARSER); @@ -136,22 +144,46 @@ protected XContentBuilder doXContentBody(XContentBuilder builder, Params params) protected static void declareStatsFields(ObjectParser objectParser) { declareAggregationFields(objectParser); objectParser.declareLong((agg, value) -> agg.count = value, new ParseField(Fields.COUNT)); - objectParser.declareField((agg, value) -> agg.min = value, (parser, context) -> parseDouble(parser, Double.POSITIVE_INFINITY), - new ParseField(Fields.MIN), ValueType.DOUBLE_OR_NULL); - objectParser.declareField((agg, value) -> agg.max = value, (parser, context) -> parseDouble(parser, Double.NEGATIVE_INFINITY), - new ParseField(Fields.MAX), ValueType.DOUBLE_OR_NULL); - objectParser.declareField((agg, value) -> agg.avg = value, (parser, context) -> parseDouble(parser, 0), new ParseField(Fields.AVG), - ValueType.DOUBLE_OR_NULL); - objectParser.declareField((agg, value) -> agg.sum = value, (parser, context) -> parseDouble(parser, 0), new ParseField(Fields.SUM), - ValueType.DOUBLE_OR_NULL); - objectParser.declareString((agg, value) -> agg.valueAsString.put(Fields.MIN_AS_STRING, value), - new ParseField(Fields.MIN_AS_STRING)); - objectParser.declareString((agg, value) -> agg.valueAsString.put(Fields.MAX_AS_STRING, value), - new ParseField(Fields.MAX_AS_STRING)); - objectParser.declareString((agg, value) -> agg.valueAsString.put(Fields.AVG_AS_STRING, value), - new ParseField(Fields.AVG_AS_STRING)); - objectParser.declareString((agg, value) -> agg.valueAsString.put(Fields.SUM_AS_STRING, value), - new ParseField(Fields.SUM_AS_STRING)); + objectParser.declareField( + (agg, value) -> agg.min = value, + (parser, context) -> parseDouble(parser, Double.POSITIVE_INFINITY), + new ParseField(Fields.MIN), + ValueType.DOUBLE_OR_NULL + ); + objectParser.declareField( + (agg, value) -> agg.max = value, + (parser, context) -> parseDouble(parser, Double.NEGATIVE_INFINITY), + new ParseField(Fields.MAX), + ValueType.DOUBLE_OR_NULL + ); + objectParser.declareField( + (agg, value) -> agg.avg = value, + (parser, context) -> parseDouble(parser, 0), + new ParseField(Fields.AVG), + ValueType.DOUBLE_OR_NULL + ); + objectParser.declareField( + (agg, value) -> agg.sum = value, + (parser, context) -> parseDouble(parser, 0), + new ParseField(Fields.SUM), + ValueType.DOUBLE_OR_NULL + ); + objectParser.declareString( + (agg, value) -> agg.valueAsString.put(Fields.MIN_AS_STRING, value), + new ParseField(Fields.MIN_AS_STRING) + ); + objectParser.declareString( + (agg, value) -> agg.valueAsString.put(Fields.MAX_AS_STRING, value), + new ParseField(Fields.MAX_AS_STRING) + ); + objectParser.declareString( + (agg, value) -> agg.valueAsString.put(Fields.AVG_AS_STRING, value), + new ParseField(Fields.AVG_AS_STRING) + ); + objectParser.declareString( + (agg, value) -> 
agg.valueAsString.put(Fields.SUM_AS_STRING, value), + new ParseField(Fields.SUM_AS_STRING) + ); } public static ParsedStats fromXContent(XContentParser parser, final String name) { diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedTDigestPercentileRanks.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedTDigestPercentileRanks.java index bafa57ad77ab1..a0d295e40a2f6 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedTDigestPercentileRanks.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedTDigestPercentileRanks.java @@ -38,8 +38,11 @@ public Percentile next() { }; } - private static final ObjectParser PARSER = - new ObjectParser<>(ParsedTDigestPercentileRanks.class.getSimpleName(), true, ParsedTDigestPercentileRanks::new); + private static final ObjectParser PARSER = new ObjectParser<>( + ParsedTDigestPercentileRanks.class.getSimpleName(), + true, + ParsedTDigestPercentileRanks::new + ); static { ParsedPercentiles.declarePercentilesFields(PARSER); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedTDigestPercentiles.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedTDigestPercentiles.java index 336e92f08d932..e814ce27d3989 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedTDigestPercentiles.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedTDigestPercentiles.java @@ -41,8 +41,11 @@ public Iterable valueNames() { return percentiles.keySet().stream().map(d -> d.toString()).collect(Collectors.toList()); } - private static final ObjectParser PARSER = - new ObjectParser<>(ParsedTDigestPercentiles.class.getSimpleName(), true, ParsedTDigestPercentiles::new); + private static final ObjectParser PARSER = new ObjectParser<>( + ParsedTDigestPercentiles.class.getSimpleName(), + true, + ParsedTDigestPercentiles::new + ); static { ParsedPercentiles.declarePercentilesFields(PARSER); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedTopHits.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedTopHits.java index 4b93cc9a86c4b..230e6c63d76f1 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedTopHits.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedTopHits.java @@ -8,8 +8,8 @@ package org.elasticsearch.search.aggregations.metrics; -import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.xcontent.ObjectParser; +import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.search.SearchHits; @@ -36,12 +36,18 @@ protected XContentBuilder doXContentBody(XContentBuilder builder, Params params) return searchHits.toXContent(builder, params); } - private static final ObjectParser PARSER = - new ObjectParser<>(ParsedTopHits.class.getSimpleName(), true, ParsedTopHits::new); + private static final ObjectParser PARSER = new ObjectParser<>( + ParsedTopHits.class.getSimpleName(), + true, + ParsedTopHits::new + ); static { declareAggregationFields(PARSER); - PARSER.declareObject((topHit, searchHits) -> topHit.searchHits = searchHits, (parser, context) -> SearchHits.fromXContent(parser), - new ParseField(SearchHits.Fields.HITS)); + PARSER.declareObject( + (topHit, 
searchHits) -> topHit.searchHits = searchHits, + (parser, context) -> SearchHits.fromXContent(parser), + new ParseField(SearchHits.Fields.HITS) + ); } public static ParsedTopHits fromXContent(XContentParser parser, String name) throws IOException { diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedValueCount.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedValueCount.java index 3938ecd155613..819bf729341dd 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedValueCount.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedValueCount.java @@ -47,8 +47,11 @@ protected XContentBuilder doXContentBody(XContentBuilder builder, Params params) return builder; } - private static final ObjectParser PARSER = new ObjectParser<>(ParsedValueCount.class.getSimpleName(), true, - ParsedValueCount::new); + private static final ObjectParser PARSER = new ObjectParser<>( + ParsedValueCount.class.getSimpleName(), + true, + ParsedValueCount::new + ); static { declareAggregationFields(PARSER); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedWeightedAvg.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedWeightedAvg.java index b1522f432868d..d3e744b91590a 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedWeightedAvg.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedWeightedAvg.java @@ -38,8 +38,11 @@ protected XContentBuilder doXContentBody(XContentBuilder builder, Params params) return builder; } - private static final ObjectParser PARSER - = new ObjectParser<>(ParsedWeightedAvg.class.getSimpleName(), true, ParsedWeightedAvg::new); + private static final ObjectParser PARSER = new ObjectParser<>( + ParsedWeightedAvg.class.getSimpleName(), + true, + ParsedWeightedAvg::new + ); static { declareSingleValueFields(PARSER, Double.POSITIVE_INFINITY); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/Percentile.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/Percentile.java index 97464ef15eecb..2cedf128b4ab6 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/Percentile.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/Percentile.java @@ -37,8 +37,7 @@ public boolean equals(Object o) { return false; } Percentile that = (Percentile) o; - return Double.compare(that.percent, percent) == 0 - && Double.compare(that.value, value) == 0; + return Double.compare(that.percent, percent) == 0 && Double.compare(that.value, value) == 0; } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/PercentileRanksAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/PercentileRanksAggregationBuilder.java index 2482a5589b590..f0b4c3191ffd6 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/PercentileRanksAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/PercentileRanksAggregationBuilder.java @@ -8,9 +8,9 @@ package org.elasticsearch.search.aggregations.metrics; -import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.xcontent.ConstructingObjectParser; +import org.elasticsearch.common.xcontent.ParseField; import 
org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregatorFactories; import org.elasticsearch.search.aggregations.AggregatorFactory; @@ -30,12 +30,13 @@ public class PercentileRanksAggregationBuilder extends AbstractPercentilesAggreg new ValuesSourceRegistry.RegistryKey<>(NAME, PercentilesAggregatorSupplier.class); private static final ParseField VALUES_FIELD = new ParseField("values"); - public static final ConstructingObjectParser PARSER = - AbstractPercentilesAggregationBuilder.createParser( - PercentileRanksAggregationBuilder.NAME, - PercentileRanksAggregationBuilder::new, - PercentilesConfig.TDigest::new, - VALUES_FIELD); + public static final ConstructingObjectParser PARSER = AbstractPercentilesAggregationBuilder + .createParser( + PercentileRanksAggregationBuilder.NAME, + PercentileRanksAggregationBuilder::new, + PercentilesConfig.TDigest::new, + VALUES_FIELD + ); public static void registerAggregators(ValuesSourceRegistry.Builder builder) { PercentileRanksAggregatorFactory.registerAggregators(builder); @@ -53,9 +54,11 @@ public PercentileRanksAggregationBuilder(StreamInput in) throws IOException { super(VALUES_FIELD, in); } - private PercentileRanksAggregationBuilder(PercentileRanksAggregationBuilder clone, - AggregatorFactories.Builder factoriesBuilder, - Map metadata) { + private PercentileRanksAggregationBuilder( + PercentileRanksAggregationBuilder clone, + AggregatorFactories.Builder factoriesBuilder, + Map metadata + ) { super(clone, factoriesBuilder, metadata); } @@ -77,16 +80,27 @@ public double[] values() { } @Override - protected ValuesSourceAggregatorFactory innerBuild(AggregationContext context, - ValuesSourceConfig config, - AggregatorFactory parent, - AggregatorFactories.Builder subFactoriesBuilder) throws IOException { - - PercentilesAggregatorSupplier aggregatorSupplier = - context.getValuesSourceRegistry().getAggregator(REGISTRY_KEY, config); - - return new PercentileRanksAggregatorFactory(name, config, values, configOrDefault(), keyed, context, - parent, subFactoriesBuilder, metadata, aggregatorSupplier); + protected ValuesSourceAggregatorFactory innerBuild( + AggregationContext context, + ValuesSourceConfig config, + AggregatorFactory parent, + AggregatorFactories.Builder subFactoriesBuilder + ) throws IOException { + + PercentilesAggregatorSupplier aggregatorSupplier = context.getValuesSourceRegistry().getAggregator(REGISTRY_KEY, config); + + return new PercentileRanksAggregatorFactory( + name, + config, + values, + configOrDefault(), + keyed, + context, + parent, + subFactoriesBuilder, + metadata, + aggregatorSupplier + ); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/PercentileRanksAggregatorFactory.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/PercentileRanksAggregatorFactory.java index f5c696a9d2803..058ebce142e18 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/PercentileRanksAggregatorFactory.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/PercentileRanksAggregatorFactory.java @@ -35,19 +35,22 @@ static void registerAggregators(ValuesSourceRegistry.Builder builder) { List.of(CoreValuesSourceType.NUMERIC, CoreValuesSourceType.DATE, CoreValuesSourceType.BOOLEAN), (name, valuesSource, context, parent, percents, percentilesConfig, keyed, formatter, metadata) -> percentilesConfig .createPercentileRanksAggregator(name, valuesSource, context, parent, percents, keyed, formatter, 
metadata), - true); + true + ); } - PercentileRanksAggregatorFactory(String name, - ValuesSourceConfig config, - double[] percents, - PercentilesConfig percentilesConfig, - boolean keyed, - AggregationContext context, - AggregatorFactory parent, - AggregatorFactories.Builder subFactoriesBuilder, - Map metadata, - PercentilesAggregatorSupplier aggregatorSupplier) throws IOException { + PercentileRanksAggregatorFactory( + String name, + ValuesSourceConfig config, + double[] percents, + PercentilesConfig percentilesConfig, + boolean keyed, + AggregationContext context, + AggregatorFactory parent, + AggregatorFactories.Builder subFactoriesBuilder, + Map metadata, + PercentilesAggregatorSupplier aggregatorSupplier + ) throws IOException { super(name, config, context, parent, subFactoriesBuilder, metadata); this.percents = percents; this.percentilesConfig = percentilesConfig; @@ -57,18 +60,22 @@ static void registerAggregators(ValuesSourceRegistry.Builder builder) { @Override protected Aggregator createUnmapped(Aggregator parent, Map metadata) throws IOException { - return percentilesConfig.createPercentileRanksAggregator(name, null, context, parent, percents, keyed, - config.format(), metadata); + return percentilesConfig.createPercentileRanksAggregator(name, null, context, parent, percents, keyed, config.format(), metadata); } @Override - protected Aggregator doCreateInternal( - Aggregator parent, - CardinalityUpperBound bucketCardinality, - Map metadata - ) throws IOException { - return aggregatorSupplier - .build(name, config.getValuesSource(), context, parent, - percents, percentilesConfig, keyed, config.format(), metadata); + protected Aggregator doCreateInternal(Aggregator parent, CardinalityUpperBound bucketCardinality, Map metadata) + throws IOException { + return aggregatorSupplier.build( + name, + config.getValuesSource(), + context, + parent, + percents, + percentilesConfig, + keyed, + config.format(), + metadata + ); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/PercentilesAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/PercentilesAggregationBuilder.java index e8ce8842e176f..ccef0cb313238 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/PercentilesAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/PercentilesAggregationBuilder.java @@ -8,9 +8,9 @@ package org.elasticsearch.search.aggregations.metrics; -import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.xcontent.ConstructingObjectParser; +import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregatorFactories; import org.elasticsearch.search.aggregations.AggregatorFactory; @@ -33,19 +33,15 @@ public class PercentilesAggregationBuilder extends AbstractPercentilesAggregatio private static final double[] DEFAULT_PERCENTS = new double[] { 1, 5, 25, 50, 75, 95, 99 }; private static final ParseField PERCENTS_FIELD = new ParseField("percents"); - public static final ConstructingObjectParser PARSER = - AbstractPercentilesAggregationBuilder.createParser( - PercentilesAggregationBuilder.NAME, - (name, values, percentileConfig) -> { - if (values == null) { - values = DEFAULT_PERCENTS; // this is needed because Percentiles has a default, while Ranks does not - } else { - values = 
validatePercentiles(values, name); - } - return new PercentilesAggregationBuilder(name, values, percentileConfig); - }, - PercentilesConfig.TDigest::new, - PERCENTS_FIELD); + public static final ConstructingObjectParser PARSER = AbstractPercentilesAggregationBuilder + .createParser(PercentilesAggregationBuilder.NAME, (name, values, percentileConfig) -> { + if (values == null) { + values = DEFAULT_PERCENTS; // this is needed because Percentiles has a default, while Ranks does not + } else { + values = validatePercentiles(values, name); + } + return new PercentilesAggregationBuilder(name, values, percentileConfig); + }, PercentilesConfig.TDigest::new, PERCENTS_FIELD); public static void registerAggregators(ValuesSourceRegistry.Builder builder) { PercentilesAggregatorFactory.registerAggregators(builder); @@ -63,8 +59,11 @@ public PercentilesAggregationBuilder(String name, double[] values, PercentilesCo super(name, values, percentilesConfig, PERCENTS_FIELD); } - protected PercentilesAggregationBuilder(PercentilesAggregationBuilder clone, - AggregatorFactories.Builder factoriesBuilder, Map metadata) { + protected PercentilesAggregationBuilder( + PercentilesAggregationBuilder clone, + AggregatorFactories.Builder factoriesBuilder, + Map metadata + ) { super(clone, factoriesBuilder, metadata); } @@ -118,16 +117,26 @@ public double[] percentiles() { @Override protected ValuesSourceAggregatorFactory innerBuild( - AggregationContext context, - ValuesSourceConfig config, - AggregatorFactory parent, - AggregatorFactories.Builder subFactoriesBuilder) throws IOException { - - PercentilesAggregatorSupplier aggregatorSupplier = - context.getValuesSourceRegistry().getAggregator(REGISTRY_KEY, config); - - return new PercentilesAggregatorFactory(name, config, values, configOrDefault(), keyed, - context, parent, subFactoriesBuilder, metadata, aggregatorSupplier); + AggregationContext context, + ValuesSourceConfig config, + AggregatorFactory parent, + AggregatorFactories.Builder subFactoriesBuilder + ) throws IOException { + + PercentilesAggregatorSupplier aggregatorSupplier = context.getValuesSourceRegistry().getAggregator(REGISTRY_KEY, config); + + return new PercentilesAggregatorFactory( + name, + config, + values, + configOrDefault(), + keyed, + context, + parent, + subFactoriesBuilder, + metadata, + aggregatorSupplier + ); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/PercentilesAggregatorFactory.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/PercentilesAggregatorFactory.java index 5c0f29af71efe..b28b33cc462e0 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/PercentilesAggregatorFactory.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/PercentilesAggregatorFactory.java @@ -39,14 +39,22 @@ static void registerAggregators(ValuesSourceRegistry.Builder builder) { List.of(CoreValuesSourceType.NUMERIC, CoreValuesSourceType.DATE, CoreValuesSourceType.BOOLEAN), (name, valuesSource, context, parent, percents, percentilesConfig, keyed, formatter, metadata) -> percentilesConfig .createPercentilesAggregator(name, valuesSource, context, parent, percents, keyed, formatter, metadata), - true); + true + ); } - PercentilesAggregatorFactory(String name, ValuesSourceConfig config, double[] percents, - PercentilesConfig percentilesConfig, boolean keyed, AggregationContext context, - AggregatorFactory parent, AggregatorFactories.Builder subFactoriesBuilder, - Map metadata, - 
PercentilesAggregatorSupplier aggregatorSupplier) throws IOException { + PercentilesAggregatorFactory( + String name, + ValuesSourceConfig config, + double[] percents, + PercentilesConfig percentilesConfig, + boolean keyed, + AggregationContext context, + AggregatorFactory parent, + AggregatorFactories.Builder subFactoriesBuilder, + Map metadata, + PercentilesAggregatorSupplier aggregatorSupplier + ) throws IOException { super(name, config, context, parent, subFactoriesBuilder, metadata); this.aggregatorSupplier = aggregatorSupplier; this.percents = percents; @@ -55,21 +63,24 @@ static void registerAggregators(ValuesSourceRegistry.Builder builder) { } @Override - protected Aggregator createUnmapped(Aggregator parent, - Map metadata) throws IOException { + protected Aggregator createUnmapped(Aggregator parent, Map metadata) throws IOException { - return percentilesConfig.createPercentilesAggregator(name, null, context, parent, percents, keyed, - config.format(), metadata); + return percentilesConfig.createPercentilesAggregator(name, null, context, parent, percents, keyed, config.format(), metadata); } @Override - protected Aggregator doCreateInternal( - Aggregator parent, - CardinalityUpperBound bucketCardinality, - Map metadata - ) throws IOException { - return aggregatorSupplier - .build(name, config.getValuesSource(), context, parent, - percents, percentilesConfig, keyed, config.format(), metadata); + protected Aggregator doCreateInternal(Aggregator parent, CardinalityUpperBound bucketCardinality, Map metadata) + throws IOException { + return aggregatorSupplier.build( + name, + config.getValuesSource(), + context, + parent, + percents, + percentilesConfig, + keyed, + config.format(), + metadata + ); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/PercentilesAggregatorSupplier.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/PercentilesAggregatorSupplier.java index 87a43680455bb..5123b04fbe9e4 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/PercentilesAggregatorSupplier.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/PercentilesAggregatorSupplier.java @@ -16,13 +16,15 @@ import java.util.Map; public interface PercentilesAggregatorSupplier { - Aggregator build(String name, - ValuesSource valuesSource, - AggregationContext context, - Aggregator parent, - double[] percents, - PercentilesConfig percentilesConfig, - boolean keyed, - DocValueFormat formatter, - Map metadata) throws IOException; + Aggregator build( + String name, + ValuesSource valuesSource, + AggregationContext context, + Aggregator parent, + double[] percents, + PercentilesConfig percentilesConfig, + boolean keyed, + DocValueFormat formatter, + Map metadata + ) throws IOException; } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/PercentilesConfig.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/PercentilesConfig.java index 1ce70d5c93a29..76ca4462c3ec9 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/PercentilesConfig.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/PercentilesConfig.java @@ -116,8 +116,7 @@ public TDigest(double compression) { public void setCompression(double compression) { if (compression < 0.0) { - throw new IllegalArgumentException( - "[compression] must be greater than or equal to 0. 
Found [" + compression + "]"); + throw new IllegalArgumentException("[compression] must be greater than or equal to 0. Found [" + compression + "]"); } this.compression = compression; } @@ -127,18 +126,41 @@ public double getCompression() { } @Override - public Aggregator createPercentilesAggregator(String name, ValuesSource valuesSource, AggregationContext context, Aggregator parent, - double[] values, boolean keyed, DocValueFormat formatter, - Map metadata) throws IOException { + public Aggregator createPercentilesAggregator( + String name, + ValuesSource valuesSource, + AggregationContext context, + Aggregator parent, + double[] values, + boolean keyed, + DocValueFormat formatter, + Map metadata + ) throws IOException { return new TDigestPercentilesAggregator(name, valuesSource, context, parent, values, compression, keyed, formatter, metadata); } @Override - Aggregator createPercentileRanksAggregator(String name, ValuesSource valuesSource, AggregationContext context, Aggregator parent, - double[] values, boolean keyed, DocValueFormat formatter, - Map metadata) throws IOException { - return new TDigestPercentileRanksAggregator(name, valuesSource, context, parent, values, compression, keyed, - formatter, metadata); + Aggregator createPercentileRanksAggregator( + String name, + ValuesSource valuesSource, + AggregationContext context, + Aggregator parent, + double[] values, + boolean keyed, + DocValueFormat formatter, + Map metadata + ) throws IOException { + return new TDigestPercentileRanksAggregator( + name, + valuesSource, + context, + parent, + values, + compression, + keyed, + formatter, + metadata + ); } @Override @@ -200,19 +222,51 @@ public int getNumberOfSignificantValueDigits() { } @Override - public Aggregator createPercentilesAggregator(String name, ValuesSource valuesSource, AggregationContext context, Aggregator parent, - double[] values, boolean keyed, DocValueFormat formatter, - Map metadata) throws IOException { - return new HDRPercentilesAggregator(name, valuesSource, context, parent, values, numberOfSignificantValueDigits, keyed, - formatter, metadata); + public Aggregator createPercentilesAggregator( + String name, + ValuesSource valuesSource, + AggregationContext context, + Aggregator parent, + double[] values, + boolean keyed, + DocValueFormat formatter, + Map metadata + ) throws IOException { + return new HDRPercentilesAggregator( + name, + valuesSource, + context, + parent, + values, + numberOfSignificantValueDigits, + keyed, + formatter, + metadata + ); } @Override - Aggregator createPercentileRanksAggregator(String name, ValuesSource valuesSource, AggregationContext context, Aggregator parent, - double[] values, boolean keyed, DocValueFormat formatter, - Map metadata) throws IOException { - return new HDRPercentileRanksAggregator(name, valuesSource, context, parent, values, numberOfSignificantValueDigits, keyed, - formatter, metadata); + Aggregator createPercentileRanksAggregator( + String name, + ValuesSource valuesSource, + AggregationContext context, + Aggregator parent, + double[] values, + boolean keyed, + DocValueFormat formatter, + Map metadata + ) throws IOException { + return new HDRPercentileRanksAggregator( + name, + valuesSource, + context, + parent, + values, + numberOfSignificantValueDigits, + keyed, + formatter, + metadata + ); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/PercentilesMethod.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/PercentilesMethod.java index 
769a790de55e6..dc1664a368ade 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/PercentilesMethod.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/PercentilesMethod.java @@ -8,11 +8,11 @@ package org.elasticsearch.search.aggregations.metrics; -import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.xcontent.ObjectParser; +import org.elasticsearch.common.xcontent.ParseField; import java.io.IOException; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricAggregationBuilder.java index 45d719800dfab..47c64c4c9467e 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricAggregationBuilder.java @@ -8,10 +8,10 @@ package org.elasticsearch.search.aggregations.metrics; -import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.ConstructingObjectParser; +import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptedMetricAggContexts; @@ -37,12 +37,15 @@ public class ScriptedMetricAggregationBuilder extends AbstractAggregationBuilder private static final ParseField REDUCE_SCRIPT_FIELD = new ParseField("reduce_script"); private static final ParseField PARAMS_FIELD = new ParseField("params"); - public static final ConstructingObjectParser PARSER = - new ConstructingObjectParser<>(NAME, false, (args, name) -> { - ScriptedMetricAggregationBuilder builder = new ScriptedMetricAggregationBuilder(name); - builder.mapScript((Script) args[0]); - return builder; - }); + public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + NAME, + false, + (args, name) -> { + ScriptedMetricAggregationBuilder builder = new ScriptedMetricAggregationBuilder(name); + builder.mapScript((Script) args[0]); + return builder; + } + ); static { Script.declareScript(PARSER, ScriptedMetricAggregationBuilder::initScript, INIT_SCRIPT_FIELD); Script.declareScript(PARSER, constructorArg(), MAP_SCRIPT_FIELD); @@ -61,8 +64,11 @@ public ScriptedMetricAggregationBuilder(String name) { super(name); } - protected ScriptedMetricAggregationBuilder(ScriptedMetricAggregationBuilder clone, - Builder factoriesBuilder, Map metadata) { + protected ScriptedMetricAggregationBuilder( + ScriptedMetricAggregationBuilder clone, + Builder factoriesBuilder, + Map metadata + ) { super(clone, factoriesBuilder, metadata); this.initScript = clone.initScript; this.mapScript = clone.mapScript; @@ -201,14 +207,14 @@ public BucketCardinality bucketCardinality() { } @Override - protected ScriptedMetricAggregatorFactory doBuild(AggregationContext context, AggregatorFactory parent, - Builder subfactoriesBuilder) throws IOException { + protected ScriptedMetricAggregatorFactory doBuild(AggregationContext context, AggregatorFactory parent, Builder subfactoriesBuilder) + throws IOException { if (combineScript == null) { throw new 
IllegalArgumentException("[combineScript] must not be null: [" + name + "]"); } - if(reduceScript == null) { + if (reduceScript == null) { throw new IllegalArgumentException("[reduceScript] must not be null: [" + name + "]"); } @@ -225,21 +231,35 @@ protected ScriptedMetricAggregatorFactory doBuild(AggregationContext context, Ag initScriptParams = Collections.emptyMap(); } - ScriptedMetricAggContexts.MapScript.Factory compiledMapScript = context.compile(mapScript, - ScriptedMetricAggContexts.MapScript.CONTEXT); + ScriptedMetricAggContexts.MapScript.Factory compiledMapScript = context.compile( + mapScript, + ScriptedMetricAggContexts.MapScript.CONTEXT + ); Map mapScriptParams = mapScript.getParams(); - - ScriptedMetricAggContexts.CombineScript.Factory compiledCombineScript = context.compile(combineScript, - ScriptedMetricAggContexts.CombineScript.CONTEXT); + ScriptedMetricAggContexts.CombineScript.Factory compiledCombineScript = context.compile( + combineScript, + ScriptedMetricAggContexts.CombineScript.CONTEXT + ); Map combineScriptParams = combineScript.getParams(); - return new ScriptedMetricAggregatorFactory(name, compiledMapScript, mapScriptParams, compiledInitScript, - initScriptParams, compiledCombineScript, combineScriptParams, reduceScript, - params, context, parent, subfactoriesBuilder, metadata); + return new ScriptedMetricAggregatorFactory( + name, + compiledMapScript, + mapScriptParams, + compiledInitScript, + initScriptParams, + compiledCombineScript, + combineScriptParams, + reduceScript, + params, + context, + parent, + subfactoriesBuilder, + metadata + ); } - @Override protected XContentBuilder internalXContent(XContentBuilder builder, Params builderParams) throws IOException { builder.startObject(); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricAggregator.java index dff57517fecdb..0365c65053221 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricAggregator.java @@ -11,11 +11,11 @@ import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.Scorable; import org.apache.lucene.search.ScoreMode; -import org.elasticsearch.core.Nullable; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.core.Releasables; import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.common.util.ObjectArray; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.core.Releasables; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptedMetricAggContexts; import org.elasticsearch.script.ScriptedMetricAggContexts.MapScript; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricAggregatorFactory.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricAggregatorFactory.java index c8e7205366476..19840f47e4dba 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricAggregatorFactory.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricAggregatorFactory.java @@ -111,15 +111,21 @@ static T deepCopyParams(T original) { clonedList.add(deepCopyParams(o)); } clone = (T) clonedList; - } else if (original instanceof String || original instanceof Integer || original 
instanceof Long || original instanceof Short - || original instanceof Byte || original instanceof Float || original instanceof Double || original instanceof Character + } else if (original instanceof String + || original instanceof Integer + || original instanceof Long + || original instanceof Short + || original instanceof Byte + || original instanceof Float + || original instanceof Double + || original instanceof Character || original instanceof Boolean) { - clone = original; - } else { - throw new IllegalArgumentException( - "Can only clone primitives, String, ArrayList, and HashMap. Found: " + original.getClass().getCanonicalName() - ); - } + clone = original; + } else { + throw new IllegalArgumentException( + "Can only clone primitives, String, ArrayList, and HashMap. Found: " + original.getClass().getCanonicalName() + ); + } return clone; } @@ -130,12 +136,12 @@ static Map mergeParams(Map agg, Map aggEntry : agg.entrySet()) { if (combined.putIfAbsent(aggEntry.getKey(), aggEntry.getValue()) != null) { - throw new IllegalArgumentException("Parameter name \"" + aggEntry.getKey() + - "\" used in both aggregation and script parameters"); + throw new IllegalArgumentException( + "Parameter name \"" + aggEntry.getKey() + "\" used in both aggregation and script parameters" + ); } } return combined; } } - diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/Stats.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/Stats.java index 8896651b8b8ce..4bdcbd041d542 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/Stats.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/Stats.java @@ -7,7 +7,6 @@ */ package org.elasticsearch.search.aggregations.metrics; - /** * Statistics over a set of values (either aggregated over field data or scripts) */ diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/StatsAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/StatsAggregationBuilder.java index 7dacfceecd09d..73a526c4ab81e 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/StatsAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/StatsAggregationBuilder.java @@ -35,8 +35,7 @@ public class StatsAggregationBuilder extends ValuesSourceAggregationBuilder.Leaf MetricAggregatorSupplier.class ); - public static final ObjectParser PARSER = - ObjectParser.fromBuilder(NAME, StatsAggregationBuilder::new); + public static final ObjectParser PARSER = ObjectParser.fromBuilder(NAME, StatsAggregationBuilder::new); static { ValuesSourceAggregationBuilder.declareFields(PARSER, true, true, false); } @@ -45,8 +44,11 @@ public StatsAggregationBuilder(String name) { super(name); } - protected StatsAggregationBuilder(StatsAggregationBuilder clone, - AggregatorFactories.Builder factoriesBuilder, Map metadata) { + protected StatsAggregationBuilder( + StatsAggregationBuilder clone, + AggregatorFactories.Builder factoriesBuilder, + Map metadata + ) { super(clone, factoriesBuilder, metadata); } @@ -77,11 +79,13 @@ protected void innerWriteTo(StreamOutput out) { } @Override - protected StatsAggregatorFactory innerBuild(AggregationContext context, ValuesSourceConfig config, - AggregatorFactory parent, - AggregatorFactories.Builder subFactoriesBuilder) throws IOException { - MetricAggregatorSupplier aggregatorSupplier = - context.getValuesSourceRegistry().getAggregator(REGISTRY_KEY, config); + 
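deepCopyParams and mergeParams above are only re-indented; their rules are unchanged: lists and maps are copied recursively, immutable scalars (String and the boxed primitives) are shared, anything else is rejected, and a parameter key may live in the aggregation params or the script params but not both. A standalone sketch of those same rules (hypothetical utility class, slightly generalized to Number instead of listing each boxed type):

    import java.util.ArrayList;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    final class ParamsUtil {
        @SuppressWarnings("unchecked")
        static <T> T deepCopy(T original) {
            if (original instanceof Map<?, ?> map) {
                Map<Object, Object> copy = new HashMap<>();
                map.forEach((k, v) -> copy.put(deepCopy(k), deepCopy(v)));
                return (T) copy;
            } else if (original instanceof List<?> list) {
                List<Object> copy = new ArrayList<>();
                list.forEach(element -> copy.add(deepCopy(element)));
                return (T) copy;
            } else if (original == null || original instanceof String || original instanceof Number
                || original instanceof Character || original instanceof Boolean) {
                return original; // immutable (or null), safe to share between copies
            }
            throw new IllegalArgumentException("Can only clone primitives, String, List, and Map. Found: " + original.getClass());
        }

        static Map<String, Object> merge(Map<String, Object> aggParams, Map<String, Object> scriptParams) {
            Map<String, Object> combined = new HashMap<>(scriptParams);
            for (Map.Entry<String, Object> entry : aggParams.entrySet()) {
                if (combined.putIfAbsent(entry.getKey(), entry.getValue()) != null) {
                    throw new IllegalArgumentException(
                        "Parameter name \"" + entry.getKey() + "\" used in both aggregation and script parameters"
                    );
                }
            }
            return combined;
        }
    }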
protected StatsAggregatorFactory innerBuild( + AggregationContext context, + ValuesSourceConfig config, + AggregatorFactory parent, + AggregatorFactories.Builder subFactoriesBuilder + ) throws IOException { + MetricAggregatorSupplier aggregatorSupplier = context.getValuesSourceRegistry().getAggregator(REGISTRY_KEY, config); return new StatsAggregatorFactory(name, config, context, parent, subFactoriesBuilder, metadata, aggregatorSupplier); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/StatsAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/StatsAggregator.java index 03094ca178b63..c6aa5521d7693 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/StatsAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/StatsAggregator.java @@ -9,10 +9,10 @@ import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.ScoreMode; -import org.elasticsearch.core.Releasables; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.DoubleArray; import org.elasticsearch.common.util.LongArray; +import org.elasticsearch.core.Releasables; import org.elasticsearch.index.fielddata.SortedNumericDoubleValues; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.Aggregator; @@ -65,8 +65,7 @@ public ScoreMode scoreMode() { } @Override - public LeafBucketCollector getLeafCollector(LeafReaderContext ctx, - final LeafBucketCollector sub) throws IOException { + public LeafBucketCollector getLeafCollector(LeafReaderContext ctx, final LeafBucketCollector sub) throws IOException { if (valuesSource == null) { return LeafBucketCollector.NO_OP_COLLECTOR; } @@ -127,22 +126,32 @@ public boolean hasMetric(String name) { @Override public double metric(String name, long owningBucketOrd) { if (valuesSource == null || owningBucketOrd >= counts.size()) { - switch(InternalStats.Metrics.resolve(name)) { - case count: return 0; - case sum: return 0; - case min: return Double.POSITIVE_INFINITY; - case max: return Double.NEGATIVE_INFINITY; - case avg: return Double.NaN; + switch (InternalStats.Metrics.resolve(name)) { + case count: + return 0; + case sum: + return 0; + case min: + return Double.POSITIVE_INFINITY; + case max: + return Double.NEGATIVE_INFINITY; + case avg: + return Double.NaN; default: throw new IllegalArgumentException("Unknown value [" + name + "] in common stats aggregation"); } } - switch(InternalStats.Metrics.resolve(name)) { - case count: return counts.get(owningBucketOrd); - case sum: return sums.get(owningBucketOrd); - case min: return mins.get(owningBucketOrd); - case max: return maxes.get(owningBucketOrd); - case avg: return sums.get(owningBucketOrd) / counts.get(owningBucketOrd); + switch (InternalStats.Metrics.resolve(name)) { + case count: + return counts.get(owningBucketOrd); + case sum: + return sums.get(owningBucketOrd); + case min: + return mins.get(owningBucketOrd); + case max: + return maxes.get(owningBucketOrd); + case avg: + return sums.get(owningBucketOrd) / counts.get(owningBucketOrd); default: throw new IllegalArgumentException("Unknown value [" + name + "] in common stats aggregation"); } @@ -153,8 +162,7 @@ public InternalAggregation buildAggregation(long bucket) { if (valuesSource == null || bucket >= sums.size()) { return buildEmptyAggregation(); } - return new InternalStats(name, counts.get(bucket), sums.get(bucket), mins.get(bucket), - maxes.get(bucket), format, metadata()); + 
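The switch in StatsAggregator.metric is only reformatted, but the identity values it returns for empty or unmapped buckets are worth spelling out: count and sum start at 0, min at positive infinity, max at negative infinity, and avg is NaN because it is a sum divided by a zero count. A minimal running-stats sketch with the same identities (hypothetical class, not the BigArrays-backed aggregator):

    final class RunningStats {
        long count = 0;
        double sum = 0;
        double min = Double.POSITIVE_INFINITY;
        double max = Double.NEGATIVE_INFINITY;

        void collect(double value) {
            count++;
            sum += value;
            min = Math.min(min, value);
            max = Math.max(max, value);
        }

        double avg() {
            return sum / count; // NaN while count == 0, matching the empty-bucket branch above
        }

        public static void main(String[] args) {
            RunningStats stats = new RunningStats();
            System.out.println(stats.avg()); // NaN before anything is collected
            for (double v : new double[] { 3, 1, 4, 1, 5 }) {
                stats.collect(v);
            }
            // count=5 sum=14.0 min=1.0 max=5.0 avg=2.8
            System.out.println(stats.count + " " + stats.sum + " " + stats.min + " " + stats.max + " " + stats.avg());
        }
    }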
return new InternalStats(name, counts.get(bucket), sums.get(bucket), mins.get(bucket), maxes.get(bucket), format, metadata()); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/StatsAggregatorFactory.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/StatsAggregatorFactory.java index 43a856f8c260a..2d311c9e05408 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/StatsAggregatorFactory.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/StatsAggregatorFactory.java @@ -26,13 +26,15 @@ class StatsAggregatorFactory extends ValuesSourceAggregatorFactory { private final MetricAggregatorSupplier aggregatorSupplier; - StatsAggregatorFactory(String name, - ValuesSourceConfig config, - AggregationContext context, - AggregatorFactory parent, - AggregatorFactories.Builder subFactoriesBuilder, - Map metadata, - MetricAggregatorSupplier aggregatorSupplier) throws IOException { + StatsAggregatorFactory( + String name, + ValuesSourceConfig config, + AggregationContext context, + AggregatorFactory parent, + AggregatorFactories.Builder subFactoriesBuilder, + Map metadata, + MetricAggregatorSupplier aggregatorSupplier + ) throws IOException { super(name, config, context, parent, subFactoriesBuilder, metadata); this.aggregatorSupplier = aggregatorSupplier; @@ -43,7 +45,8 @@ static void registerAggregators(ValuesSourceRegistry.Builder builder) { StatsAggregationBuilder.REGISTRY_KEY, List.of(CoreValuesSourceType.NUMERIC, CoreValuesSourceType.DATE, CoreValuesSourceType.BOOLEAN), StatsAggregator::new, - true); + true + ); } @Override @@ -52,11 +55,8 @@ protected Aggregator createUnmapped(Aggregator parent, Map metad } @Override - protected Aggregator doCreateInternal( - Aggregator parent, - CardinalityUpperBound cardinality, - Map metadata - ) throws IOException { + protected Aggregator doCreateInternal(Aggregator parent, CardinalityUpperBound cardinality, Map metadata) + throws IOException { return aggregatorSupplier.build(name, config, context, parent, metadata); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/SumAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/SumAggregationBuilder.java index 255891b84f5ab..3c0c171bad3e5 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/SumAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/SumAggregationBuilder.java @@ -46,9 +46,11 @@ public SumAggregationBuilder(String name) { super(name); } - protected SumAggregationBuilder(SumAggregationBuilder clone, - AggregatorFactories.Builder factoriesBuilder, - Map metadata) { + protected SumAggregationBuilder( + SumAggregationBuilder clone, + AggregatorFactories.Builder factoriesBuilder, + Map metadata + ) { super(clone, factoriesBuilder, metadata); } @@ -75,13 +77,14 @@ protected void innerWriteTo(StreamOutput out) { } @Override - protected SumAggregatorFactory innerBuild(AggregationContext context, ValuesSourceConfig config, - AggregatorFactory parent, - AggregatorFactories.Builder subFactoriesBuilder) throws IOException { - MetricAggregatorSupplier aggregatorSupplier = - context.getValuesSourceRegistry().getAggregator(REGISTRY_KEY, config); - return new SumAggregatorFactory(name, config, context, parent, subFactoriesBuilder, metadata, - aggregatorSupplier); + protected SumAggregatorFactory innerBuild( + AggregationContext context, + ValuesSourceConfig config, 
+ AggregatorFactory parent, + AggregatorFactories.Builder subFactoriesBuilder + ) throws IOException { + MetricAggregatorSupplier aggregatorSupplier = context.getValuesSourceRegistry().getAggregator(REGISTRY_KEY, config); + return new SumAggregatorFactory(name, config, context, parent, subFactoriesBuilder, metadata, aggregatorSupplier); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/SumAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/SumAggregator.java index 40915d79df975..9b1d22aa80e98 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/SumAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/SumAggregator.java @@ -9,8 +9,8 @@ import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.ScoreMode; -import org.elasticsearch.core.Releasables; import org.elasticsearch.common.util.DoubleArray; +import org.elasticsearch.core.Releasables; import org.elasticsearch.index.fielddata.SortedNumericDoubleValues; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.Aggregator; @@ -55,8 +55,7 @@ public ScoreMode scoreMode() { } @Override - public LeafBucketCollector getLeafCollector(LeafReaderContext ctx, - final LeafBucketCollector sub) throws IOException { + public LeafBucketCollector getLeafCollector(LeafReaderContext ctx, final LeafBucketCollector sub) throws IOException { if (valuesSource == null) { return LeafBucketCollector.NO_OP_COLLECTOR; } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/SumAggregatorFactory.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/SumAggregatorFactory.java index daa683d1f264a..a14764a5ca25e 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/SumAggregatorFactory.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/SumAggregatorFactory.java @@ -26,13 +26,15 @@ class SumAggregatorFactory extends ValuesSourceAggregatorFactory { private final MetricAggregatorSupplier aggregatorSupplier; - SumAggregatorFactory(String name, - ValuesSourceConfig config, - AggregationContext context, - AggregatorFactory parent, - AggregatorFactories.Builder subFactoriesBuilder, - Map metadata, - MetricAggregatorSupplier aggregatorSupplier) throws IOException { + SumAggregatorFactory( + String name, + ValuesSourceConfig config, + AggregationContext context, + AggregatorFactory parent, + AggregatorFactories.Builder subFactoriesBuilder, + Map metadata, + MetricAggregatorSupplier aggregatorSupplier + ) throws IOException { super(name, config, context, parent, subFactoriesBuilder, metadata); this.aggregatorSupplier = aggregatorSupplier; @@ -43,7 +45,8 @@ static void registerAggregators(ValuesSourceRegistry.Builder builder) { SumAggregationBuilder.REGISTRY_KEY, List.of(CoreValuesSourceType.NUMERIC, CoreValuesSourceType.DATE, CoreValuesSourceType.BOOLEAN), SumAggregator::new, - true); + true + ); } @Override @@ -52,12 +55,8 @@ protected Aggregator createUnmapped(Aggregator parent, Map metad } @Override - protected Aggregator doCreateInternal( - Aggregator parent, - CardinalityUpperBound bucketCardinality, - Map metadata - ) throws IOException { - return aggregatorSupplier - .build(name, config, context, parent, metadata); + protected Aggregator doCreateInternal(Aggregator parent, CardinalityUpperBound bucketCardinality, Map metadata) + throws IOException { + return 
aggregatorSupplier.build(name, config, context, parent, metadata); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentileRanksAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentileRanksAggregator.java index ca5eb6cc6c186..3c0ee93113a49 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentileRanksAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentileRanksAggregator.java @@ -18,15 +18,17 @@ class TDigestPercentileRanksAggregator extends AbstractTDigestPercentilesAggregator { - TDigestPercentileRanksAggregator(String name, - ValuesSource valuesSource, - AggregationContext context, - Aggregator parent, - double[] percents, - double compression, - boolean keyed, - DocValueFormat formatter, - Map metadata) throws IOException { + TDigestPercentileRanksAggregator( + String name, + ValuesSource valuesSource, + AggregationContext context, + Aggregator parent, + double[] percents, + double compression, + boolean keyed, + DocValueFormat formatter, + Map metadata + ) throws IOException { super(name, valuesSource, context, parent, percents, compression, keyed, formatter, metadata); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentilesAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentilesAggregator.java index 4f20921a50136..65cc1bff01e67 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentilesAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentilesAggregator.java @@ -18,15 +18,17 @@ class TDigestPercentilesAggregator extends AbstractTDigestPercentilesAggregator { - TDigestPercentilesAggregator(String name, - ValuesSource valuesSource, - AggregationContext context, - Aggregator parent, - double[] percents, - double compression, - boolean keyed, - DocValueFormat formatter, - Map metadata) throws IOException { + TDigestPercentilesAggregator( + String name, + ValuesSource valuesSource, + AggregationContext context, + Aggregator parent, + double[] percents, + double compression, + boolean keyed, + DocValueFormat formatter, + Map metadata + ) throws IOException { super(name, valuesSource, context, parent, percents, compression, keyed, formatter, metadata); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TDigestState.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TDigestState.java index 9affff31550da..e5a878e369f69 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TDigestState.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TDigestState.java @@ -9,6 +9,7 @@ import com.tdunning.math.stats.AVLTreeDigest; import com.tdunning.math.stats.Centroid; + import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregationBuilder.java index ad8a73229a88d..c40ddfb49536a 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregationBuilder.java @@ -9,13 +9,13 @@ 
package org.elasticsearch.search.aggregations.metrics; import org.elasticsearch.Version; -import org.elasticsearch.core.Nullable; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.core.Nullable; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.script.FieldScript; import org.elasticsearch.script.Script; @@ -69,8 +69,7 @@ public TopHitsAggregationBuilder(String name) { super(name); } - protected TopHitsAggregationBuilder(TopHitsAggregationBuilder clone, - Builder factoriesBuilder, Map metadata) { + protected TopHitsAggregationBuilder(TopHitsAggregationBuilder clone, Builder factoriesBuilder, Map metadata) { super(clone, factoriesBuilder, metadata); this.from = clone.from; this.size = clone.size; @@ -79,16 +78,20 @@ protected TopHitsAggregationBuilder(TopHitsAggregationBuilder clone, this.seqNoAndPrimaryTerm = clone.seqNoAndPrimaryTerm; this.trackScores = clone.trackScores; this.sorts = clone.sorts == null ? null : new ArrayList<>(clone.sorts); - this.highlightBuilder = clone.highlightBuilder == null ? null : - new HighlightBuilder(clone.highlightBuilder, clone.highlightBuilder.highlightQuery(), clone.highlightBuilder.fields()); - this.storedFieldsContext = clone.storedFieldsContext == null ? null : - new StoredFieldsContext(clone.storedFieldsContext); + this.highlightBuilder = clone.highlightBuilder == null + ? null + : new HighlightBuilder(clone.highlightBuilder, clone.highlightBuilder.highlightQuery(), clone.highlightBuilder.fields()); + this.storedFieldsContext = clone.storedFieldsContext == null ? null : new StoredFieldsContext(clone.storedFieldsContext); this.docValueFields = clone.docValueFields == null ? null : new ArrayList<>(clone.docValueFields); this.fetchFields = clone.fetchFields == null ? null : new ArrayList<>(clone.fetchFields); this.scriptFields = clone.scriptFields == null ? null : new HashSet<>(clone.scriptFields); - this.fetchSourceContext = clone.fetchSourceContext == null ? null : - new FetchSourceContext(clone.fetchSourceContext.fetchSource(), clone.fetchSourceContext.includes(), - clone.fetchSourceContext.excludes()); + this.fetchSourceContext = clone.fetchSourceContext == null + ? null + : new FetchSourceContext( + clone.fetchSourceContext.fetchSource(), + clone.fetchSourceContext.includes(), + clone.fetchSourceContext.excludes() + ); } @Override @@ -257,7 +260,7 @@ public TopHitsAggregationBuilder sort(SortBuilder sort) { throw new IllegalArgumentException("[sort] must not be null: [" + name + "]"); } if (sorts == null) { - sorts = new ArrayList<>(); + sorts = new ArrayList<>(); } sorts.add(sort); return this; @@ -309,8 +312,7 @@ public HighlightBuilder highlighter() { * every hit */ public TopHitsAggregationBuilder fetchSource(boolean fetch) { - FetchSourceContext fetchSourceContext = this.fetchSourceContext != null ? this.fetchSourceContext - : FetchSourceContext.FETCH_SOURCE; + FetchSourceContext fetchSourceContext = this.fetchSourceContext != null ? 
this.fetchSourceContext : FetchSourceContext.FETCH_SOURCE; this.fetchSourceContext = new FetchSourceContext(fetch, fetchSourceContext.includes(), fetchSourceContext.excludes()); return this; } @@ -328,8 +330,10 @@ public TopHitsAggregationBuilder fetchSource(boolean fetch) { * filter the returned _source */ public TopHitsAggregationBuilder fetchSource(@Nullable String include, @Nullable String exclude) { - fetchSource(include == null ? Strings.EMPTY_ARRAY : new String[] { include }, - exclude == null ? Strings.EMPTY_ARRAY : new String[] { exclude }); + fetchSource( + include == null ? Strings.EMPTY_ARRAY : new String[] { include }, + exclude == null ? Strings.EMPTY_ARRAY : new String[] { exclude } + ); return this; } @@ -346,8 +350,7 @@ public TopHitsAggregationBuilder fetchSource(@Nullable String include, @Nullable * pattern to filter the returned _source */ public TopHitsAggregationBuilder fetchSource(@Nullable String[] includes, @Nullable String[] excludes) { - FetchSourceContext fetchSourceContext = this.fetchSourceContext != null ? this.fetchSourceContext - : FetchSourceContext.FETCH_SOURCE; + FetchSourceContext fetchSourceContext = this.fetchSourceContext != null ? this.fetchSourceContext : FetchSourceContext.FETCH_SOURCE; this.fetchSourceContext = new FetchSourceContext(fetchSourceContext.fetchSource(), includes, excludes); return this; } @@ -588,8 +591,9 @@ public boolean trackScores() { @Override public TopHitsAggregationBuilder subAggregations(Builder subFactories) { - throw new AggregationInitializationException("Aggregator [" + name + "] of type [" - + getType() + "] cannot accept sub-aggregations"); + throw new AggregationInitializationException( + "Aggregator [" + name + "] of type [" + getType() + "] cannot accept sub-aggregations" + ); } @Override @@ -599,15 +603,21 @@ public BucketCardinality bucketCardinality() { @Override protected TopHitsAggregatorFactory doBuild(AggregationContext context, AggregatorFactory parent, Builder subfactoriesBuilder) - throws IOException { + throws IOException { long innerResultWindow = from() + size(); int maxInnerResultWindow = context.getIndexSettings().getMaxInnerResultWindow(); if (innerResultWindow > maxInnerResultWindow) { throw new IllegalArgumentException( - "Top hits result window is too large, the top hits aggregator [" + name + "]'s from + size must be less " + - "than or equal to: [" + maxInnerResultWindow + "] but was [" + innerResultWindow + - "]. This limit can be set by changing the [" + IndexSettings.MAX_INNER_RESULT_WINDOW_SETTING.getKey() + - "] index level setting." + "Top hits result window is too large, the top hits aggregator [" + + name + + "]'s from + size must be less " + + "than or equal to: [" + + maxInnerResultWindow + + "] but was [" + + innerResultWindow + + "]. This limit can be set by changing the [" + + IndexSettings.MAX_INNER_RESULT_WINDOW_SETTING.getKey() + + "] index level setting." 
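The long error message above is only re-wrapped; the check itself is simple: a top_hits aggregation refuses to build when from + size exceeds the index.max_inner_result_window index setting (assumed 100 by default when the index does not override it). A minimal sketch of that guard, with the default hard-coded as an assumption:

    final class InnerResultWindowCheck {
        // Assumed default of the index.max_inner_result_window setting; it is configurable per index.
        static final int MAX_INNER_RESULT_WINDOW = 100;

        static void validate(int from, int size) {
            long window = (long) from + size; // widen first so a huge from + size cannot overflow
            if (window > MAX_INNER_RESULT_WINDOW) {
                throw new IllegalArgumentException(
                    "Top hits result window is too large: from + size must be <= [" + MAX_INNER_RESULT_WINDOW + "] but was [" + window + "]"
                );
            }
        }

        public static void main(String[] args) {
            validate(0, 10);  // fine: window of 10
            validate(90, 20); // throws: window of 110 exceeds the limit
        }
    }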
); } @@ -616,8 +626,13 @@ protected TopHitsAggregatorFactory doBuild(AggregationContext context, Aggregato for (ScriptField field : this.scriptFields) { FieldScript.Factory factory = context.compile(field.script(), FieldScript.CONTEXT); FieldScript.LeafFactory searchScript = factory.newFactory(field.script().getParams(), context.lookup()); - scriptFields.add(new org.elasticsearch.search.fetch.subphase.ScriptFieldsContext.ScriptField( - field.fieldName(), searchScript, field.ignoreFailure())); + scriptFields.add( + new org.elasticsearch.search.fetch.subphase.ScriptFieldsContext.ScriptField( + field.fieldName(), + searchScript, + field.ignoreFailure() + ) + ); } } @@ -627,9 +642,26 @@ protected TopHitsAggregatorFactory doBuild(AggregationContext context, Aggregato } else { optionalSort = context.buildSort(sorts); } - return new TopHitsAggregatorFactory(name, from, size, explain, version, seqNoAndPrimaryTerm, trackScores, optionalSort, - highlightBuilder, storedFieldsContext, docValueFields, fetchFields, scriptFields, fetchSourceContext, context, parent, - subfactoriesBuilder, metadata); + return new TopHitsAggregatorFactory( + name, + from, + size, + explain, + version, + seqNoAndPrimaryTerm, + trackScores, + optionalSort, + highlightBuilder, + storedFieldsContext, + docValueFields, + fetchFields, + scriptFields, + fetchSourceContext, + context, + parent, + subfactoriesBuilder, + metadata + ); } @Override @@ -673,7 +705,7 @@ protected XContentBuilder internalXContent(XContentBuilder builder, Params param if (sorts != null) { builder.startArray(SearchSourceBuilder.SORT_FIELD.getPreferredName()); for (SortBuilder sort : sorts) { - sort.toXContent(builder, params); + sort.toXContent(builder, params); } builder.endArray(); } @@ -710,13 +742,18 @@ public static TopHitsAggregationBuilder parse(String aggregationName, XContentPa } else if (SearchSourceBuilder._SOURCE_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { factory.fetchSource(FetchSourceContext.fromXContent(parser)); } else if (SearchSourceBuilder.STORED_FIELDS_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { - factory.storedFieldsContext = - StoredFieldsContext.fromXContent(SearchSourceBuilder.STORED_FIELDS_FIELD.getPreferredName(), parser); + factory.storedFieldsContext = StoredFieldsContext.fromXContent( + SearchSourceBuilder.STORED_FIELDS_FIELD.getPreferredName(), + parser + ); } else if (SearchSourceBuilder.SORT_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { factory.sort(parser.text()); } else { - throw new ParsingException(parser.getTokenLocation(), "Unknown key for a " + token + " in [" + currentFieldName + "].", - parser.getTokenLocation()); + throw new ParsingException( + parser.getTokenLocation(), + "Unknown key for a " + token + " in [" + currentFieldName + "].", + parser.getTokenLocation() + ); } } else if (token == XContentParser.Token.START_OBJECT) { if (SearchSourceBuilder._SOURCE_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { @@ -735,31 +772,49 @@ public static TopHitsAggregationBuilder parse(String aggregationName, XContentPa } else if (token.isValue()) { if (SearchSourceBuilder.SCRIPT_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { script = Script.parse(parser); - } else if (SearchSourceBuilder.IGNORE_FAILURE_FIELD.match(currentFieldName, - parser.getDeprecationHandler())) { + } else if (SearchSourceBuilder.IGNORE_FAILURE_FIELD.match( + currentFieldName, + parser.getDeprecationHandler() + )) { ignoreFailure = parser.booleanValue(); } else 
{ - throw new ParsingException(parser.getTokenLocation(), - "Unknown key for a " + token + " in [" + currentFieldName + "].", - parser.getTokenLocation()); + throw new ParsingException( + parser.getTokenLocation(), + "Unknown key for a " + token + " in [" + currentFieldName + "].", + parser.getTokenLocation() + ); } } else if (token == XContentParser.Token.START_OBJECT) { if (SearchSourceBuilder.SCRIPT_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { script = Script.parse(parser); } else { - throw new ParsingException(parser.getTokenLocation(), - "Unknown key for a " + token + " in [" + currentFieldName + "].", - parser.getTokenLocation()); + throw new ParsingException( + parser.getTokenLocation(), + "Unknown key for a " + token + " in [" + currentFieldName + "].", + parser.getTokenLocation() + ); } } else { - throw new ParsingException(parser.getTokenLocation(), - "Unknown key for a " + token + " in [" + currentFieldName + "].", parser.getTokenLocation()); + throw new ParsingException( + parser.getTokenLocation(), + "Unknown key for a " + token + " in [" + currentFieldName + "].", + parser.getTokenLocation() + ); } } scriptFields.add(new ScriptField(scriptFieldName, script, ignoreFailure)); } else { - throw new ParsingException(parser.getTokenLocation(), "Expected [" + XContentParser.Token.START_OBJECT - + "] in [" + currentFieldName + "] but found [" + token + "]", parser.getTokenLocation()); + throw new ParsingException( + parser.getTokenLocation(), + "Expected [" + + XContentParser.Token.START_OBJECT + + "] in [" + + currentFieldName + + "] but found [" + + token + + "]", + parser.getTokenLocation() + ); } } factory.scriptFields(scriptFields); @@ -769,14 +824,19 @@ public static TopHitsAggregationBuilder parse(String aggregationName, XContentPa List> sorts = SortBuilder.fromXContent(parser); factory.sorts(sorts); } else { - throw new ParsingException(parser.getTokenLocation(), "Unknown key for a " + token + " in [" + currentFieldName + "].", - parser.getTokenLocation()); + throw new ParsingException( + parser.getTokenLocation(), + "Unknown key for a " + token + " in [" + currentFieldName + "].", + parser.getTokenLocation() + ); } } else if (token == XContentParser.Token.START_ARRAY) { if (SearchSourceBuilder.STORED_FIELDS_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { - factory.storedFieldsContext = - StoredFieldsContext.fromXContent(SearchSourceBuilder.STORED_FIELDS_FIELD.getPreferredName(), parser); + factory.storedFieldsContext = StoredFieldsContext.fromXContent( + SearchSourceBuilder.STORED_FIELDS_FIELD.getPreferredName(), + parser + ); } else if (SearchSourceBuilder.DOCVALUE_FIELDS_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { FieldAndFormat ff = FieldAndFormat.fromXContent(parser); @@ -793,12 +853,18 @@ public static TopHitsAggregationBuilder parse(String aggregationName, XContentPa } else if (SearchSourceBuilder._SOURCE_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { factory.fetchSource(FetchSourceContext.fromXContent(parser)); } else { - throw new ParsingException(parser.getTokenLocation(), "Unknown key for a " + token + " in [" + currentFieldName + "].", - parser.getTokenLocation()); + throw new ParsingException( + parser.getTokenLocation(), + "Unknown key for a " + token + " in [" + currentFieldName + "].", + parser.getTokenLocation() + ); } } else { - throw new ParsingException(parser.getTokenLocation(), "Unknown key for a " + token + 
" in [" + currentFieldName + "].", - parser.getTokenLocation()); + throw new ParsingException( + parser.getTokenLocation(), + "Unknown key for a " + token + " in [" + currentFieldName + "].", + parser.getTokenLocation() + ); } } return factory; @@ -810,25 +876,39 @@ public boolean equals(Object o) { if (o == null || getClass() != o.getClass()) return false; if (super.equals(o) == false) return false; TopHitsAggregationBuilder that = (TopHitsAggregationBuilder) o; - return from == that.from && - size == that.size && - explain == that.explain && - version == that.version && - seqNoAndPrimaryTerm == that.seqNoAndPrimaryTerm && - trackScores == that.trackScores && - Objects.equals(sorts, that.sorts) && - Objects.equals(highlightBuilder, that.highlightBuilder) && - Objects.equals(storedFieldsContext, that.storedFieldsContext) && - Objects.equals(docValueFields, that.docValueFields) && - Objects.equals(fetchFields, that.fetchFields) && - Objects.equals(scriptFields, that.scriptFields) && - Objects.equals(fetchSourceContext, that.fetchSourceContext); + return from == that.from + && size == that.size + && explain == that.explain + && version == that.version + && seqNoAndPrimaryTerm == that.seqNoAndPrimaryTerm + && trackScores == that.trackScores + && Objects.equals(sorts, that.sorts) + && Objects.equals(highlightBuilder, that.highlightBuilder) + && Objects.equals(storedFieldsContext, that.storedFieldsContext) + && Objects.equals(docValueFields, that.docValueFields) + && Objects.equals(fetchFields, that.fetchFields) + && Objects.equals(scriptFields, that.scriptFields) + && Objects.equals(fetchSourceContext, that.fetchSourceContext); } @Override public int hashCode() { - return Objects.hash(super.hashCode(), from, size, explain, version, seqNoAndPrimaryTerm, trackScores, sorts, highlightBuilder, - storedFieldsContext, docValueFields, fetchFields, scriptFields, fetchSourceContext); + return Objects.hash( + super.hashCode(), + from, + size, + explain, + version, + seqNoAndPrimaryTerm, + trackScores, + sorts, + highlightBuilder, + storedFieldsContext, + docValueFields, + fetchFields, + scriptFields, + fetchSourceContext + ); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregator.java index a10354de90705..741d6787f3e5e 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregator.java @@ -27,10 +27,10 @@ import org.apache.lucene.search.TotalHits; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.search.MaxScoreCollector; -import org.elasticsearch.core.Releasables; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.lucene.search.TopDocsAndMaxScore; import org.elasticsearch.common.util.LongObjectPagedHashMap; +import org.elasticsearch.core.Releasables; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHits; import org.elasticsearch.search.aggregations.Aggregator; @@ -63,8 +63,13 @@ private static class Collectors { private final SubSearchContext subSearchContext; private final LongObjectPagedHashMap topDocsCollectors; - TopHitsAggregator(SubSearchContext subSearchContext, String name, AggregationContext context, - Aggregator parent, Map metadata) throws IOException { + TopHitsAggregator( + SubSearchContext subSearchContext, + String name, + 
AggregationContext context, + Aggregator parent, + Map metadata + ) throws IOException { super(name, context, parent, metadata); topDocsCollectors = new LongObjectPagedHashMap<>(1, context.bigArrays()); this.subSearchContext = subSearchContext; @@ -121,8 +126,9 @@ public void collect(int docId, long bucket) throws IOException { // TODO: can we pass trackTotalHits=subSearchContext.trackTotalHits(){ // Note that this would require to catch CollectionTerminatedException collectors = new Collectors( - TopFieldCollector.create(sort.sort, topN, Integer.MAX_VALUE), - subSearchContext.trackScores() ? new MaxScoreCollector() : null); + TopFieldCollector.create(sort.sort, topN, Integer.MAX_VALUE), + subSearchContext.trackScores() ? new MaxScoreCollector() : null + ); } topDocsCollectors.put(bucket, collectors); } @@ -168,8 +174,8 @@ public InternalAggregation buildAggregation(long owningBucketOrdinal) throws IOE maxScore = collectors.maxScoreCollector.getMaxScore(); } final TopDocsAndMaxScore topDocsAndMaxScore = new TopDocsAndMaxScore(topDocs, maxScore); - subSearchContext.queryResult().topDocs(topDocsAndMaxScore, - subSearchContext.sort() == null ? null : subSearchContext.sort().formats); + subSearchContext.queryResult() + .topDocs(topDocsAndMaxScore, subSearchContext.sort() == null ? null : subSearchContext.sort().formats); int[] docIdsToLoad = new int[topDocs.scoreDocs.length]; for (int i = 0; i < topDocs.scoreDocs.length; i++) { docIdsToLoad[i] = topDocs.scoreDocs[i].doc; @@ -188,21 +194,36 @@ public InternalAggregation buildAggregation(long owningBucketOrdinal) throws IOE searchHitFields.sortValues(fieldDoc.fields, subSearchContext.sort().formats); } } - return new InternalTopHits(name, subSearchContext.from(), subSearchContext.size(), topDocsAndMaxScore, fetchResult.hits(), - metadata()); + return new InternalTopHits( + name, + subSearchContext.from(), + subSearchContext.size(), + topDocsAndMaxScore, + fetchResult.hits(), + metadata() + ); } @Override public InternalTopHits buildEmptyAggregation() { TopDocs topDocs; if (subSearchContext.sort() != null) { - topDocs = new TopFieldDocs(new TotalHits(0, TotalHits.Relation.EQUAL_TO), new FieldDoc[0], - subSearchContext.sort().sort.getSort()); + topDocs = new TopFieldDocs( + new TotalHits(0, TotalHits.Relation.EQUAL_TO), + new FieldDoc[0], + subSearchContext.sort().sort.getSort() + ); } else { topDocs = Lucene.EMPTY_TOP_DOCS; } - return new InternalTopHits(name, subSearchContext.from(), subSearchContext.size(), new TopDocsAndMaxScore(topDocs, Float.NaN), - SearchHits.empty(), metadata()); + return new InternalTopHits( + name, + subSearchContext.from(), + subSearchContext.size(), + new TopDocsAndMaxScore(topDocs, Float.NaN), + SearchHits.empty(), + metadata() + ); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregatorFactory.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregatorFactory.java index 9c94b929e9458..9054372fc3f3f 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregatorFactory.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregatorFactory.java @@ -44,24 +44,26 @@ class TopHitsAggregatorFactory extends AggregatorFactory { private final List scriptFields; private final FetchSourceContext fetchSourceContext; - TopHitsAggregatorFactory(String name, - int from, - int size, - boolean explain, - boolean version, - boolean seqNoAndPrimaryTerm, - boolean trackScores, - Optional 
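TopHitsAggregator keeps one Lucene top-docs collector per bucket ordinal in a LongObjectPagedHashMap, so each bucket independently tracks its best N hits. The sketch below models that per-bucket bookkeeping with a size-capped min-heap (hypothetical types; the real code uses TopFieldCollector/TopScoreDocCollector plus an optional MaxScoreCollector):

    import java.util.HashMap;
    import java.util.Map;
    import java.util.PriorityQueue;

    final class TopHitsPerBucket {
        record Hit(int docId, float score) {}

        private final int topN;
        // One bounded min-heap per bucket ordinal, mirroring "one collector per bucket".
        private final Map<Long, PriorityQueue<Hit>> buckets = new HashMap<>();

        TopHitsPerBucket(int topN) {
            this.topN = topN;
        }

        void collect(long bucketOrd, int docId, float score) {
            PriorityQueue<Hit> heap = buckets.computeIfAbsent(
                bucketOrd,
                ord -> new PriorityQueue<>((a, b) -> Float.compare(a.score(), b.score()))
            );
            heap.offer(new Hit(docId, score));
            if (heap.size() > topN) {
                heap.poll(); // evict the current lowest-scoring hit
            }
        }

        public static void main(String[] args) {
            TopHitsPerBucket top = new TopHitsPerBucket(2);
            top.collect(0, 1, 0.3f);
            top.collect(0, 2, 0.9f);
            top.collect(0, 3, 0.5f);
            // Only the two highest-scoring hits for bucket 0 remain (docs 3 and 2).
            System.out.println(top.buckets.get(0L));
        }
    }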
sort, - HighlightBuilder highlightBuilder, - StoredFieldsContext storedFieldsContext, - List docValueFields, - List fetchFields, - List scriptFields, - FetchSourceContext fetchSourceContext, - AggregationContext context, - AggregatorFactory parent, - AggregatorFactories.Builder subFactories, - Map metadata) throws IOException { + TopHitsAggregatorFactory( + String name, + int from, + int size, + boolean explain, + boolean version, + boolean seqNoAndPrimaryTerm, + boolean trackScores, + Optional sort, + HighlightBuilder highlightBuilder, + StoredFieldsContext storedFieldsContext, + List docValueFields, + List fetchFields, + List scriptFields, + FetchSourceContext fetchSourceContext, + AggregationContext context, + AggregatorFactory parent, + AggregatorFactories.Builder subFactories, + Map metadata + ) throws IOException { super(name, context, parent, subFactories, metadata); this.from = from; this.size = size; @@ -96,7 +98,9 @@ public Aggregator createInternal(Aggregator parent, CardinalityUpperBound cardin } if (docValueFields != null) { FetchDocValuesContext docValuesContext = new FetchDocValuesContext( - subSearchContext.getSearchExecutionContext(), docValueFields); + subSearchContext.getSearchExecutionContext(), + docValueFields + ); subSearchContext.docValuesContext(docValuesContext); } if (fetchFields != null) { @@ -105,7 +109,7 @@ public Aggregator createInternal(Aggregator parent, CardinalityUpperBound cardin } for (ScriptFieldsContext.ScriptField field : scriptFields) { subSearchContext.scriptFields().add(field); - } + } if (fetchSourceContext != null) { subSearchContext.fetchSourceContext(fetchSourceContext); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ValueCountAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ValueCountAggregationBuilder.java index a6ea7def2ac42..42f6949c2988c 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ValueCountAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ValueCountAggregationBuilder.java @@ -34,8 +34,10 @@ public class ValueCountAggregationBuilder extends ValuesSourceAggregationBuilder MetricAggregatorSupplier.class ); - public static final ObjectParser PARSER = - ObjectParser.fromBuilder(NAME, ValueCountAggregationBuilder::new); + public static final ObjectParser PARSER = ObjectParser.fromBuilder( + NAME, + ValueCountAggregationBuilder::new + ); static { ValuesSourceAggregationBuilder.declareFields(PARSER, true, true, false); } @@ -48,8 +50,11 @@ public ValueCountAggregationBuilder(String name) { super(name); } - protected ValueCountAggregationBuilder(ValueCountAggregationBuilder clone, - AggregatorFactories.Builder factoriesBuilder, Map metadata) { + protected ValueCountAggregationBuilder( + ValueCountAggregationBuilder clone, + AggregatorFactories.Builder factoriesBuilder, + Map metadata + ) { super(clone, factoriesBuilder, metadata); } @@ -81,12 +86,13 @@ protected boolean serializeTargetValueType(Version version) { } @Override - protected ValueCountAggregatorFactory innerBuild(AggregationContext context, - ValuesSourceConfig config, - AggregatorFactory parent, - AggregatorFactories.Builder subFactoriesBuilder) throws IOException { - MetricAggregatorSupplier aggregatorSupplier = - context.getValuesSourceRegistry().getAggregator(REGISTRY_KEY, config); + protected ValueCountAggregatorFactory innerBuild( + AggregationContext context, + ValuesSourceConfig config, + AggregatorFactory parent, + 
AggregatorFactories.Builder subFactoriesBuilder + ) throws IOException { + MetricAggregatorSupplier aggregatorSupplier = context.getValuesSourceRegistry().getAggregator(REGISTRY_KEY, config); return new ValueCountAggregatorFactory(name, config, context, parent, subFactoriesBuilder, metadata, aggregatorSupplier); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ValueCountAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ValueCountAggregator.java index bfc944404e32f..622b8928929b3 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ValueCountAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ValueCountAggregator.java @@ -10,8 +10,8 @@ import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.SortedNumericDocValues; import org.apache.lucene.search.ScoreMode; -import org.elasticsearch.core.Releasables; import org.elasticsearch.common.util.LongArray; +import org.elasticsearch.core.Releasables; import org.elasticsearch.index.fielddata.MultiGeoPointValues; import org.elasticsearch.index.fielddata.SortedBinaryDocValues; import org.elasticsearch.search.aggregations.Aggregator; @@ -39,11 +39,12 @@ public class ValueCountAggregator extends NumericMetricsAggregator.SingleValue { LongArray counts; public ValueCountAggregator( - String name, - ValuesSourceConfig valuesSourceConfig, - AggregationContext aggregationContext, - Aggregator parent, - Map metadata) throws IOException { + String name, + ValuesSourceConfig valuesSourceConfig, + AggregationContext aggregationContext, + Aggregator parent, + Map metadata + ) throws IOException { super(name, aggregationContext, parent, metadata); // TODO: stop expecting nulls here this.valuesSource = valuesSourceConfig.hasValues() ? 
valuesSourceConfig.getValuesSource() : null; @@ -53,14 +54,13 @@ public ValueCountAggregator( } @Override - public LeafBucketCollector getLeafCollector(LeafReaderContext ctx, - final LeafBucketCollector sub) throws IOException { + public LeafBucketCollector getLeafCollector(LeafReaderContext ctx, final LeafBucketCollector sub) throws IOException { if (valuesSource == null) { return LeafBucketCollector.NO_OP_COLLECTOR; } if (valuesSource instanceof ValuesSource.Numeric) { - final SortedNumericDocValues values = ((ValuesSource.Numeric)valuesSource).longValues(ctx); + final SortedNumericDocValues values = ((ValuesSource.Numeric) valuesSource).longValues(ctx); return new LeafBucketCollectorBase(sub, values) { @Override @@ -73,7 +73,7 @@ public void collect(int doc, long bucket) throws IOException { }; } if (valuesSource instanceof ValuesSource.Bytes.GeoPoint) { - MultiGeoPointValues values = ((ValuesSource.GeoPoint)valuesSource).geoPointValues(ctx); + MultiGeoPointValues values = ((ValuesSource.GeoPoint) valuesSource).geoPointValues(ctx); return new LeafBucketCollectorBase(sub, null) { @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ValueCountAggregatorFactory.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ValueCountAggregatorFactory.java index 06b69762a7758..fa6fe0a6ad0ea 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ValueCountAggregatorFactory.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ValueCountAggregatorFactory.java @@ -29,10 +29,15 @@ public static void registerAggregators(ValuesSourceRegistry.Builder builder) { builder.register(ValueCountAggregationBuilder.REGISTRY_KEY, CoreValuesSourceType.ALL_CORE, ValueCountAggregator::new, true); } - ValueCountAggregatorFactory(String name, ValuesSourceConfig config, AggregationContext context, - AggregatorFactory parent, AggregatorFactories.Builder subFactoriesBuilder, - Map metadata, - MetricAggregatorSupplier aggregatorSupplier) throws IOException { + ValueCountAggregatorFactory( + String name, + ValuesSourceConfig config, + AggregationContext context, + AggregatorFactory parent, + AggregatorFactories.Builder subFactoriesBuilder, + Map metadata, + MetricAggregatorSupplier aggregatorSupplier + ) throws IOException { super(name, config, context, parent, subFactoriesBuilder, metadata); this.aggregatorSupplier = aggregatorSupplier; @@ -44,12 +49,8 @@ protected Aggregator createUnmapped(Aggregator parent, Map metad } @Override - protected Aggregator doCreateInternal( - Aggregator parent, - CardinalityUpperBound bucketCardinality, - Map metadata - ) throws IOException { - return aggregatorSupplier - .build(name, config, context, parent, metadata); + protected Aggregator doCreateInternal(Aggregator parent, CardinalityUpperBound bucketCardinality, Map metadata) + throws IOException { + return aggregatorSupplier.build(name, config, context, parent, metadata); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/WeightedAvgAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/WeightedAvgAggregationBuilder.java index 91aeff01c5006..22e67d7fbc849 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/WeightedAvgAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/WeightedAvgAggregationBuilder.java @@ -8,10 +8,10 @@ package org.elasticsearch.search.aggregations.metrics; -import 
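The ValueCountAggregator hunks are cosmetic, but the semantics they preserve are easy to misread: value_count counts values, not documents, so a multi-valued field contributes every value a document holds. A small, self-contained sketch of that difference:

    import java.util.List;

    final class ValueCountSketch {
        public static void main(String[] args) {
            // Each inner list holds one document's values for a multi-valued field.
            List<List<Long>> docs = List.of(List.of(1L), List.of(2L, 3L, 4L), List.<Long>of());

            long docCount = docs.stream().filter(values -> values.isEmpty() == false).count();
            long valueCount = docs.stream().mapToLong(List::size).sum();

            System.out.println(docCount);   // 2 documents have at least one value
            System.out.println(valueCount); // 4 values in total, which is what value_count reports
        }
    }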
org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.ObjectParser; +import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.query.QueryBuilder; @@ -39,8 +39,10 @@ public class WeightedAvgAggregationBuilder extends MultiValuesSourceAggregationB public static final ParseField VALUE_FIELD = new ParseField("value"); public static final ParseField WEIGHT_FIELD = new ParseField("weight"); - public static final ObjectParser PARSER = - ObjectParser.fromBuilder(NAME, WeightedAvgAggregationBuilder::new); + public static final ObjectParser PARSER = ObjectParser.fromBuilder( + NAME, + WeightedAvgAggregationBuilder::new + ); static { MultiValuesSourceParseHelper.declareCommon(PARSER, true, ValueType.NUMERIC); MultiValuesSourceParseHelper.declareField(VALUE_FIELD.getPreferredName(), PARSER, true, false, false, false); @@ -99,12 +101,14 @@ public BucketCardinality bucketCardinality() { } @Override - protected MultiValuesSourceAggregatorFactory innerBuild(AggregationContext context, - Map configs, - Map filters, - DocValueFormat format, - AggregatorFactory parent, - Builder subFactoriesBuilder) throws IOException { + protected MultiValuesSourceAggregatorFactory innerBuild( + AggregationContext context, + Map configs, + Map filters, + DocValueFormat format, + AggregatorFactory parent, + Builder subFactoriesBuilder + ) throws IOException { return new WeightedAvgAggregatorFactory(name, configs, format, context, parent, subFactoriesBuilder, metadata); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/WeightedAvgAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/WeightedAvgAggregator.java index 5b198f60933a0..06030134e62d6 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/WeightedAvgAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/WeightedAvgAggregator.java @@ -9,8 +9,8 @@ import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.ScoreMode; -import org.elasticsearch.core.Releasables; import org.elasticsearch.common.util.DoubleArray; +import org.elasticsearch.core.Releasables; import org.elasticsearch.index.fielddata.SortedNumericDoubleValues; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.AggregationExecutionException; @@ -37,8 +37,14 @@ class WeightedAvgAggregator extends NumericMetricsAggregator.SingleValue { private DoubleArray weightCompensations; private DocValueFormat format; - WeightedAvgAggregator(String name, MultiValuesSource.NumericMultiValuesSource valuesSources, DocValueFormat format, - AggregationContext context, Aggregator parent, Map metadata) throws IOException { + WeightedAvgAggregator( + String name, + MultiValuesSource.NumericMultiValuesSource valuesSources, + DocValueFormat format, + AggregationContext context, + Aggregator parent, + Map metadata + ) throws IOException { super(name, context, parent, metadata); this.valuesSources = valuesSources; this.format = format; @@ -56,8 +62,7 @@ public ScoreMode scoreMode() { } @Override - public LeafBucketCollector getLeafCollector(LeafReaderContext ctx, - final LeafBucketCollector sub) throws IOException { + public LeafBucketCollector 
getLeafCollector(LeafReaderContext ctx, final LeafBucketCollector sub) throws IOException { if (valuesSources == null) { return LeafBucketCollector.NO_OP_COLLECTOR; } @@ -76,8 +81,10 @@ public void collect(int doc, long bucket) throws IOException { if (docValues.advanceExact(doc) && docWeights.advanceExact(doc)) { if (docWeights.docValueCount() > 1) { - throw new AggregationExecutionException("Encountered more than one weight for a " + - "single document. Use a script to combine multiple weights-per-doc into a single value."); + throw new AggregationExecutionException( + "Encountered more than one weight for a " + + "single document. Use a script to combine multiple weights-per-doc into a single value." + ); } // There should always be one weight if advanceExact lands us here, either // a real weight or a `missing` weight @@ -109,7 +116,6 @@ public void collect(int doc, long bucket) throws IOException { }; } - @Override public double metric(long owningBucketOrd) { if (valuesSources == null || owningBucketOrd >= valueSums.size()) { diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/WeightedAvgAggregatorFactory.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/WeightedAvgAggregatorFactory.java index 28f620ffc799d..ed4f85657ea8a 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/WeightedAvgAggregatorFactory.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/WeightedAvgAggregatorFactory.java @@ -25,10 +25,15 @@ class WeightedAvgAggregatorFactory extends MultiValuesSourceAggregatorFactory { - WeightedAvgAggregatorFactory(String name, Map configs, - DocValueFormat format, AggregationContext context, AggregatorFactory parent, - AggregatorFactories.Builder subFactoriesBuilder, - Map metadata) throws IOException { + WeightedAvgAggregatorFactory( + String name, + Map configs, + DocValueFormat format, + AggregationContext context, + AggregatorFactory parent, + AggregatorFactories.Builder subFactoriesBuilder, + Map metadata + ) throws IOException { super(name, configs, format, context, parent, subFactoriesBuilder, metadata); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/package-info.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/package-info.java index 128ab14382891..37e6be20379a7 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/package-info.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/package-info.java @@ -10,4 +10,3 @@ * Aggregations module */ package org.elasticsearch.search.aggregations.metrics; - diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/AbstractPipelineAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/AbstractPipelineAggregationBuilder.java index f71d9b3422771..0e6a4e529a6e1 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/AbstractPipelineAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/AbstractPipelineAggregationBuilder.java @@ -7,9 +7,9 @@ */ package org.elasticsearch.search.aggregations.pipeline; -import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.xcontent.XContentBuilder; import 
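The weighted-avg collector above pairs each running sum with a compensation term (weightCompensations, for instance), which points at compensated (Kahan) summation: the rounding error of each addition is carried forward instead of being dropped, so long runs of small doubles stay accurate before the final division. A standalone sketch of that accumulation (plain fields rather than the aggregator's DoubleArray-backed, per-bucket version):

    final class WeightedAvg {
        private double valueSum = 0, valueCompensation = 0;
        private double weightSum = 0, weightCompensation = 0;

        void collect(double value, double weight) {
            // Kahan summation: carry the rounding error forward instead of dropping it.
            double correctedValue = value * weight - valueCompensation;
            double newValueSum = valueSum + correctedValue;
            valueCompensation = (newValueSum - valueSum) - correctedValue;
            valueSum = newValueSum;

            double correctedWeight = weight - weightCompensation;
            double newWeightSum = weightSum + correctedWeight;
            weightCompensation = (newWeightSum - weightSum) - correctedWeight;
            weightSum = newWeightSum;
        }

        double metric() {
            return valueSum / weightSum; // NaN when nothing has been collected yet
        }

        public static void main(String[] args) {
            WeightedAvg avg = new WeightedAvg();
            avg.collect(2.0, 1.0);
            avg.collect(4.0, 3.0);
            System.out.println(avg.metric()); // (2*1 + 4*3) / (1 + 3) = 3.5
        }
    }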
org.elasticsearch.search.aggregations.PipelineAggregationBuilder; @@ -21,8 +21,8 @@ /** * Base implementation of a {@link PipelineAggregationBuilder}. */ -public abstract class AbstractPipelineAggregationBuilder> - extends PipelineAggregationBuilder { +public abstract class AbstractPipelineAggregationBuilder> extends + PipelineAggregationBuilder { /** * Field shared by many parsers. diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/AvgBucketPipelineAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/AvgBucketPipelineAggregationBuilder.java index 3f99e85284fb1..d997cfe7ef580 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/AvgBucketPipelineAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/AvgBucketPipelineAggregationBuilder.java @@ -46,8 +46,11 @@ protected XContentBuilder doXContentBody(XContentBuilder builder, Params params) public static final PipelineAggregator.Parser PARSER = new BucketMetricsParser() { @Override - protected AvgBucketPipelineAggregationBuilder buildFactory(String pipelineAggregatorName, - String bucketsPath, Map params) { + protected AvgBucketPipelineAggregationBuilder buildFactory( + String pipelineAggregatorName, + String bucketsPath, + Map params + ) { return new AvgBucketPipelineAggregationBuilder(pipelineAggregatorName, bucketsPath); } }; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/AvgBucketPipelineAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/AvgBucketPipelineAggregator.java index dba9f8c97bf3a..f81180a151351 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/AvgBucketPipelineAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/AvgBucketPipelineAggregator.java @@ -18,8 +18,13 @@ public class AvgBucketPipelineAggregator extends BucketMetricsPipelineAggregator private int count = 0; private double sum = 0; - AvgBucketPipelineAggregator(String name, String[] bucketsPaths, GapPolicy gapPolicy, DocValueFormat format, - Map metadata) { + AvgBucketPipelineAggregator( + String name, + String[] bucketsPaths, + GapPolicy gapPolicy, + DocValueFormat format, + Map metadata + ) { super(name, bucketsPaths, gapPolicy, format, metadata); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/BucketHelpers.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/BucketHelpers.java index 6bbf6b510e368..29e2d6f8438d3 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/BucketHelpers.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/BucketHelpers.java @@ -8,12 +8,12 @@ package org.elasticsearch.search.aggregations.pipeline; -import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; +import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.xcontent.XContentLocation; import org.elasticsearch.search.aggregations.AggregationExecutionException; import org.elasticsearch.search.aggregations.InternalMultiBucketAggregation; @@ -90,8 +90,13 @@ public static GapPolicy parse(String text, 
XContentLocation tokenLocation) { if (result == null) { result = policy; } else { - throw new IllegalStateException("Text can be parsed to 2 different gap policies: text=[" + text - + "], " + "policies=" + Arrays.asList(result, policy)); + throw new IllegalStateException( + "Text can be parsed to 2 different gap policies: text=[" + + text + + "], " + + "policies=" + + Arrays.asList(result, policy) + ); } } } @@ -172,20 +177,30 @@ public String getName() { * @return The value extracted from bucket found at * aggPath */ - public static Double resolveBucketValue(MultiBucketsAggregation agg, - InternalMultiBucketAggregation.InternalBucket bucket, String aggPath, GapPolicy gapPolicy) { + public static Double resolveBucketValue( + MultiBucketsAggregation agg, + InternalMultiBucketAggregation.InternalBucket bucket, + String aggPath, + GapPolicy gapPolicy + ) { List aggPathsList = AggregationPath.parse(aggPath).getPathElementsAsStringList(); return resolveBucketValue(agg, bucket, aggPathsList, gapPolicy); } - public static Double resolveBucketValue(MultiBucketsAggregation agg, - InternalMultiBucketAggregation.InternalBucket bucket, List aggPathAsList, GapPolicy gapPolicy) { + public static Double resolveBucketValue( + MultiBucketsAggregation agg, + InternalMultiBucketAggregation.InternalBucket bucket, + List aggPathAsList, + GapPolicy gapPolicy + ) { try { Object propertyValue = bucket.getProperty(agg.getName(), aggPathAsList); if (propertyValue == null) { - throw new AggregationExecutionException(AbstractPipelineAggregationBuilder.BUCKETS_PATH_FIELD.getPreferredName() - + " must reference either a number value or a single value numeric metric aggregation"); + throw new AggregationExecutionException( + AbstractPipelineAggregationBuilder.BUCKETS_PATH_FIELD.getPreferredName() + + " must reference either a number value or a single value numeric metric aggregation" + ); } else { double value; if (propertyValue instanceof Number) { @@ -211,8 +226,11 @@ public static Double resolveBucketValue(MultiBucketsAggregation agg, /** * Inspects where we are in the agg tree and tries to format a helpful error */ - private static AggregationExecutionException formatResolutionError(MultiBucketsAggregation agg, - List aggPathAsList, Object propertyValue) { + private static AggregationExecutionException formatResolutionError( + MultiBucketsAggregation agg, + List aggPathAsList, + Object propertyValue + ) { String currentAggName; Object currentAgg; if (aggPathAsList.isEmpty()) { @@ -223,13 +241,21 @@ private static AggregationExecutionException formatResolutionError(MultiBucketsA currentAgg = propertyValue; } if (currentAgg instanceof InternalNumericMetricsAggregation.MultiValue) { - return new AggregationExecutionException(AbstractPipelineAggregationBuilder.BUCKETS_PATH_FIELD.getPreferredName() - + " must reference either a number value or a single value numeric metric aggregation, but [" + currentAggName - + "] contains multiple values. Please specify which to use."); + return new AggregationExecutionException( + AbstractPipelineAggregationBuilder.BUCKETS_PATH_FIELD.getPreferredName() + + " must reference either a number value or a single value numeric metric aggregation, but [" + + currentAggName + + "] contains multiple values. Please specify which to use." 
+ ); } else { - return new AggregationExecutionException(AbstractPipelineAggregationBuilder.BUCKETS_PATH_FIELD.getPreferredName() - + " must reference either a number value or a single value numeric metric aggregation, got: [" - + propertyValue.getClass().getSimpleName() + "] at aggregation [" + currentAggName + "]"); + return new AggregationExecutionException( + AbstractPipelineAggregationBuilder.BUCKETS_PATH_FIELD.getPreferredName() + + " must reference either a number value or a single value numeric metric aggregation, got: [" + + propertyValue.getClass().getSimpleName() + + "] at aggregation [" + + currentAggName + + "]" + ); } } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/BucketMetricsParser.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/BucketMetricsParser.java index d6c09c9f066d6..5377b0d7ab6b0 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/BucketMetricsParser.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/BucketMetricsParser.java @@ -8,8 +8,8 @@ package org.elasticsearch.search.aggregations.pipeline; -import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.ParsingException; +import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy; @@ -31,8 +31,7 @@ public BucketMetricsParser() { } @Override - public final BucketMetricsPipelineAggregationBuilder parse(String pipelineAggregatorName, XContentParser parser) - throws IOException { + public final BucketMetricsPipelineAggregationBuilder parse(String pipelineAggregatorName, XContentParser parser) throws IOException { XContentParser.Token token; String currentFieldName = null; String[] bucketsPaths = null; @@ -70,11 +69,13 @@ public final BucketMetricsPipelineAggregationBuilder parse(String pipelineAgg } if (bucketsPaths == null) { - throw new ParsingException(parser.getTokenLocation(), - "Missing required field [" + BUCKETS_PATH.getPreferredName() + "] for aggregation [" + pipelineAggregatorName + "]"); + throw new ParsingException( + parser.getTokenLocation(), + "Missing required field [" + BUCKETS_PATH.getPreferredName() + "] for aggregation [" + pipelineAggregatorName + "]" + ); } - BucketMetricsPipelineAggregationBuilder factory = buildFactory(pipelineAggregatorName, bucketsPaths[0], params); + BucketMetricsPipelineAggregationBuilder factory = buildFactory(pipelineAggregatorName, bucketsPaths[0], params); if (format != null) { factory.format(format); } @@ -82,24 +83,34 @@ public final BucketMetricsPipelineAggregationBuilder parse(String pipelineAgg factory.gapPolicy(gapPolicy); } - assert(factory != null); + assert (factory != null); return factory; } - protected abstract BucketMetricsPipelineAggregationBuilder buildFactory(String pipelineAggregatorName, String bucketsPaths, - Map params); + protected abstract BucketMetricsPipelineAggregationBuilder buildFactory( + String pipelineAggregatorName, + String bucketsPaths, + Map params + ); - protected boolean token(XContentParser parser, String field, - XContentParser.Token token, Map params) throws IOException { + protected boolean token(XContentParser parser, String field, XContentParser.Token token, Map params) + throws IOException { return false; } - private void parseToken(String aggregationName, XContentParser parser, String currentFieldName, - XContentParser.Token currentToken, Map params) 
throws IOException { + private void parseToken( + String aggregationName, + XContentParser parser, + String currentFieldName, + XContentParser.Token currentToken, + Map params + ) throws IOException { if (token(parser, currentFieldName, currentToken, params) == false) { - throw new ParsingException(parser.getTokenLocation(), - "Unexpected token " + currentToken + " [" + currentFieldName + "] in [" + aggregationName + "]"); + throw new ParsingException( + parser.getTokenLocation(), + "Unexpected token " + currentToken + " [" + currentFieldName + "] in [" + aggregationName + "]" + ); } } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/BucketMetricsPipelineAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/BucketMetricsPipelineAggregationBuilder.java index 65d74364fe062..e6ca1aefadb27 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/BucketMetricsPipelineAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/BucketMetricsPipelineAggregationBuilder.java @@ -20,8 +20,8 @@ import java.util.Objects; import java.util.Optional; -public abstract class BucketMetricsPipelineAggregationBuilder> - extends AbstractPipelineAggregationBuilder { +public abstract class BucketMetricsPipelineAggregationBuilder> extends + AbstractPipelineAggregationBuilder { private String format; private GapPolicy gapPolicy; @@ -108,17 +108,25 @@ protected void validate(ValidationContext context) { // metric name after them by using '.' so need to split on both to get // just the agg name final String firstAgg = bucketsPaths[0].split("[>\\.]")[0]; - Optional aggBuilder = context.getSiblingAggregations().stream() - .filter(builder -> builder.getName().equals(firstAgg)) - .findAny(); + Optional aggBuilder = context.getSiblingAggregations() + .stream() + .filter(builder -> builder.getName().equals(firstAgg)) + .findAny(); if (aggBuilder.isEmpty()) { context.addBucketPathValidationError("aggregation does not exist for aggregation [" + name + "]: " + bucketsPaths[0]); return; } if (aggBuilder.get().bucketCardinality() != AggregationBuilder.BucketCardinality.MANY) { - context.addValidationError("The first aggregation in " + PipelineAggregator.Parser.BUCKETS_PATH.getPreferredName() - + " must be a multi-bucket aggregation for aggregation [" + name + "] found :" - + aggBuilder.get().getClass().getName() + " for buckets path: " + bucketsPaths[0]); + context.addValidationError( + "The first aggregation in " + + PipelineAggregator.Parser.BUCKETS_PATH.getPreferredName() + + " must be a multi-bucket aggregation for aggregation [" + + name + + "] found :" + + aggBuilder.get().getClass().getName() + + " for buckets path: " + + bucketsPaths[0] + ); } } @@ -148,8 +156,7 @@ public boolean equals(Object obj) { if (super.equals(obj) == false) return false; @SuppressWarnings("unchecked") BucketMetricsPipelineAggregationBuilder other = (BucketMetricsPipelineAggregationBuilder) obj; - return Objects.equals(format, other.format) - && Objects.equals(gapPolicy, other.gapPolicy); + return Objects.equals(format, other.format) && Objects.equals(gapPolicy, other.gapPolicy); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/BucketMetricsPipelineAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/BucketMetricsPipelineAggregator.java index c94cacda15606..cc53eab68327a 100644 --- 
a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/BucketMetricsPipelineAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/BucketMetricsPipelineAggregator.java @@ -29,8 +29,13 @@ public abstract class BucketMetricsPipelineAggregator extends SiblingPipelineAgg protected final DocValueFormat format; protected final GapPolicy gapPolicy; - BucketMetricsPipelineAggregator(String name, String[] bucketsPaths, GapPolicy gapPolicy, DocValueFormat format, - Map metadata) { + BucketMetricsPipelineAggregator( + String name, + String[] bucketsPaths, + GapPolicy gapPolicy, + DocValueFormat format, + Map metadata + ) { super(name, bucketsPaths, metadata); this.gapPolicy = gapPolicy; this.format = format; @@ -60,8 +65,7 @@ public final InternalAggregation doReduce(Aggregations aggregations, ReduceConte * Called before initial collection and between successive collection runs. * A chance to initialize or re-initialize state */ - protected void preCollection() { - } + protected void preCollection() {} /** * Called after a collection run is finished to build the aggregation for diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/BucketScriptPipelineAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/BucketScriptPipelineAggregationBuilder.java index 1d350d29d5e1f..d581e3d46b4f7 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/BucketScriptPipelineAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/BucketScriptPipelineAggregationBuilder.java @@ -41,14 +41,21 @@ public class BucketScriptPipelineAggregationBuilder extends AbstractPipelineAggr private GapPolicy gapPolicy = GapPolicy.SKIP; public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - NAME, false, (args, name) -> { - @SuppressWarnings("unchecked") - var bucketsPathsMap = (Map) args[0]; - return new BucketScriptPipelineAggregationBuilder(name, bucketsPathsMap, (Script) args[1]); - }); + NAME, + false, + (args, name) -> { + @SuppressWarnings("unchecked") + var bucketsPathsMap = (Map) args[0]; + return new BucketScriptPipelineAggregationBuilder(name, bucketsPathsMap, (Script) args[1]); + } + ); static { - PARSER.declareField(constructorArg(), BucketScriptPipelineAggregationBuilder::extractBucketPath, - BUCKETS_PATH_FIELD, ObjectParser.ValueType.OBJECT_ARRAY_OR_STRING); + PARSER.declareField( + constructorArg(), + BucketScriptPipelineAggregationBuilder::extractBucketPath, + BUCKETS_PATH_FIELD, + ObjectParser.ValueType.OBJECT_ARRAY_OR_STRING + ); Script.declareScript(PARSER, constructorArg()); PARSER.declareString(BucketScriptPipelineAggregationBuilder::format, FORMAT); @@ -60,7 +67,6 @@ public class BucketScriptPipelineAggregationBuilder extends AbstractPipelineAggr }, GAP_POLICY, ObjectParser.ValueType.STRING); }; - public BucketScriptPipelineAggregationBuilder(String name, Map bucketsPathsMap, Script script) { super(name, NAME, new TreeMap<>(bucketsPathsMap).values().toArray(new String[bucketsPathsMap.size()])); this.bucketsPathsMap = bucketsPathsMap; @@ -100,23 +106,23 @@ protected void doWriteTo(StreamOutput out) throws IOException { private static Map extractBucketPath(XContentParser parser) throws IOException { XContentParser.Token token = parser.currentToken(); - if (token == XContentParser.Token.VALUE_STRING) { - // input is a string, name of the path set to '_value'. 
- // This is a bit odd as there is not constructor for it - return Collections.singletonMap("_value", parser.text()); - } else if (token == XContentParser.Token.START_ARRAY) { - // input is an array, name of the path set to '_value' + position - Map bucketsPathsMap = new HashMap<>(); - int i =0; - while ((parser.nextToken()) != XContentParser.Token.END_ARRAY) { - String path = parser.text(); - bucketsPathsMap.put("_value" + i++, path); - } - return bucketsPathsMap; - } else { - // input is an object, it should contain name / value pairs - return parser.mapStrings(); - } + if (token == XContentParser.Token.VALUE_STRING) { + // input is a string, name of the path set to '_value'. + // This is a bit odd as there is not constructor for it + return Collections.singletonMap("_value", parser.text()); + } else if (token == XContentParser.Token.START_ARRAY) { + // input is an array, name of the path set to '_value' + position + Map bucketsPathsMap = new HashMap<>(); + int i = 0; + while ((parser.nextToken()) != XContentParser.Token.END_ARRAY) { + String path = parser.text(); + bucketsPathsMap.put("_value" + i++, path); + } + return bucketsPathsMap; + } else { + // input is an object, it should contain name / value pairs + return parser.mapStrings(); + } } private static Map convertToBucketsPathMap(String[] bucketsPaths) { diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/BucketScriptPipelineAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/BucketScriptPipelineAggregator.java index 235cf3d8e8dd5..553abf85d521e 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/BucketScriptPipelineAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/BucketScriptPipelineAggregator.java @@ -32,8 +32,14 @@ public class BucketScriptPipelineAggregator extends PipelineAggregator { private final Script script; private final Map bucketsPathsMap; - BucketScriptPipelineAggregator(String name, Map bucketsPathsMap, Script script, DocValueFormat formatter, - GapPolicy gapPolicy, Map metadata) { + BucketScriptPipelineAggregator( + String name, + Map bucketsPathsMap, + Script script, + DocValueFormat formatter, + GapPolicy gapPolicy, + Map metadata + ) { super(name, bucketsPathsMap.values().toArray(new String[0]), metadata); this.bucketsPathsMap = bucketsPathsMap; this.script = script; @@ -43,13 +49,12 @@ public class BucketScriptPipelineAggregator extends PipelineAggregator { @Override public InternalAggregation reduce(InternalAggregation aggregation, ReduceContext reduceContext) { - @SuppressWarnings({"rawtypes", "unchecked"}) + @SuppressWarnings({ "rawtypes", "unchecked" }) InternalMultiBucketAggregation originalAgg = - (InternalMultiBucketAggregation) aggregation; + (InternalMultiBucketAggregation) aggregation; List buckets = originalAgg.getBuckets(); - BucketAggregationScript.Factory factory = - reduceContext.scriptService().compile(script, BucketAggregationScript.CONTEXT); + BucketAggregationScript.Factory factory = reduceContext.scriptService().compile(script, BucketAggregationScript.CONTEXT); List newBuckets = new ArrayList<>(); for (InternalMultiBucketAggregation.InternalBucket bucket : buckets) { Map vars = new HashMap<>(); @@ -74,13 +79,16 @@ public InternalAggregation reduce(InternalAggregation aggregation, ReduceContext if (returned == null) { newBuckets.add(bucket); } else { - final List aggs = StreamSupport.stream(bucket.getAggregations().spliterator(), false).map( - (p) -> 
(InternalAggregation) p).collect(Collectors.toList()); + final List aggs = StreamSupport.stream(bucket.getAggregations().spliterator(), false) + .map((p) -> (InternalAggregation) p) + .collect(Collectors.toList()); InternalSimpleValue simpleValue = new InternalSimpleValue(name(), returned.doubleValue(), formatter, metadata()); aggs.add(simpleValue); - InternalMultiBucketAggregation.InternalBucket newBucket = originalAgg.createBucket(InternalAggregations.from(aggs), - bucket); + InternalMultiBucketAggregation.InternalBucket newBucket = originalAgg.createBucket( + InternalAggregations.from(aggs), + bucket + ); newBuckets.add(newBucket); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/BucketSelectorPipelineAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/BucketSelectorPipelineAggregationBuilder.java index 16020c7d7addd..7a57a55c21ab3 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/BucketSelectorPipelineAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/BucketSelectorPipelineAggregationBuilder.java @@ -119,8 +119,10 @@ public static BucketSelectorPipelineAggregationBuilder parse(String reducerName, } else if (Script.SCRIPT_PARSE_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { script = Script.parse(parser); } else { - throw new ParsingException(parser.getTokenLocation(), - "Unknown key for a " + token + " in [" + reducerName + "]: [" + currentFieldName + "]."); + throw new ParsingException( + parser.getTokenLocation(), + "Unknown key for a " + token + " in [" + reducerName + "]: [" + currentFieldName + "]." + ); } } else if (token == XContentParser.Token.START_ARRAY) { if (BUCKETS_PATH.match(currentFieldName, parser.getDeprecationHandler())) { @@ -134,8 +136,10 @@ public static BucketSelectorPipelineAggregationBuilder parse(String reducerName, bucketsPathsMap.put("_value" + i, paths.get(i)); } } else { - throw new ParsingException(parser.getTokenLocation(), - "Unknown key for a " + token + " in [" + reducerName + "]: [" + currentFieldName + "]."); + throw new ParsingException( + parser.getTokenLocation(), + "Unknown key for a " + token + " in [" + reducerName + "]: [" + currentFieldName + "]." + ); } } else if (token == XContentParser.Token.START_OBJECT) { if (Script.SCRIPT_PARSE_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { @@ -147,8 +151,10 @@ public static BucketSelectorPipelineAggregationBuilder parse(String reducerName, bucketsPathsMap.put(entry.getKey(), String.valueOf(entry.getValue())); } } else { - throw new ParsingException(parser.getTokenLocation(), - "Unknown key for a " + token + " in [" + reducerName + "]: [" + currentFieldName + "]."); + throw new ParsingException( + parser.getTokenLocation(), + "Unknown key for a " + token + " in [" + reducerName + "]: [" + currentFieldName + "]." 
+ ); } } else { throw new ParsingException(parser.getTokenLocation(), "Unexpected token " + token + " in [" + reducerName + "]."); @@ -156,17 +162,28 @@ public static BucketSelectorPipelineAggregationBuilder parse(String reducerName, } if (bucketsPathsMap == null) { - throw new ParsingException(parser.getTokenLocation(), "Missing required field [" + BUCKETS_PATH.getPreferredName() - + "] for bucket_selector aggregation [" + reducerName + "]"); + throw new ParsingException( + parser.getTokenLocation(), + "Missing required field [" + BUCKETS_PATH.getPreferredName() + "] for bucket_selector aggregation [" + reducerName + "]" + ); } if (script == null) { - throw new ParsingException(parser.getTokenLocation(), "Missing required field [" + Script.SCRIPT_PARSE_FIELD.getPreferredName() - + "] for bucket_selector aggregation [" + reducerName + "]"); + throw new ParsingException( + parser.getTokenLocation(), + "Missing required field [" + + Script.SCRIPT_PARSE_FIELD.getPreferredName() + + "] for bucket_selector aggregation [" + + reducerName + + "]" + ); } - BucketSelectorPipelineAggregationBuilder factory = - new BucketSelectorPipelineAggregationBuilder(reducerName, bucketsPathsMap, script); + BucketSelectorPipelineAggregationBuilder factory = new BucketSelectorPipelineAggregationBuilder( + reducerName, + bucketsPathsMap, + script + ); if (gapPolicy != null) { factory.gapPolicy(gapPolicy); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/BucketSelectorPipelineAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/BucketSelectorPipelineAggregator.java index b582130c23e74..07035a2c22f15 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/BucketSelectorPipelineAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/BucketSelectorPipelineAggregator.java @@ -27,8 +27,13 @@ public class BucketSelectorPipelineAggregator extends PipelineAggregator { private Script script; private Map bucketsPathsMap; - BucketSelectorPipelineAggregator(String name, Map bucketsPathsMap, Script script, GapPolicy gapPolicy, - Map metadata) { + BucketSelectorPipelineAggregator( + String name, + Map bucketsPathsMap, + Script script, + GapPolicy gapPolicy, + Map metadata + ) { super(name, bucketsPathsMap.values().toArray(new String[0]), metadata); this.bucketsPathsMap = bucketsPathsMap; this.script = script; @@ -37,13 +42,13 @@ public class BucketSelectorPipelineAggregator extends PipelineAggregator { @Override public InternalAggregation reduce(InternalAggregation aggregation, ReduceContext reduceContext) { - @SuppressWarnings({"rawtypes", "unchecked"}) + @SuppressWarnings({ "rawtypes", "unchecked" }) InternalMultiBucketAggregation originalAgg = - (InternalMultiBucketAggregation) aggregation; + (InternalMultiBucketAggregation) aggregation; List buckets = originalAgg.getBuckets(); - BucketAggregationSelectorScript.Factory factory = - reduceContext.scriptService().compile(script, BucketAggregationSelectorScript.CONTEXT); + BucketAggregationSelectorScript.Factory factory = reduceContext.scriptService() + .compile(script, BucketAggregationSelectorScript.CONTEXT); List newBuckets = new ArrayList<>(); for (InternalMultiBucketAggregation.InternalBucket bucket : buckets) { Map vars = new HashMap<>(); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/BucketSortPipelineAggregationBuilder.java 
b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/BucketSortPipelineAggregationBuilder.java index df6630601b30a..7684e0d4484ba 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/BucketSortPipelineAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/BucketSortPipelineAggregationBuilder.java @@ -7,11 +7,11 @@ */ package org.elasticsearch.search.aggregations.pipeline; -import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.ConstructingObjectParser; import org.elasticsearch.common.xcontent.ObjectParser; +import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy; @@ -44,23 +44,26 @@ public class BucketSortPipelineAggregationBuilder extends AbstractPipelineAggreg private static final ParseField SIZE = new ParseField("size"); @SuppressWarnings("unchecked") - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>(NAME, - false, (a, context) -> new BucketSortPipelineAggregationBuilder(context, (List) a[0])); + public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + NAME, + false, + (a, context) -> new BucketSortPipelineAggregationBuilder(context, (List) a[0]) + ); static { PARSER.declareField(optionalConstructorArg(), (p, c) -> { - List> sorts = SortBuilder.fromXContent(p); - List fieldSorts = new ArrayList<>(sorts.size()); - for (SortBuilder sort : sorts) { - if (sort instanceof FieldSortBuilder == false) { - throw new IllegalArgumentException("[" + NAME + "] only supports field based sorting; incompatible sort: [" - + sort + "]"); - } - fieldSorts.add((FieldSortBuilder) sort); - } - return fieldSorts; - }, SearchSourceBuilder.SORT_FIELD, - ObjectParser.ValueType.OBJECT_ARRAY); + List> sorts = SortBuilder.fromXContent(p); + List fieldSorts = new ArrayList<>(sorts.size()); + for (SortBuilder sort : sorts) { + if (sort instanceof FieldSortBuilder == false) { + throw new IllegalArgumentException( + "[" + NAME + "] only supports field based sorting; incompatible sort: [" + sort + "]" + ); + } + fieldSorts.add((FieldSortBuilder) sort); + } + return fieldSorts; + }, SearchSourceBuilder.SORT_FIELD, ObjectParser.ValueType.OBJECT_ARRAY); PARSER.declareInt(BucketSortPipelineAggregationBuilder::from, FROM); PARSER.declareInt(BucketSortPipelineAggregationBuilder::size, SIZE); PARSER.declareField(BucketSortPipelineAggregationBuilder::gapPolicy, p -> { @@ -133,9 +136,14 @@ protected PipelineAggregator createInternal(Map metadata) { protected void validate(ValidationContext context) { context.validateHasParent(NAME, name); if (sorts.isEmpty() && size == null && from == 0) { - context.addValidationError("[" + name + "] is configured to perform nothing. Please set either of " + context.addValidationError( + "[" + + name + + "] is configured to perform nothing. 
Please set either of " + Arrays.asList(SearchSourceBuilder.SORT_FIELD.getPreferredName(), SIZE.getPreferredName(), FROM.getPreferredName()) - + " to use " + NAME); + + " to use " + + NAME + ); } } @@ -171,9 +179,9 @@ public boolean equals(Object obj) { if (super.equals(obj) == false) return false; BucketSortPipelineAggregationBuilder other = (BucketSortPipelineAggregationBuilder) obj; return Objects.equals(sorts, other.sorts) - && Objects.equals(from, other.from) - && Objects.equals(size, other.size) - && Objects.equals(gapPolicy, other.gapPolicy); + && Objects.equals(from, other.from) + && Objects.equals(size, other.size) + && Objects.equals(gapPolicy, other.gapPolicy); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/BucketSortPipelineAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/BucketSortPipelineAggregator.java index 21391baf21987..24ba865241dcf 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/BucketSortPipelineAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/BucketSortPipelineAggregator.java @@ -7,7 +7,6 @@ */ package org.elasticsearch.search.aggregations.pipeline; - import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.InternalAggregation.ReduceContext; import org.elasticsearch.search.aggregations.InternalMultiBucketAggregation; @@ -29,8 +28,14 @@ public class BucketSortPipelineAggregator extends PipelineAggregator { private final Integer size; private final GapPolicy gapPolicy; - BucketSortPipelineAggregator(String name, List sorts, int from, Integer size, GapPolicy gapPolicy, - Map metadata) { + BucketSortPipelineAggregator( + String name, + List sorts, + int from, + Integer size, + GapPolicy gapPolicy, + Map metadata + ) { super(name, sorts.stream().map(FieldSortBuilder::getFieldName).toArray(String[]::new), metadata); this.sorts = sorts; this.from = from; @@ -40,9 +45,9 @@ public class BucketSortPipelineAggregator extends PipelineAggregator { @Override public InternalAggregation reduce(InternalAggregation aggregation, ReduceContext reduceContext) { - @SuppressWarnings({"rawtypes", "unchecked"}) + @SuppressWarnings({ "rawtypes", "unchecked" }) InternalMultiBucketAggregation originalAgg = - (InternalMultiBucketAggregation) aggregation; + (InternalMultiBucketAggregation) aggregation; List buckets = originalAgg.getBuckets(); int bucketsCount = buckets.size(); int currentSize = size == null ? 
bucketsCount : size; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/CumulativeSumPipelineAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/CumulativeSumPipelineAggregationBuilder.java index 10989ce2eb265..e2f6aafaa84e1 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/CumulativeSumPipelineAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/CumulativeSumPipelineAggregationBuilder.java @@ -29,7 +29,10 @@ public class CumulativeSumPipelineAggregationBuilder extends AbstractPipelineAgg @SuppressWarnings("unchecked") public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - NAME, false, (args, name) -> new CumulativeSumPipelineAggregationBuilder(name, (List) args[0])); + NAME, + false, + (args, name) -> new CumulativeSumPipelineAggregationBuilder(name, (List) args[0]) + ); static { PARSER.declareStringArray(constructorArg(), BUCKETS_PATH_FIELD); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/CumulativeSumPipelineAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/CumulativeSumPipelineAggregator.java index d116ee48c8d0e..f6738848f087a 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/CumulativeSumPipelineAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/CumulativeSumPipelineAggregator.java @@ -28,8 +28,7 @@ public class CumulativeSumPipelineAggregator extends PipelineAggregator { private final DocValueFormat formatter; - CumulativeSumPipelineAggregator(String name, String[] bucketsPaths, DocValueFormat formatter, - Map metadata) { + CumulativeSumPipelineAggregator(String name, String[] bucketsPaths, DocValueFormat formatter, Map metadata) { super(name, bucketsPaths, metadata); this.formatter = formatter; } @@ -37,9 +36,11 @@ public class CumulativeSumPipelineAggregator extends PipelineAggregator { @Override public InternalAggregation reduce(InternalAggregation aggregation, ReduceContext reduceContext) { @SuppressWarnings("rawtypes") - InternalMultiBucketAggregation - histo = (InternalMultiBucketAggregation) aggregation; + InternalMultiBucketAggregation< + ? extends InternalMultiBucketAggregation, + ? extends InternalMultiBucketAggregation.InternalBucket> histo = (InternalMultiBucketAggregation< + ? extends InternalMultiBucketAggregation, + ? 
extends InternalMultiBucketAggregation.InternalBucket>) aggregation; List buckets = histo.getBuckets(); HistogramFactory factory = (HistogramFactory) histo; List newBuckets = new ArrayList<>(buckets.size()); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/DerivativePipelineAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/DerivativePipelineAggregationBuilder.java index 77a0537f553f5..f65bb33c77c07 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/DerivativePipelineAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/DerivativePipelineAggregationBuilder.java @@ -8,14 +8,14 @@ package org.elasticsearch.search.aggregations.pipeline; -import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.Rounding; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.core.TimeValue; +import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.core.TimeValue; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval; @@ -139,8 +139,9 @@ protected PipelineAggregator createInternal(Map metadata) { @Override protected void validate(ValidationContext context) { if (bucketsPaths.length != 1) { - context.addValidationError(PipelineAggregator.Parser.BUCKETS_PATH.getPreferredName() - + " must contain a single entry for aggregation [" + name + "]"); + context.addValidationError( + PipelineAggregator.Parser.BUCKETS_PATH.getPreferredName() + " must contain a single entry for aggregation [" + name + "]" + ); } context.validateParentAggSequentiallyOrdered(NAME, name); @@ -181,8 +182,10 @@ public static DerivativePipelineAggregationBuilder parse(String pipelineAggregat } else if (UNIT_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { units = parser.text(); } else { - throw new ParsingException(parser.getTokenLocation(), - "Unknown key for a " + token + " in [" + pipelineAggregatorName + "]: [" + currentFieldName + "]."); + throw new ParsingException( + parser.getTokenLocation(), + "Unknown key for a " + token + " in [" + pipelineAggregatorName + "]: [" + currentFieldName + "]." + ); } } else if (token == XContentParser.Token.START_ARRAY) { if (BUCKETS_PATH_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { @@ -193,22 +196,31 @@ public static DerivativePipelineAggregationBuilder parse(String pipelineAggregat } bucketsPaths = paths.toArray(new String[paths.size()]); } else { - throw new ParsingException(parser.getTokenLocation(), - "Unknown key for a " + token + " in [" + pipelineAggregatorName + "]: [" + currentFieldName + "]."); + throw new ParsingException( + parser.getTokenLocation(), + "Unknown key for a " + token + " in [" + pipelineAggregatorName + "]: [" + currentFieldName + "]." + ); } } else { - throw new ParsingException(parser.getTokenLocation(), - "Unexpected token " + token + " in [" + pipelineAggregatorName + "]."); + throw new ParsingException( + parser.getTokenLocation(), + "Unexpected token " + token + " in [" + pipelineAggregatorName + "]." 
+ ); } } if (bucketsPaths == null) { - throw new ParsingException(parser.getTokenLocation(), "Missing required field [" + BUCKETS_PATH_FIELD.getPreferredName() - + "] for derivative aggregation [" + pipelineAggregatorName + "]"); + throw new ParsingException( + parser.getTokenLocation(), + "Missing required field [" + + BUCKETS_PATH_FIELD.getPreferredName() + + "] for derivative aggregation [" + + pipelineAggregatorName + + "]" + ); } - DerivativePipelineAggregationBuilder factory = - new DerivativePipelineAggregationBuilder(pipelineAggregatorName, bucketsPaths[0]); + DerivativePipelineAggregationBuilder factory = new DerivativePipelineAggregationBuilder(pipelineAggregatorName, bucketsPaths[0]); if (format != null) { factory.format(format); } @@ -227,9 +239,7 @@ public boolean equals(Object obj) { if (obj == null || getClass() != obj.getClass()) return false; if (super.equals(obj) == false) return false; DerivativePipelineAggregationBuilder other = (DerivativePipelineAggregationBuilder) obj; - return Objects.equals(format, other.format) && - gapPolicy == other.gapPolicy && - Objects.equals(units, other.units); + return Objects.equals(format, other.format) && gapPolicy == other.gapPolicy && Objects.equals(units, other.units); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/DerivativePipelineAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/DerivativePipelineAggregator.java index 215cf05cc2225..19abd5af6fd90 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/DerivativePipelineAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/DerivativePipelineAggregator.java @@ -30,8 +30,14 @@ public class DerivativePipelineAggregator extends PipelineAggregator { private final GapPolicy gapPolicy; private final Double xAxisUnits; - DerivativePipelineAggregator(String name, String[] bucketsPaths, DocValueFormat formatter, GapPolicy gapPolicy, Long xAxisUnits, - Map metadata) { + DerivativePipelineAggregator( + String name, + String[] bucketsPaths, + DocValueFormat formatter, + GapPolicy gapPolicy, + Long xAxisUnits, + Map metadata + ) { super(name, bucketsPaths, metadata); this.formatter = formatter; this.gapPolicy = gapPolicy; @@ -41,9 +47,11 @@ public class DerivativePipelineAggregator extends PipelineAggregator { @Override public InternalAggregation reduce(InternalAggregation aggregation, ReduceContext reduceContext) { @SuppressWarnings("rawtypes") - InternalMultiBucketAggregation - histo = (InternalMultiBucketAggregation) aggregation; + InternalMultiBucketAggregation< + ? extends InternalMultiBucketAggregation, + ? extends InternalMultiBucketAggregation.InternalBucket> histo = (InternalMultiBucketAggregation< + ? extends InternalMultiBucketAggregation, + ? 
extends InternalMultiBucketAggregation.InternalBucket>) aggregation; List buckets = histo.getBuckets(); HistogramFactory factory = (HistogramFactory) histo; @@ -59,9 +67,9 @@ public InternalAggregation reduce(InternalAggregation aggregation, ReduceContext if (xAxisUnits != null) { xDiff = (thisBucketKey.doubleValue() - lastBucketKey.doubleValue()) / xAxisUnits; } - final List aggs = StreamSupport.stream(bucket.getAggregations().spliterator(), false).map((p) -> { - return (InternalAggregation) p; - }).collect(Collectors.toList()); + final List aggs = StreamSupport.stream(bucket.getAggregations().spliterator(), false) + .map((p) -> { return (InternalAggregation) p; }) + .collect(Collectors.toList()); aggs.add(new InternalDerivative(name(), gradient, xDiff, formatter, metadata())); Bucket newBucket = factory.createBucket(factory.getKey(bucket), bucket.getDocCount(), InternalAggregations.from(aggs)); newBuckets.add(newBucket); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/ExtendedStatsBucket.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/ExtendedStatsBucket.java index e6c6cc2076499..105589d21379a 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/ExtendedStatsBucket.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/ExtendedStatsBucket.java @@ -13,5 +13,4 @@ /** * Extended Statistics over a set of buckets */ -public interface ExtendedStatsBucket extends ExtendedStats { -} +public interface ExtendedStatsBucket extends ExtendedStats {} diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/ExtendedStatsBucketParser.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/ExtendedStatsBucketParser.java index db9c076dc3e37..f59af1a15db4d 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/ExtendedStatsBucketParser.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/ExtendedStatsBucketParser.java @@ -18,10 +18,15 @@ public class ExtendedStatsBucketParser extends BucketMetricsParser { static final ParseField SIGMA = new ParseField("sigma"); @Override - protected ExtendedStatsBucketPipelineAggregationBuilder buildFactory(String pipelineAggregatorName, - String bucketsPath, Map params) { - ExtendedStatsBucketPipelineAggregationBuilder factory = - new ExtendedStatsBucketPipelineAggregationBuilder(pipelineAggregatorName, bucketsPath); + protected ExtendedStatsBucketPipelineAggregationBuilder buildFactory( + String pipelineAggregatorName, + String bucketsPath, + Map params + ) { + ExtendedStatsBucketPipelineAggregationBuilder factory = new ExtendedStatsBucketPipelineAggregationBuilder( + pipelineAggregatorName, + bucketsPath + ); Double sigma = (Double) params.get(SIGMA.getPreferredName()); if (sigma != null) { factory.sigma(sigma); @@ -31,8 +36,8 @@ protected ExtendedStatsBucketPipelineAggregationBuilder buildFactory(String pipe } @Override - protected boolean token(XContentParser parser, String field, - XContentParser.Token token, Map params) throws IOException { + protected boolean token(XContentParser parser, String field, XContentParser.Token token, Map params) + throws IOException { if (SIGMA.match(field, parser.getDeprecationHandler()) && token == XContentParser.Token.VALUE_NUMBER) { params.put(SIGMA.getPreferredName(), parser.doubleValue()); return true; diff --git 
a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/ExtendedStatsBucketPipelineAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/ExtendedStatsBucketPipelineAggregationBuilder.java index a5120fab99c26..61f4d750ef7ab 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/ExtendedStatsBucketPipelineAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/ExtendedStatsBucketPipelineAggregationBuilder.java @@ -16,8 +16,8 @@ import java.util.Map; import java.util.Objects; -public class ExtendedStatsBucketPipelineAggregationBuilder - extends BucketMetricsPipelineAggregationBuilder { +public class ExtendedStatsBucketPipelineAggregationBuilder extends BucketMetricsPipelineAggregationBuilder< + ExtendedStatsBucketPipelineAggregationBuilder> { public static final String NAME = "extended_stats_bucket"; private double sigma = 2.0; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/ExtendedStatsBucketPipelineAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/ExtendedStatsBucketPipelineAggregator.java index 1edfad1890e48..4c056f9cb25c1 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/ExtendedStatsBucketPipelineAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/ExtendedStatsBucketPipelineAggregator.java @@ -22,8 +22,14 @@ public class ExtendedStatsBucketPipelineAggregator extends BucketMetricsPipeline private double max = Double.NEGATIVE_INFINITY; private double sumOfSqrs = 1; - ExtendedStatsBucketPipelineAggregator(String name, String[] bucketsPaths, double sigma, GapPolicy gapPolicy, - DocValueFormat formatter, Map metadata) { + ExtendedStatsBucketPipelineAggregator( + String name, + String[] bucketsPaths, + double sigma, + GapPolicy gapPolicy, + DocValueFormat formatter, + Map metadata + ) { super(name, bucketsPaths, gapPolicy, formatter, metadata); this.sigma = sigma; } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/InternalBucketMetricValue.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/InternalBucketMetricValue.java index 839e93c3dec65..2104715d5567a 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/InternalBucketMetricValue.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/InternalBucketMetricValue.java @@ -8,9 +8,9 @@ package org.elasticsearch.search.aggregations.pipeline; -import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.InternalAggregation; @@ -116,7 +116,6 @@ public boolean equals(Object obj) { if (obj == null || getClass() != obj.getClass()) return false; if (super.equals(obj) == false) return false; InternalBucketMetricValue other = (InternalBucketMetricValue) obj; - return Objects.equals(value, other.value) - && Arrays.equals(keys, other.keys); + return Objects.equals(value, other.value) && Arrays.equals(keys, other.keys); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/InternalDerivative.java 
b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/InternalDerivative.java index 0c75dabfa73aa..d015d51c9709f 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/InternalDerivative.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/InternalDerivative.java @@ -96,7 +96,6 @@ public boolean equals(Object obj) { if (obj == null || getClass() != obj.getClass()) return false; if (super.equals(obj) == false) return false; InternalDerivative other = (InternalDerivative) obj; - return Objects.equals(value, other.value) - && Objects.equals(normalizationFactor, other.normalizationFactor); + return Objects.equals(value, other.value) && Objects.equals(normalizationFactor, other.normalizationFactor); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/InternalExtendedStatsBucket.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/InternalExtendedStatsBucket.java index 8b97f9eff6e23..616d4b53171ce 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/InternalExtendedStatsBucket.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/InternalExtendedStatsBucket.java @@ -18,8 +18,17 @@ import java.util.Map; public class InternalExtendedStatsBucket extends InternalExtendedStats implements ExtendedStatsBucket { - InternalExtendedStatsBucket(String name, long count, double sum, double min, double max, double sumOfSqrs, double sigma, - DocValueFormat formatter, Map metadata) { + InternalExtendedStatsBucket( + String name, + long count, + double sum, + double min, + double max, + double sumOfSqrs, + double sigma, + DocValueFormat formatter, + Map metadata + ) { super(name, count, sum, min, max, sumOfSqrs, sigma, formatter, metadata); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/InternalPercentilesBucket.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/InternalPercentilesBucket.java index 1ff8245328c58..6a5f2620cc1e4 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/InternalPercentilesBucket.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/InternalPercentilesBucket.java @@ -33,12 +33,22 @@ public class InternalPercentilesBucket extends InternalNumericMetricsAggregation private final transient Map percentileLookups = new HashMap<>(); - InternalPercentilesBucket(String name, double[] percents, double[] percentiles, boolean keyed, - DocValueFormat formatter, Map metadata) { + InternalPercentilesBucket( + String name, + double[] percents, + double[] percentiles, + boolean keyed, + DocValueFormat formatter, + Map metadata + ) { super(name, metadata); if ((percentiles.length == percents.length) == false) { - throw new IllegalArgumentException("The number of provided percents and percentiles didn't match. percents: " - + Arrays.toString(percents) + ", percentiles: " + Arrays.toString(percentiles)); + throw new IllegalArgumentException( + "The number of provided percents and percentiles didn't match. 
percents: " + + Arrays.toString(percents) + + ", percentiles: " + + Arrays.toString(percentiles) + ); } this.format = formatter; this.percentiles = percentiles; @@ -83,8 +93,13 @@ public String getWriteableName() { public double percentile(double percent) throws IllegalArgumentException { Double percentile = percentileLookups.get(percent); if (percentile == null) { - throw new IllegalArgumentException("Percent requested [" + String.valueOf(percent) + "] was not" + - " one of the computed percentiles. Available keys are: " + Arrays.toString(percents)); + throw new IllegalArgumentException( + "Percent requested [" + + String.valueOf(percent) + + "] was not" + + " one of the computed percentiles. Available keys are: " + + Arrays.toString(percents) + ); } return percentile; } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/InternalStatsBucket.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/InternalStatsBucket.java index 9deda0455c037..be638cf3373ac 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/InternalStatsBucket.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/InternalStatsBucket.java @@ -18,8 +18,15 @@ import java.util.Map; public class InternalStatsBucket extends InternalStats implements StatsBucket { - public InternalStatsBucket(String name, long count, double sum, double min, double max, DocValueFormat formatter, - Map metadata) { + public InternalStatsBucket( + String name, + long count, + double sum, + double min, + double max, + DocValueFormat formatter, + Map metadata + ) { super(name, count, sum, min, max, formatter, metadata); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/MaxBucketPipelineAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/MaxBucketPipelineAggregationBuilder.java index a618ba64562cf..a03800d6dc64c 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/MaxBucketPipelineAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/MaxBucketPipelineAggregationBuilder.java @@ -46,8 +46,11 @@ protected XContentBuilder doXContentBody(XContentBuilder builder, Params params) public static final PipelineAggregator.Parser PARSER = new BucketMetricsParser() { @Override - protected MaxBucketPipelineAggregationBuilder buildFactory(String pipelineAggregatorName, - String bucketsPath, Map params) { + protected MaxBucketPipelineAggregationBuilder buildFactory( + String pipelineAggregatorName, + String bucketsPath, + Map params + ) { return new MaxBucketPipelineAggregationBuilder(pipelineAggregatorName, bucketsPath); } }; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/MaxBucketPipelineAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/MaxBucketPipelineAggregator.java index b0693f9a3d8cb..1013e149db326 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/MaxBucketPipelineAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/MaxBucketPipelineAggregator.java @@ -20,8 +20,13 @@ public class MaxBucketPipelineAggregator extends BucketMetricsPipelineAggregator private List maxBucketKeys; private double maxValue; - MaxBucketPipelineAggregator(String name, String[] bucketsPaths, GapPolicy gapPolicy, DocValueFormat formatter, - Map metadata) { + MaxBucketPipelineAggregator( + String name, + String[] 
bucketsPaths, + GapPolicy gapPolicy, + DocValueFormat formatter, + Map metadata + ) { super(name, bucketsPaths, gapPolicy, formatter, metadata); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/MinBucketPipelineAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/MinBucketPipelineAggregationBuilder.java index 6939306bb92ee..857c9487713a6 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/MinBucketPipelineAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/MinBucketPipelineAggregationBuilder.java @@ -46,8 +46,11 @@ protected XContentBuilder doXContentBody(XContentBuilder builder, Params params) public static final PipelineAggregator.Parser PARSER = new BucketMetricsParser() { @Override - protected MinBucketPipelineAggregationBuilder buildFactory(String pipelineAggregatorName, - String bucketsPath, Map params) { + protected MinBucketPipelineAggregationBuilder buildFactory( + String pipelineAggregatorName, + String bucketsPath, + Map params + ) { return new MinBucketPipelineAggregationBuilder(pipelineAggregatorName, bucketsPath); } }; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/MinBucketPipelineAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/MinBucketPipelineAggregator.java index babe8e19071a5..6517c469c6d8b 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/MinBucketPipelineAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/MinBucketPipelineAggregator.java @@ -20,8 +20,13 @@ public class MinBucketPipelineAggregator extends BucketMetricsPipelineAggregator private List minBucketKeys; private double minValue; - MinBucketPipelineAggregator(String name, String[] bucketsPaths, GapPolicy gapPolicy, DocValueFormat formatter, - Map metadata) { + MinBucketPipelineAggregator( + String name, + String[] bucketsPaths, + GapPolicy gapPolicy, + DocValueFormat formatter, + Map metadata + ) { super(name, bucketsPaths, gapPolicy, formatter, metadata); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/MovFnPipelineAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/MovFnPipelineAggregationBuilder.java index cc66e8fcc92bf..49ab98486f358 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/MovFnPipelineAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/MovFnPipelineAggregationBuilder.java @@ -8,12 +8,12 @@ package org.elasticsearch.search.aggregations.pipeline; -import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.ConstructingObjectParser; import org.elasticsearch.common.xcontent.ObjectParser; +import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.script.Script; @@ -43,12 +43,18 @@ public class MovFnPipelineAggregationBuilder extends AbstractPipelineAggregation private int shift; public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - NAME, false, - (args, name) -> new MovFnPipelineAggregationBuilder(name, (String) 
args[0], (Script) args[1], (int)args[2])); + NAME, + false, + (args, name) -> new MovFnPipelineAggregationBuilder(name, (String) args[0], (Script) args[1], (int) args[2]) + ); static { PARSER.declareString(constructorArg(), BUCKETS_PATH_FIELD); - PARSER.declareField(constructorArg(), - (p, c) -> Script.parse(p), Script.SCRIPT_PARSE_FIELD, ObjectParser.ValueType.OBJECT_OR_STRING); + PARSER.declareField( + constructorArg(), + (p, c) -> Script.parse(p), + Script.SCRIPT_PARSE_FIELD, + ObjectParser.ValueType.OBJECT_OR_STRING + ); PARSER.declareInt(constructorArg(), WINDOW); PARSER.declareInt(MovFnPipelineAggregationBuilder::setShift, SHIFT); @@ -61,9 +67,8 @@ public class MovFnPipelineAggregationBuilder extends AbstractPipelineAggregation }, GAP_POLICY, ObjectParser.ValueType.STRING); }; - public MovFnPipelineAggregationBuilder(String name, String bucketsPath, Script script, int window) { - super(name, NAME, new String[]{bucketsPath}); + super(name, NAME, new String[] { bucketsPath }); this.bucketsPathString = bucketsPath; this.script = script; if (window <= 0) { diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/MovFnPipelineAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/MovFnPipelineAggregator.java index 410b33334a7b3..a400f1eeddcf8 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/MovFnPipelineAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/MovFnPipelineAggregator.java @@ -50,9 +50,17 @@ public class MovFnPipelineAggregator extends PipelineAggregator { private final int window; private final int shift; - MovFnPipelineAggregator(String name, String bucketsPath, Script script, int window, int shift, DocValueFormat formatter, - BucketHelpers.GapPolicy gapPolicy, Map metadata) { - super(name, new String[]{bucketsPath}, metadata); + MovFnPipelineAggregator( + String name, + String bucketsPath, + Script script, + int window, + int shift, + DocValueFormat formatter, + BucketHelpers.GapPolicy gapPolicy, + Map metadata + ) { + super(name, new String[] { bucketsPath }, metadata); this.bucketsPath = bucketsPath; this.script = script; this.formatter = formatter; @@ -64,9 +72,11 @@ public class MovFnPipelineAggregator extends PipelineAggregator { @Override public InternalAggregation reduce(InternalAggregation aggregation, InternalAggregation.ReduceContext reduceContext) { @SuppressWarnings("rawtypes") - InternalMultiBucketAggregation - histo = (InternalMultiBucketAggregation) aggregation; + InternalMultiBucketAggregation< + ? extends InternalMultiBucketAggregation, + ? extends InternalMultiBucketAggregation.InternalBucket> histo = (InternalMultiBucketAggregation< + ? extends InternalMultiBucketAggregation, + ? extends InternalMultiBucketAggregation.InternalBucket>) aggregation; List buckets = histo.getBuckets(); HistogramFactory factory = (HistogramFactory) histo; @@ -90,7 +100,7 @@ public InternalAggregation reduce(InternalAggregation aggregation, InternalAggre for (InternalMultiBucketAggregation.InternalBucket bucket : buckets) { Double thisBucketValue = resolveBucketValue(histo, bucket, bucketsPaths()[0], gapPolicy); - // Default is to reuse existing bucket. Simplifies the rest of the logic, + // Default is to reuse existing bucket. 
Simplifies the rest of the logic, // since we only change newBucket if we can add to it MultiBucketsAggregation.Bucket newBucket = bucket; @@ -102,13 +112,10 @@ public InternalAggregation reduce(InternalAggregation aggregation, InternalAggre int toIndex = clamp(index + shift, values); double movavg = executableScript.execute( vars, - values.subList(fromIndex, toIndex).stream() - .mapToDouble(Double::doubleValue) - .toArray() + values.subList(fromIndex, toIndex).stream().mapToDouble(Double::doubleValue).toArray() ); - List aggs = StreamSupport - .stream(bucket.getAggregations().spliterator(), false) + List aggs = StreamSupport.stream(bucket.getAggregations().spliterator(), false) .map(InternalAggregation.class::cast) .collect(Collectors.toList()); aggs.add(new InternalSimpleValue(name(), movavg, formatter, metadata())); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/MovingFunctionScript.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/MovingFunctionScript.java index 9b355ce16f59f..af9ef625e4808 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/MovingFunctionScript.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/MovingFunctionScript.java @@ -29,6 +29,6 @@ public interface Factory extends ScriptFactory { MovingFunctionScript newInstance(); } - public static final String[] PARAMETERS = new String[] {"params", "values"}; + public static final String[] PARAMETERS = new String[] { "params", "values" }; public static final ScriptContext CONTEXT = new ScriptContext<>("moving-function", Factory.class); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/MovingFunctions.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/MovingFunctions.java index 362002f4023d2..0b982f8f2e586 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/MovingFunctions.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/MovingFunctions.java @@ -219,8 +219,7 @@ public static double holt(double[] values, double alpha, double beta) { * @param period the expected periodicity of the data * @param multiplicative true if multiplicative HW should be used. False for additive */ - public static double holtWinters(double[] values, double alpha, double beta, double gamma, - int period, boolean multiplicative) { + public static double holtWinters(double[] values, double alpha, double beta, double gamma, int period, boolean multiplicative) { if (values.length == 0) { return Double.NaN; @@ -230,8 +229,15 @@ public static double holtWinters(double[] values, double alpha, double beta, dou if (values.length < period * 2) { // We need at least two full "seasons" to use HW // This should have been caught earlier, we can't do anything now...bail - throw new IllegalArgumentException("Holt-Winters aggregation requires at least (2 * period == 2 * " - + period + " == "+(2 * period)+") data-points to function. Only [" + values.length + "] were provided."); + throw new IllegalArgumentException( + "Holt-Winters aggregation requires at least (2 * period == 2 * " + + period + + " == " + + (2 * period) + + ") data-points to function. Only [" + + values.length + + "] were provided." 
+ ); } // Smoothed value @@ -288,9 +294,9 @@ public static double holtWinters(double[] values, double alpha, double beta, dou b = beta * (s - last_s) + (1 - beta) * last_b; if (multiplicative) { - seasonal[i] = gamma * (vs[i] / (last_s + last_b )) + (1 - gamma) * seasonal[i - period]; + seasonal[i] = gamma * (vs[i] / (last_s + last_b)) + (1 - gamma) * seasonal[i - period]; } else { - seasonal[i] = gamma * (vs[i] - (last_s - last_b )) + (1 - gamma) * seasonal[i - period]; + seasonal[i] = gamma * (vs[i] - (last_s - last_b)) + (1 - gamma) * seasonal[i - period]; } last_s = s; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/ParsedBucketMetricValue.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/ParsedBucketMetricValue.java index 9e122bfa3a326..c921d01692f9b 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/ParsedBucketMetricValue.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/ParsedBucketMetricValue.java @@ -47,7 +47,10 @@ protected XContentBuilder doXContentBody(XContentBuilder builder, Params params) } private static final ObjectParser PARSER = new ObjectParser<>( - ParsedBucketMetricValue.class.getSimpleName(), true, ParsedBucketMetricValue::new); + ParsedBucketMetricValue.class.getSimpleName(), + true, + ParsedBucketMetricValue::new + ); static { declareSingleValueFields(PARSER, Double.NEGATIVE_INFINITY); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/ParsedDerivative.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/ParsedDerivative.java index 22be4129d9e63..88e950e90a312 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/ParsedDerivative.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/ParsedDerivative.java @@ -8,9 +8,9 @@ package org.elasticsearch.search.aggregations.pipeline; -import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.ObjectParser.ValueType; +import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; @@ -34,8 +34,11 @@ public String getType() { return DerivativePipelineAggregationBuilder.NAME; } - private static final ObjectParser PARSER = new ObjectParser<>(ParsedDerivative.class.getSimpleName(), true, - ParsedDerivative::new); + private static final ObjectParser PARSER = new ObjectParser<>( + ParsedDerivative.class.getSimpleName(), + true, + ParsedDerivative::new + ); static { declareSingleValueFields(PARSER, Double.NaN); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/ParsedExtendedStatsBucket.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/ParsedExtendedStatsBucket.java index 55210818398ed..0f14e27dea111 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/ParsedExtendedStatsBucket.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/ParsedExtendedStatsBucket.java @@ -12,7 +12,6 @@ import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.search.aggregations.metrics.ParsedExtendedStats; - public class ParsedExtendedStatsBucket extends ParsedExtendedStats implements ExtendedStatsBucket { @Override @@ -21,7 +20,10 @@ public String getType() { } private static final ObjectParser PARSER = new 
ObjectParser<>( - ParsedExtendedStatsBucket.class.getSimpleName(), true, ParsedExtendedStatsBucket::new); + ParsedExtendedStatsBucket.class.getSimpleName(), + true, + ParsedExtendedStatsBucket::new + ); static { declareExtendedStatsFields(PARSER); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/ParsedPercentilesBucket.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/ParsedPercentilesBucket.java index 23a4a211beccf..5f458595ade90 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/ParsedPercentilesBucket.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/ParsedPercentilesBucket.java @@ -29,8 +29,13 @@ public String getType() { public double percentile(double percent) throws IllegalArgumentException { Double value = percentiles.get(percent); if (value == null) { - throw new IllegalArgumentException("Percent requested [" + String.valueOf(percent) + "] was not" + - " one of the computed percentiles. Available keys are: " + percentiles.keySet()); + throw new IllegalArgumentException( + "Percent requested [" + + String.valueOf(percent) + + "] was not" + + " one of the computed percentiles. Available keys are: " + + percentiles.keySet() + ); } return value; } @@ -73,8 +78,11 @@ public XContentBuilder doXContentBody(XContentBuilder builder, Params params) th return builder; } - private static final ObjectParser PARSER = - new ObjectParser<>(ParsedPercentilesBucket.class.getSimpleName(), true, ParsedPercentilesBucket::new); + private static final ObjectParser PARSER = new ObjectParser<>( + ParsedPercentilesBucket.class.getSimpleName(), + true, + ParsedPercentilesBucket::new + ); static { ParsedPercentiles.declarePercentilesFields(PARSER); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/ParsedSimpleValue.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/ParsedSimpleValue.java index 6c12d5b195153..5b7c7ef6c8f3b 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/ParsedSimpleValue.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/ParsedSimpleValue.java @@ -22,8 +22,11 @@ public String getType() { return InternalSimpleValue.NAME; } - private static final ObjectParser PARSER = new ObjectParser<>(ParsedSimpleValue.class.getSimpleName(), true, - ParsedSimpleValue::new); + private static final ObjectParser PARSER = new ObjectParser<>( + ParsedSimpleValue.class.getSimpleName(), + true, + ParsedSimpleValue::new + ); static { declareSingleValueFields(PARSER, Double.NaN); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/ParsedStatsBucket.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/ParsedStatsBucket.java index acf8b86463da9..4442aaf4cb6da 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/ParsedStatsBucket.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/ParsedStatsBucket.java @@ -12,7 +12,6 @@ import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.search.aggregations.metrics.ParsedStats; - public class ParsedStatsBucket extends ParsedStats implements StatsBucket { @Override @@ -21,7 +20,10 @@ public String getType() { } private static final ObjectParser PARSER = new ObjectParser<>( - ParsedStatsBucket.class.getSimpleName(), true, ParsedStatsBucket::new); + ParsedStatsBucket.class.getSimpleName(), + true, + ParsedStatsBucket::new + ); 
static { declareStatsFields(PARSER); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/PercentilesBucket.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/PercentilesBucket.java index 8e093403a4b27..393a6165f2fe9 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/PercentilesBucket.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/PercentilesBucket.java @@ -10,5 +10,4 @@ import org.elasticsearch.search.aggregations.metrics.Percentiles; -public interface PercentilesBucket extends Percentiles { -} +public interface PercentilesBucket extends Percentiles {} diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/PercentilesBucketPipelineAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/PercentilesBucketPipelineAggregationBuilder.java index 658e4951fe78a..48edf2829f44d 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/PercentilesBucketPipelineAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/PercentilesBucketPipelineAggregationBuilder.java @@ -10,9 +10,9 @@ import com.carrotsearch.hppc.DoubleArrayList; -import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; @@ -21,8 +21,8 @@ import java.util.Map; import java.util.Objects; -public class PercentilesBucketPipelineAggregationBuilder - extends BucketMetricsPipelineAggregationBuilder { +public class PercentilesBucketPipelineAggregationBuilder extends BucketMetricsPipelineAggregationBuilder< + PercentilesBucketPipelineAggregationBuilder> { public static final String NAME = "percentiles_bucket"; static final ParseField PERCENTS_FIELD = new ParseField("percents"); static final ParseField KEYED_FIELD = new ParseField("keyed"); @@ -37,8 +37,7 @@ public PercentilesBucketPipelineAggregationBuilder(String name, String bucketsPa /** * Read from a stream. 
*/ - public PercentilesBucketPipelineAggregationBuilder(StreamInput in) - throws IOException { + public PercentilesBucketPipelineAggregationBuilder(StreamInput in) throws IOException { super(in, NAME); percents = in.readDoubleArray(); keyed = in.readBoolean(); @@ -66,8 +65,9 @@ public PercentilesBucketPipelineAggregationBuilder setPercents(double[] percents } for (Double p : percents) { if (p == null || p < 0.0 || p > 100.0) { - throw new IllegalArgumentException(PERCENTS_FIELD.getPreferredName() - + " must only contain non-null doubles from 0.0-100.0 inclusive"); + throw new IllegalArgumentException( + PERCENTS_FIELD.getPreferredName() + " must only contain non-null doubles from 0.0-100.0 inclusive" + ); } } this.percents = percents; @@ -99,8 +99,9 @@ protected void validate(ValidationContext context) { super.validate(context); for (Double p : percents) { if (p == null || p < 0.0 || p > 100.0) { - context.addValidationError(PERCENTS_FIELD.getPreferredName() - + " must only contain non-null doubles from 0.0-100.0 inclusive"); + context.addValidationError( + PERCENTS_FIELD.getPreferredName() + " must only contain non-null doubles from 0.0-100.0 inclusive" + ); return; } } @@ -118,11 +119,16 @@ protected XContentBuilder doXContentBody(XContentBuilder builder, Params params) public static final PipelineAggregator.Parser PARSER = new BucketMetricsParser() { @Override - protected PercentilesBucketPipelineAggregationBuilder buildFactory(String pipelineAggregatorName, - String bucketsPath, Map params) { + protected PercentilesBucketPipelineAggregationBuilder buildFactory( + String pipelineAggregatorName, + String bucketsPath, + Map params + ) { - PercentilesBucketPipelineAggregationBuilder factory = new - PercentilesBucketPipelineAggregationBuilder(pipelineAggregatorName, bucketsPath); + PercentilesBucketPipelineAggregationBuilder factory = new PercentilesBucketPipelineAggregationBuilder( + pipelineAggregatorName, + bucketsPath + ); double[] percents = (double[]) params.get(PERCENTS_FIELD.getPreferredName()); if (percents != null) { @@ -138,7 +144,7 @@ protected PercentilesBucketPipelineAggregationBuilder buildFactory(String pipeli @Override protected boolean token(XContentParser parser, String field, XContentParser.Token token, Map params) - throws IOException { + throws IOException { if (PERCENTS_FIELD.match(field, parser.getDeprecationHandler()) && token == XContentParser.Token.START_ARRAY) { DoubleArrayList percents = new DoubleArrayList(10); while (parser.nextToken() != XContentParser.Token.END_ARRAY) { @@ -146,8 +152,7 @@ protected boolean token(XContentParser parser, String field, XContentParser.Toke } params.put(PERCENTS_FIELD.getPreferredName(), percents.toArray()); return true; - } - else if (KEYED_FIELD.match(field, parser.getDeprecationHandler()) && token == XContentParser.Token.VALUE_BOOLEAN){ + } else if (KEYED_FIELD.match(field, parser.getDeprecationHandler()) && token == XContentParser.Token.VALUE_BOOLEAN) { params.put(KEYED_FIELD.getPreferredName(), parser.booleanValue()); return true; } @@ -167,8 +172,7 @@ public boolean equals(Object obj) { if (obj == null || getClass() != obj.getClass()) return false; if (super.equals(obj) == false) return false; PercentilesBucketPipelineAggregationBuilder other = (PercentilesBucketPipelineAggregationBuilder) obj; - return Objects.deepEquals(percents, other.percents) - && Objects.equals(keyed, other.keyed); + return Objects.deepEquals(percents, other.percents) && Objects.equals(keyed, other.keyed); } @Override diff --git 
a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/PercentilesBucketPipelineAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/PercentilesBucketPipelineAggregator.java index 9b174a1846112..1149ce6fbeb76 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/PercentilesBucketPipelineAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/PercentilesBucketPipelineAggregator.java @@ -23,8 +23,15 @@ public class PercentilesBucketPipelineAggregator extends BucketMetricsPipelineAg private boolean keyed = true; private List data; - PercentilesBucketPipelineAggregator(String name, double[] percents, boolean keyed, String[] bucketsPaths, - GapPolicy gapPolicy, DocValueFormat formatter, Map metadata) { + PercentilesBucketPipelineAggregator( + String name, + double[] percents, + boolean keyed, + String[] bucketsPaths, + GapPolicy gapPolicy, + DocValueFormat formatter, + Map metadata + ) { super(name, bucketsPaths, gapPolicy, formatter, metadata); this.percents = percents; this.keyed = keyed; @@ -32,7 +39,7 @@ public class PercentilesBucketPipelineAggregator extends BucketMetricsPipelineAg @Override protected void preCollection() { - data = new ArrayList<>(1024); + data = new ArrayList<>(1024); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/PipelineAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/PipelineAggregator.java index 3d57202f3d55c..090dfd73f028c 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/PipelineAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/PipelineAggregator.java @@ -8,7 +8,6 @@ package org.elasticsearch.search.aggregations.pipeline; - import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.search.aggregations.InternalAggregation; @@ -43,8 +42,7 @@ public interface Parser { * @throws java.io.IOException * When parsing fails */ - PipelineAggregationBuilder parse(String pipelineAggregatorName, XContentParser parser) - throws IOException; + PipelineAggregationBuilder parse(String pipelineAggregatorName, XContentParser parser) throws IOException; } /** diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/SerialDiffPipelineAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/SerialDiffPipelineAggregationBuilder.java index 94314bed79dae..de0d7a85a42f6 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/SerialDiffPipelineAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/SerialDiffPipelineAggregationBuilder.java @@ -8,10 +8,10 @@ package org.elasticsearch.search.aggregations.pipeline; -import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.search.DocValueFormat; @@ -158,21 +158,31 @@ public static SerialDiffPipelineAggregationBuilder parse(String reducerName, XCo } else if (GAP_POLICY.match(currentFieldName, parser.getDeprecationHandler())) { gapPolicy = 
GapPolicy.parse(parser.text(), parser.getTokenLocation()); } else { - throw new ParsingException(parser.getTokenLocation(), - "Unknown key for a " + token + " in [" + reducerName + "]: [" + currentFieldName + "]."); + throw new ParsingException( + parser.getTokenLocation(), + "Unknown key for a " + token + " in [" + reducerName + "]: [" + currentFieldName + "]." + ); } } else if (token == XContentParser.Token.VALUE_NUMBER) { if (LAG.match(currentFieldName, parser.getDeprecationHandler())) { lag = parser.intValue(true); if (lag <= 0) { - throw new ParsingException(parser.getTokenLocation(), - "Lag must be a positive, non-zero integer. Value supplied was" + - lag + " in [" + reducerName + "]: [" - + currentFieldName + "]."); + throw new ParsingException( + parser.getTokenLocation(), + "Lag must be a positive, non-zero integer. Value supplied was" + + lag + + " in [" + + reducerName + + "]: [" + + currentFieldName + + "]." + ); } - } else { - throw new ParsingException(parser.getTokenLocation(), - "Unknown key for a " + token + " in [" + reducerName + "]: [" + currentFieldName + "]."); + } else { + throw new ParsingException( + parser.getTokenLocation(), + "Unknown key for a " + token + " in [" + reducerName + "]: [" + currentFieldName + "]." + ); } } else if (token == XContentParser.Token.START_ARRAY) { if (BUCKETS_PATH.match(currentFieldName, parser.getDeprecationHandler())) { @@ -183,22 +193,28 @@ public static SerialDiffPipelineAggregationBuilder parse(String reducerName, XCo } bucketsPaths = paths.toArray(new String[paths.size()]); } else { - throw new ParsingException(parser.getTokenLocation(), - "Unknown key for a " + token + " in [" + reducerName + "]: [" + currentFieldName + "]."); + throw new ParsingException( + parser.getTokenLocation(), + "Unknown key for a " + token + " in [" + reducerName + "]: [" + currentFieldName + "]." 
+ ); } } else { - throw new ParsingException(parser.getTokenLocation(), "Unexpected token " + token + " in [" + reducerName + "].", - parser.getTokenLocation()); + throw new ParsingException( + parser.getTokenLocation(), + "Unexpected token " + token + " in [" + reducerName + "].", + parser.getTokenLocation() + ); } } if (bucketsPaths == null) { - throw new ParsingException(parser.getTokenLocation(), - "Missing required field [" + BUCKETS_PATH.getPreferredName() + "] for derivative aggregation [" + reducerName + "]"); + throw new ParsingException( + parser.getTokenLocation(), + "Missing required field [" + BUCKETS_PATH.getPreferredName() + "] for derivative aggregation [" + reducerName + "]" + ); } - SerialDiffPipelineAggregationBuilder factory = - new SerialDiffPipelineAggregationBuilder(reducerName, bucketsPaths[0]); + SerialDiffPipelineAggregationBuilder factory = new SerialDiffPipelineAggregationBuilder(reducerName, bucketsPaths[0]); if (lag != null) { factory.lag(lag); } @@ -222,9 +238,7 @@ public boolean equals(Object obj) { if (obj == null || getClass() != obj.getClass()) return false; if (super.equals(obj) == false) return false; SerialDiffPipelineAggregationBuilder other = (SerialDiffPipelineAggregationBuilder) obj; - return Objects.equals(format, other.format) - && Objects.equals(gapPolicy, other.gapPolicy) - && Objects.equals(lag, other.lag); + return Objects.equals(format, other.format) && Objects.equals(gapPolicy, other.gapPolicy) && Objects.equals(lag, other.lag); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/SerialDiffPipelineAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/SerialDiffPipelineAggregator.java index 8b0634e8f594b..cc522f62d9c02 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/SerialDiffPipelineAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/SerialDiffPipelineAggregator.java @@ -8,8 +8,8 @@ package org.elasticsearch.search.aggregations.pipeline; -import org.elasticsearch.core.Nullable; import org.elasticsearch.common.collect.EvictingQueue; +import org.elasticsearch.core.Nullable; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.InternalAggregation.ReduceContext; @@ -32,8 +32,14 @@ public class SerialDiffPipelineAggregator extends PipelineAggregator { private GapPolicy gapPolicy; private int lag; - SerialDiffPipelineAggregator(String name, String[] bucketsPaths, @Nullable DocValueFormat formatter, GapPolicy gapPolicy, - int lag, Map metadata) { + SerialDiffPipelineAggregator( + String name, + String[] bucketsPaths, + @Nullable DocValueFormat formatter, + GapPolicy gapPolicy, + int lag, + Map metadata + ) { super(name, bucketsPaths, metadata); this.formatter = formatter; this.gapPolicy = gapPolicy; @@ -43,9 +49,11 @@ public class SerialDiffPipelineAggregator extends PipelineAggregator { @Override public InternalAggregation reduce(InternalAggregation aggregation, ReduceContext reduceContext) { @SuppressWarnings("rawtypes") - InternalMultiBucketAggregation - histo = (InternalMultiBucketAggregation) aggregation; + InternalMultiBucketAggregation< + ? extends InternalMultiBucketAggregation, + ? extends InternalMultiBucketAggregation.InternalBucket> histo = (InternalMultiBucketAggregation< + ? extends InternalMultiBucketAggregation, + ? 
extends InternalMultiBucketAggregation.InternalBucket>) aggregation; List buckets = histo.getBuckets(); HistogramFactory factory = (HistogramFactory) histo; @@ -76,8 +84,9 @@ public InternalAggregation reduce(InternalAggregation aggregation, ReduceContext if (Double.isNaN(thisBucketValue) == false && Double.isNaN(lagValue) == false) { double diff = thisBucketValue - lagValue; - List aggs = StreamSupport.stream(bucket.getAggregations().spliterator(), false).map( - (p) -> (InternalAggregation) p).collect(Collectors.toList()); + List aggs = StreamSupport.stream(bucket.getAggregations().spliterator(), false) + .map((p) -> (InternalAggregation) p) + .collect(Collectors.toList()); aggs.add(new InternalSimpleValue(name(), diff, formatter, metadata())); newBucket = factory.createBucket(factory.getKey(bucket), bucket.getDocCount(), InternalAggregations.from(aggs)); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/StatsBucketPipelineAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/StatsBucketPipelineAggregationBuilder.java index 8302beba0a8d1..502547289e633 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/StatsBucketPipelineAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/StatsBucketPipelineAggregationBuilder.java @@ -25,8 +25,7 @@ public StatsBucketPipelineAggregationBuilder(String name, String bucketsPath) { /** * Read from a stream. */ - public StatsBucketPipelineAggregationBuilder(StreamInput in) - throws IOException { + public StatsBucketPipelineAggregationBuilder(StreamInput in) throws IOException { super(in, NAME); } @@ -47,8 +46,11 @@ protected XContentBuilder doXContentBody(XContentBuilder builder, Params params) public static final PipelineAggregator.Parser PARSER = new BucketMetricsParser() { @Override - protected StatsBucketPipelineAggregationBuilder buildFactory(String pipelineAggregatorName, - String bucketsPath, Map params) { + protected StatsBucketPipelineAggregationBuilder buildFactory( + String pipelineAggregatorName, + String bucketsPath, + Map params + ) { return new StatsBucketPipelineAggregationBuilder(pipelineAggregatorName, bucketsPath); } }; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/StatsBucketPipelineAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/StatsBucketPipelineAggregator.java index 3aeb0306e3d17..001117240af87 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/StatsBucketPipelineAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/StatsBucketPipelineAggregator.java @@ -20,8 +20,13 @@ public class StatsBucketPipelineAggregator extends BucketMetricsPipelineAggregat private double min = Double.POSITIVE_INFINITY; private double max = Double.NEGATIVE_INFINITY; - StatsBucketPipelineAggregator(String name, String[] bucketsPaths, GapPolicy gapPolicy, DocValueFormat formatter, - Map metadata) { + StatsBucketPipelineAggregator( + String name, + String[] bucketsPaths, + GapPolicy gapPolicy, + DocValueFormat formatter, + Map metadata + ) { super(name, bucketsPaths, gapPolicy, formatter, metadata); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/SumBucketPipelineAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/SumBucketPipelineAggregationBuilder.java index 049c24568277a..8914292f0a845 100644 --- 
a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/SumBucketPipelineAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/SumBucketPipelineAggregationBuilder.java @@ -46,8 +46,11 @@ protected XContentBuilder doXContentBody(XContentBuilder builder, Params params) public static final PipelineAggregator.Parser PARSER = new BucketMetricsParser() { @Override - protected SumBucketPipelineAggregationBuilder buildFactory(String pipelineAggregatorName, - String bucketsPath, Map params) { + protected SumBucketPipelineAggregationBuilder buildFactory( + String pipelineAggregatorName, + String bucketsPath, + Map params + ) { return new SumBucketPipelineAggregationBuilder(pipelineAggregatorName, bucketsPath); } }; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/SumBucketPipelineAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/SumBucketPipelineAggregator.java index bfd7a8ea4bca3..e8143d79378f3 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/SumBucketPipelineAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/SumBucketPipelineAggregator.java @@ -17,8 +17,13 @@ public class SumBucketPipelineAggregator extends BucketMetricsPipelineAggregator { private double sum = 0; - SumBucketPipelineAggregator(String name, String[] bucketsPaths, GapPolicy gapPolicy, DocValueFormat formatter, - Map metadata) { + SumBucketPipelineAggregator( + String name, + String[] bucketsPaths, + GapPolicy gapPolicy, + DocValueFormat formatter, + Map metadata + ) { super(name, bucketsPaths, gapPolicy, formatter, metadata); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/support/AggregationContext.java b/server/src/main/java/org/elasticsearch/search/aggregations/support/AggregationContext.java index c52dcacc4537b..5008fbe08eac4 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/support/AggregationContext.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/support/AggregationContext.java @@ -11,12 +11,12 @@ import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.Query; -import org.elasticsearch.core.Nullable; import org.elasticsearch.common.breaker.CircuitBreaker; import org.elasticsearch.common.breaker.PreallocatedCircuitBreakerService; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.core.Nullable; import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; -import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.analysis.NamedAnalyzer; import org.elasticsearch.index.cache.bitset.BitsetFilterCache; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/support/AggregationInfo.java b/server/src/main/java/org/elasticsearch/search/aggregations/support/AggregationInfo.java index 93c42ebbf1b38..d62d1f187c5d4 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/support/AggregationInfo.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/support/AggregationInfo.java @@ -43,7 +43,7 @@ public AggregationInfo(StreamInput in) throws IOException { String key = in.readString(); final int keys = in.readVInt(); final Set types = new TreeSet<>(); - for (int j = 0; j < keys; j ++) { + for (int j = 0; j < keys; j++) { types.add(in.readString()); } 
aggs.put(key, types); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/support/AggregationInspectionHelper.java b/server/src/main/java/org/elasticsearch/search/aggregations/support/AggregationInspectionHelper.java index 9d5c129037f6c..957d83efabf35 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/support/AggregationInspectionHelper.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/support/AggregationInspectionHelper.java @@ -13,10 +13,10 @@ import org.elasticsearch.search.aggregations.bucket.filter.InternalFilters; import org.elasticsearch.search.aggregations.bucket.geogrid.InternalGeoGrid; import org.elasticsearch.search.aggregations.bucket.global.InternalGlobal; -import org.elasticsearch.search.aggregations.bucket.histogram.InternalVariableWidthHistogram; import org.elasticsearch.search.aggregations.bucket.histogram.InternalAutoDateHistogram; import org.elasticsearch.search.aggregations.bucket.histogram.InternalDateHistogram; import org.elasticsearch.search.aggregations.bucket.histogram.InternalHistogram; +import org.elasticsearch.search.aggregations.bucket.histogram.InternalVariableWidthHistogram; import org.elasticsearch.search.aggregations.bucket.missing.InternalMissing; import org.elasticsearch.search.aggregations.bucket.nested.InternalNested; import org.elasticsearch.search.aggregations.bucket.nested.InternalReverseNested; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/support/AggregationPath.java b/server/src/main/java/org/elasticsearch/search/aggregations/support/AggregationPath.java index 6b3814f89c8e8..6f7da696cf2ba 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/support/AggregationPath.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/support/AggregationPath.java @@ -202,8 +202,8 @@ public Aggregator resolveTopmostAggregator(Aggregator root) { AggregationPath.PathElement token = pathElements.get(0); // TODO both unwrap and subAggregator are only used here! 
Aggregator aggregator = ProfilingAggregator.unwrap(root.subAggregator(token.name)); - assert (aggregator instanceof SingleBucketAggregator) - || (aggregator instanceof NumericMetricsAggregator) : "this should be picked up before aggregation execution - on validate"; + assert (aggregator instanceof SingleBucketAggregator) || (aggregator instanceof NumericMetricsAggregator) + : "this should be picked up before aggregation execution - on validate"; return aggregator; } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/support/AggregationUsageService.java b/server/src/main/java/org/elasticsearch/search/aggregations/support/AggregationUsageService.java index 5041f6be529e8..853aa152db036 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/support/AggregationUsageService.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/support/AggregationUsageService.java @@ -34,8 +34,9 @@ public void registerAggregationUsage(String aggregationName) { public void registerAggregationUsage(String aggregationName, String valuesSourceType) { Map subAgg = aggs.computeIfAbsent(aggregationName, k -> new HashMap<>()); if (subAgg.put(valuesSourceType, new LongAdder()) != null) { - throw new IllegalArgumentException("stats for aggregation [" + aggregationName + "][" + valuesSourceType + - "] already registered"); + throw new IllegalArgumentException( + "stats for aggregation [" + aggregationName + "][" + valuesSourceType + "] already registered" + ); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/support/CoreValuesSourceType.java b/server/src/main/java/org/elasticsearch/search/aggregations/support/CoreValuesSourceType.java index 40572395adf8b..124ceba77ad83 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/support/CoreValuesSourceType.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/support/CoreValuesSourceType.java @@ -59,8 +59,9 @@ public ValuesSource getScript(AggregationScript.LeafFactory script, ValueType sc public ValuesSource getField(FieldContext fieldContext, AggregationScript.LeafFactory script, AggregationContext context) { if ((fieldContext.indexFieldData() instanceof IndexNumericFieldData) == false) { - throw new IllegalArgumentException("Expected numeric type on field [" + fieldContext.field() + - "], but got [" + fieldContext.fieldType().typeName() + "]"); + throw new IllegalArgumentException( + "Expected numeric type on field [" + fieldContext.field() + "], but got [" + fieldContext.fieldType().typeName() + "]" + ); } ValuesSource.Numeric dataSource = new ValuesSource.Numeric.FieldData((IndexNumericFieldData) fieldContext.indexFieldData()); @@ -72,8 +73,12 @@ public ValuesSource getField(FieldContext fieldContext, AggregationScript.LeafFa } @Override - public ValuesSource replaceMissing(ValuesSource valuesSource, Object rawMissing, DocValueFormat docValueFormat, - AggregationContext context) { + public ValuesSource replaceMissing( + ValuesSource valuesSource, + Object rawMissing, + DocValueFormat docValueFormat, + AggregationContext context + ) { Number missing; if (rawMissing instanceof Number) { missing = (Number) rawMissing; @@ -127,8 +132,12 @@ public ValuesSource getField(FieldContext fieldContext, AggregationScript.LeafFa } @Override - public ValuesSource replaceMissing(ValuesSource valuesSource, Object rawMissing, DocValueFormat docValueFormat, - AggregationContext context) { + public ValuesSource replaceMissing( + ValuesSource valuesSource, + Object rawMissing, + 
DocValueFormat docValueFormat, + AggregationContext context + ) { final BytesRef missing = docValueFormat.parseBytesRef(rawMissing.toString()); if (valuesSource instanceof ValuesSource.Bytes.WithOrdinals) { return MissingValues.replaceMissing((ValuesSource.Bytes.WithOrdinals) valuesSource, missing); @@ -151,16 +160,21 @@ public ValuesSource getScript(AggregationScript.LeafFactory script, ValueType sc @Override public ValuesSource getField(FieldContext fieldContext, AggregationScript.LeafFactory script, AggregationContext context) { if ((fieldContext.indexFieldData() instanceof IndexGeoPointFieldData) == false) { - throw new IllegalArgumentException("Expected geo_point type on field [" + fieldContext.field() + - "], but got [" + fieldContext.fieldType().typeName() + "]"); + throw new IllegalArgumentException( + "Expected geo_point type on field [" + fieldContext.field() + "], but got [" + fieldContext.fieldType().typeName() + "]" + ); } return new ValuesSource.GeoPoint.Fielddata((IndexGeoPointFieldData) fieldContext.indexFieldData()); } @Override - public ValuesSource replaceMissing(ValuesSource valuesSource, Object rawMissing, DocValueFormat docValueFormat, - AggregationContext context) { + public ValuesSource replaceMissing( + ValuesSource valuesSource, + Object rawMissing, + DocValueFormat docValueFormat, + AggregationContext context + ) { // TODO: also support the structured formats of geo points final GeoPoint missing = new GeoPoint(rawMissing.toString()); return MissingValues.replaceMissing((ValuesSource.GeoPoint) valuesSource, missing); @@ -194,8 +208,12 @@ public ValuesSource getField(FieldContext fieldContext, AggregationScript.LeafFa } @Override - public ValuesSource replaceMissing(ValuesSource valuesSource, Object rawMissing, DocValueFormat docValueFormat, - AggregationContext context) { + public ValuesSource replaceMissing( + ValuesSource valuesSource, + Object rawMissing, + DocValueFormat docValueFormat, + AggregationContext context + ) { throw new IllegalArgumentException("Can't apply missing values on a " + valuesSource.getClass()); } }, @@ -216,8 +234,12 @@ public ValuesSource getField(FieldContext fieldContext, AggregationScript.LeafFa } @Override - public ValuesSource replaceMissing(ValuesSource valuesSource, Object rawMissing, DocValueFormat docValueFormat, - AggregationContext context) { + public ValuesSource replaceMissing( + ValuesSource valuesSource, + Object rawMissing, + DocValueFormat docValueFormat, + AggregationContext context + ) { return KEYWORD.replaceMissing(valuesSource, rawMissing, docValueFormat, context); } @@ -249,8 +271,9 @@ public ValuesSource getField(FieldContext fieldContext, AggregationScript.LeafFa private ValuesSource.Numeric fieldData(FieldContext fieldContext, AggregationContext context) { if ((fieldContext.indexFieldData() instanceof IndexNumericFieldData) == false) { - throw new IllegalArgumentException("Expected numeric type on field [" + fieldContext.field() + - "], but got [" + fieldContext.fieldType().typeName() + "]"); + throw new IllegalArgumentException( + "Expected numeric type on field [" + fieldContext.field() + "], but got [" + fieldContext.fieldType().typeName() + "]" + ); } if (fieldContext.fieldType() instanceof DateFieldType == false) { return new ValuesSource.Numeric.FieldData((IndexNumericFieldData) fieldContext.indexFieldData()); @@ -270,7 +293,7 @@ public Function roundingPreparer() throws IOExcepti * The range of dates, min first, then max. This is an array so we can * write to it inside the QueryVisitor below. 
*/ - long[] range = new long[] {Long.MIN_VALUE, Long.MAX_VALUE}; + long[] range = new long[] { Long.MIN_VALUE, Long.MAX_VALUE }; // Check the search index for bounds if (fieldContext.fieldType().isSearchable()) { @@ -326,19 +349,24 @@ public void visitLeaf(Query query) { } @Override - public ValuesSource replaceMissing(ValuesSource valuesSource, Object rawMissing, DocValueFormat docValueFormat, - AggregationContext context) { + public ValuesSource replaceMissing( + ValuesSource valuesSource, + Object rawMissing, + DocValueFormat docValueFormat, + AggregationContext context + ) { return NUMERIC.replaceMissing(valuesSource, rawMissing, docValueFormat, context); } @Override public DocValueFormat getFormatter(String format, ZoneId tz) { - return new DocValueFormat.DateTime( + return new DocValueFormat.DateTime( format == null ? DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER : DateFormatter.forPattern(format), tz == null ? ZoneOffset.UTC : tz, // If we were just looking at fields, we could read the resolution from the field settings, but we need to deal with script - // output, which has no way to indicate the resolution, so we need to default to something. Milliseconds is the standard. - DateFieldMapper.Resolution.MILLISECONDS); + // output, which has no way to indicate the resolution, so we need to default to something. Milliseconds is the standard. + DateFieldMapper.Resolution.MILLISECONDS + ); } }, BOOLEAN() { @@ -358,8 +386,12 @@ public ValuesSource getField(FieldContext fieldContext, AggregationScript.LeafFa } @Override - public ValuesSource replaceMissing(ValuesSource valuesSource, Object rawMissing, DocValueFormat docValueFormat, - AggregationContext context) { + public ValuesSource replaceMissing( + ValuesSource valuesSource, + Object rawMissing, + DocValueFormat docValueFormat, + AggregationContext context + ) { return NUMERIC.replaceMissing(valuesSource, rawMissing, docValueFormat, context); } @@ -367,8 +399,7 @@ public ValuesSource replaceMissing(ValuesSource valuesSource, Object rawMissing, public DocValueFormat getFormatter(String format, ZoneId tz) { return DocValueFormat.BOOLEAN; } - } - ; + }; public static ValuesSourceType fromString(String name) { return valueOf(name.trim().toUpperCase(Locale.ROOT)); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/support/MissingValues.java b/server/src/main/java/org/elasticsearch/search/aggregations/support/MissingValues.java index 0ebfc3e9f346a..210f8e85ade42 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/support/MissingValues.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/support/MissingValues.java @@ -76,6 +76,7 @@ public BytesRef nextValue() throws IOException { return missing; } } + @Override public String toString() { return "anon SortedBinaryDocValues of [" + super.toString() + "]"; @@ -109,6 +110,7 @@ public SortedNumericDoubleValues doubleValues(LeafReaderContext context) throws final SortedNumericDoubleValues values = valuesSource.doubleValues(context); return replaceMissing(values, missing.doubleValue()); } + @Override public String toString() { return "anon ValuesSource.Numeric of [" + super.toString() + "]"; @@ -209,17 +211,19 @@ public SortedSetDocValues ordinalsValues(LeafReaderContext context) throws IOExc } @Override - public SortedSetDocValues globalOrdinalsValues(LeafReaderContext context) - throws IOException { + public SortedSetDocValues globalOrdinalsValues(LeafReaderContext context) throws IOException { SortedSetDocValues values = 
valuesSource.globalOrdinalsValues(context); return replaceMissing(values, missing); } @Override public LongUnaryOperator globalOrdinalsMapping(LeafReaderContext context) throws IOException { - return getGlobalMapping(valuesSource.ordinalsValues(context), - valuesSource.globalOrdinalsValues(context), - valuesSource.globalOrdinalsMapping(context), missing); + return getGlobalMapping( + valuesSource.ordinalsValues(context), + valuesSource.globalOrdinalsValues(context), + valuesSource.globalOrdinalsMapping(context), + missing + ); } @Override @@ -230,8 +234,7 @@ public String toString() { }; } - static SortedSetDocValues replaceMissing(final SortedSetDocValues values, - final BytesRef missing) throws IOException { + static SortedSetDocValues replaceMissing(final SortedSetDocValues values, final BytesRef missing) throws IOException { final long missingOrd = values.lookupTerm(missing); if (missingOrd >= 0) { // The value already exists @@ -242,8 +245,7 @@ static SortedSetDocValues replaceMissing(final SortedSetDocValues values, } } - static SortedSetDocValues replaceMissingOrd(final SortedSetDocValues values, - final long missingOrd) { + static SortedSetDocValues replaceMissingOrd(final SortedSetDocValues values, final long missingOrd) { return new AbstractSortedSetDocValues() { private boolean hasOrds; @@ -290,8 +292,7 @@ public String toString() { }; } - static SortedSetDocValues insertOrd(final SortedSetDocValues values, final long insertedOrd, - final BytesRef missingValue) { + static SortedSetDocValues insertOrd(final SortedSetDocValues values, final long insertedOrd, final BytesRef missingValue) { return new AbstractSortedSetDocValues() { private boolean hasOrds; @@ -348,8 +349,12 @@ public String toString() { }; } - static LongUnaryOperator getGlobalMapping(SortedSetDocValues values, SortedSetDocValues globalValues, - LongUnaryOperator segmentToGlobalOrd, BytesRef missing) throws IOException { + static LongUnaryOperator getGlobalMapping( + SortedSetDocValues values, + SortedSetDocValues globalValues, + LongUnaryOperator segmentToGlobalOrd, + BytesRef missing + ) throws IOException { final long missingGlobalOrd = globalValues.lookupTerm(missing); final long missingSegmentOrd = values.lookupTerm(missing); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/support/MultiValuesSource.java b/server/src/main/java/org/elasticsearch/search/aggregations/support/MultiValuesSource.java index b4fc0f541ac73..deb85a63e3127 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/support/MultiValuesSource.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/support/MultiValuesSource.java @@ -19,7 +19,7 @@ /** * Class to encapsulate a set of ValuesSource objects labeled by field name */ -public abstract class MultiValuesSource { +public abstract class MultiValuesSource { protected Map values; public static class NumericMultiValuesSource extends MultiValuesSource { @@ -28,8 +28,9 @@ public NumericMultiValuesSource(Map valuesSourceConf for (Map.Entry entry : valuesSourceConfigs.entrySet()) { final ValuesSource valuesSource = entry.getValue().getValuesSource(); if (valuesSource instanceof ValuesSource.Numeric == false) { - throw new AggregationExecutionException("ValuesSource type " + valuesSource.toString() + - "is not supported for multi-valued aggregation"); + throw new AggregationExecutionException( + "ValuesSource type " + valuesSource.toString() + "is not supported for multi-valued aggregation" + ); } values.put(entry.getKey(), (ValuesSource.Numeric) 
valuesSource); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/support/MultiValuesSourceAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/support/MultiValuesSourceAggregationBuilder.java index 39e6f5fa6d669..e148451476742 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/support/MultiValuesSourceAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/support/MultiValuesSourceAggregationBuilder.java @@ -7,10 +7,10 @@ */ package org.elasticsearch.search.aggregations.support; -import org.elasticsearch.core.Nullable; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.core.Nullable; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.AbstractAggregationBuilder; @@ -29,12 +29,11 @@ * * A limitation of this class is that all the ValuesSource's being refereenced must be of the same type. */ -public abstract class MultiValuesSourceAggregationBuilder> - extends AbstractAggregationBuilder { +public abstract class MultiValuesSourceAggregationBuilder> extends + AbstractAggregationBuilder { - - public abstract static class LeafOnly> - extends MultiValuesSourceAggregationBuilder { + public abstract static class LeafOnly> extends MultiValuesSourceAggregationBuilder< + AB> { protected LeafOnly(String name) { super(name); @@ -43,8 +42,9 @@ protected LeafOnly(String name) { protected LeafOnly(LeafOnly clone, Builder factoriesBuilder, Map metadata) { super(clone, factoriesBuilder, metadata); if (factoriesBuilder.count() > 0) { - throw new AggregationInitializationException("Aggregator [" + name + "] of type [" - + getType() + "] cannot accept sub-aggregations"); + throw new AggregationInitializationException( + "Aggregator [" + name + "] of type [" + getType() + "] cannot accept sub-aggregations" + ); } } @@ -57,13 +57,12 @@ protected LeafOnly(StreamInput in) throws IOException { @Override public AB subAggregations(Builder subFactories) { - throw new AggregationInitializationException("Aggregator [" + name + "] of type [" + - getType() + "] cannot accept sub-aggregations"); + throw new AggregationInitializationException( + "Aggregator [" + name + "] of type [" + getType() + "] cannot accept sub-aggregations" + ); } } - - private Map fields = new HashMap<>(); private ValueType userValueTypeHint = null; private String format = null; @@ -72,8 +71,11 @@ protected MultiValuesSourceAggregationBuilder(String name) { super(name); } - protected MultiValuesSourceAggregationBuilder(MultiValuesSourceAggregationBuilder clone, - Builder factoriesBuilder, Map metadata) { + protected MultiValuesSourceAggregationBuilder( + MultiValuesSourceAggregationBuilder clone, + Builder factoriesBuilder, + Map metadata + ) { super(clone, factoriesBuilder, metadata); this.fields = new HashMap<>(clone.fields); @@ -84,8 +86,7 @@ protected MultiValuesSourceAggregationBuilder(MultiValuesSourceAggregationBuilde /** * Read from a stream. 
*/ - protected MultiValuesSourceAggregationBuilder(StreamInput in) - throws IOException { + protected MultiValuesSourceAggregationBuilder(StreamInput in) throws IOException { super(in); read(in); } @@ -155,13 +156,24 @@ public AB format(String format) { protected abstract ValuesSourceType defaultValueSourceType(); @Override - protected final MultiValuesSourceAggregatorFactory doBuild(AggregationContext context, AggregatorFactory parent, - Builder subFactoriesBuilder) throws IOException { + protected final MultiValuesSourceAggregatorFactory doBuild( + AggregationContext context, + AggregatorFactory parent, + Builder subFactoriesBuilder + ) throws IOException { Map configs = new HashMap<>(fields.size()); Map filters = new HashMap<>(fields.size()); fields.forEach((key, value) -> { - ValuesSourceConfig config = ValuesSourceConfig.resolveUnregistered(context, userValueTypeHint, - value.getFieldName(), value.getScript(), value.getMissing(), value.getTimeZone(), format, defaultValueSourceType()); + ValuesSourceConfig config = ValuesSourceConfig.resolveUnregistered( + context, + userValueTypeHint, + value.getFieldName(), + value.getScript(), + value.getMissing(), + value.getTimeZone(), + format, + defaultValueSourceType() + ); configs.put(key, config); filters.put(key, value.getFilter()); }); @@ -170,9 +182,11 @@ protected final MultiValuesSourceAggregatorFactory doBuild(AggregationContext co return innerBuild(context, configs, filters, docValueFormat, parent, subFactoriesBuilder); } - - public static DocValueFormat resolveFormat(@Nullable String format, @Nullable ValueType valueType, - ValuesSourceType defaultValuesSourceType) { + public static DocValueFormat resolveFormat( + @Nullable String format, + @Nullable ValueType valueType, + ValuesSourceType defaultValuesSourceType + ) { if (valueType == null) { // If the user didn't send a hint, all we can do is fall back to the default return defaultValuesSourceType.getFormatter(format, null); @@ -184,12 +198,14 @@ public static DocValueFormat resolveFormat(@Nullable String format, @Nullable Va return valueFormat; } - protected abstract MultiValuesSourceAggregatorFactory innerBuild(AggregationContext context, - Map configs, - Map filters, - DocValueFormat format, AggregatorFactory parent, - Builder subFactoriesBuilder) throws IOException; - + protected abstract MultiValuesSourceAggregatorFactory innerBuild( + AggregationContext context, + Map configs, + Map filters, + DocValueFormat format, + AggregatorFactory parent, + Builder subFactoriesBuilder + ) throws IOException; @Override public final XContentBuilder internalXContent(XContentBuilder builder, Params params) throws IOException { @@ -217,7 +233,6 @@ public int hashCode() { return Objects.hash(super.hashCode(), fields, format, userValueTypeHint); } - @Override public boolean equals(Object obj) { if (this == obj) return true; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/support/MultiValuesSourceAggregatorFactory.java b/server/src/main/java/org/elasticsearch/search/aggregations/support/MultiValuesSourceAggregatorFactory.java index 3388ee3764f88..3a87920d6f8b6 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/support/MultiValuesSourceAggregatorFactory.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/support/MultiValuesSourceAggregatorFactory.java @@ -22,10 +22,15 @@ public abstract class MultiValuesSourceAggregatorFactory extends AggregatorFacto protected final Map configs; protected final DocValueFormat format; - public 
MultiValuesSourceAggregatorFactory(String name, Map configs, - DocValueFormat format, AggregationContext context, - AggregatorFactory parent, AggregatorFactories.Builder subFactoriesBuilder, - Map metadata) throws IOException { + public MultiValuesSourceAggregatorFactory( + String name, + Map configs, + DocValueFormat format, + AggregationContext context, + AggregatorFactory parent, + AggregatorFactories.Builder subFactoriesBuilder, + Map metadata + ) throws IOException { super(name, context, parent, subFactoriesBuilder, metadata); this.configs = configs; this.format = format; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/support/MultiValuesSourceFieldConfig.java b/server/src/main/java/org/elasticsearch/search/aggregations/support/MultiValuesSourceFieldConfig.java index 3ca6ed2fc7a0b..2d19f73ef7ad5 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/support/MultiValuesSourceFieldConfig.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/support/MultiValuesSourceFieldConfig.java @@ -9,12 +9,12 @@ package org.elasticsearch.search.aggregations.support; import org.elasticsearch.Version; -import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.xcontent.ObjectParser; +import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; @@ -53,22 +53,33 @@ public class MultiValuesSourceFieldConfig implements Writeable, ToXContentObject * @param - parser context * @return configured parser */ - public static ObjectParser parserBuilder(boolean scriptable, - boolean timezoneAware, - boolean filtered, - boolean heterogeneous) { - - ObjectParser parser - = new ObjectParser<>(MultiValuesSourceFieldConfig.NAME, MultiValuesSourceFieldConfig.Builder::new); + public static ObjectParser parserBuilder( + boolean scriptable, + boolean timezoneAware, + boolean filtered, + boolean heterogeneous + ) { + + ObjectParser parser = new ObjectParser<>( + MultiValuesSourceFieldConfig.NAME, + MultiValuesSourceFieldConfig.Builder::new + ); parser.declareString(MultiValuesSourceFieldConfig.Builder::setFieldName, ParseField.CommonFields.FIELD); - parser.declareField(MultiValuesSourceFieldConfig.Builder::setMissing, XContentParser::objectText, - ParseField.CommonFields.MISSING, ObjectParser.ValueType.VALUE); + parser.declareField( + MultiValuesSourceFieldConfig.Builder::setMissing, + XContentParser::objectText, + ParseField.CommonFields.MISSING, + ObjectParser.ValueType.VALUE + ); if (scriptable) { - parser.declareField(MultiValuesSourceFieldConfig.Builder::setScript, + parser.declareField( + MultiValuesSourceFieldConfig.Builder::setScript, (p, context) -> Script.parse(p), - Script.SCRIPT_PARSE_FIELD, ObjectParser.ValueType.OBJECT_OR_STRING); + Script.SCRIPT_PARSE_FIELD, + ObjectParser.ValueType.OBJECT_OR_STRING + ); } if (timezoneAware) { @@ -82,23 +93,41 @@ public static ObjectParser parserBu } if (filtered) { - parser.declareField(MultiValuesSourceFieldConfig.Builder::setFilter, + parser.declareField( + MultiValuesSourceFieldConfig.Builder::setFilter, (p, context) -> AbstractQueryBuilder.parseInnerQueryBuilder(p), - FILTER, ObjectParser.ValueType.OBJECT); + FILTER, + 
ObjectParser.ValueType.OBJECT + ); } if (heterogeneous) { - parser.declareField(MultiValuesSourceFieldConfig.Builder::setUserValueTypeHint, - p -> ValueType.lenientParse(p.text()), ValueType.VALUE_TYPE, ObjectParser.ValueType.STRING); - - parser.declareField(MultiValuesSourceFieldConfig.Builder::setFormat, XContentParser::text, - ParseField.CommonFields.FORMAT, ObjectParser.ValueType.STRING); + parser.declareField( + MultiValuesSourceFieldConfig.Builder::setUserValueTypeHint, + p -> ValueType.lenientParse(p.text()), + ValueType.VALUE_TYPE, + ObjectParser.ValueType.STRING + ); + + parser.declareField( + MultiValuesSourceFieldConfig.Builder::setFormat, + XContentParser::text, + ParseField.CommonFields.FORMAT, + ObjectParser.ValueType.STRING + ); } return parser; }; - protected MultiValuesSourceFieldConfig(String fieldName, Object missing, Script script, ZoneId timeZone, QueryBuilder filter, - ValueType userValueTypeHint, String format) { + protected MultiValuesSourceFieldConfig( + String fieldName, + Object missing, + Script script, + ZoneId timeZone, + QueryBuilder filter, + ValueType userValueTypeHint, + String format + ) { this.fieldName = fieldName; this.missing = missing; this.script = script; @@ -159,7 +188,6 @@ public String getFormat() { return format; } - @Override public void writeTo(StreamOutput out) throws IOException { if (out.getVersion().onOrAfter(Version.V_7_6_0)) { @@ -198,7 +226,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.field(FILTER.getPreferredName()); filter.toXContent(builder, params); } - if(userValueTypeHint != null) { + if (userValueTypeHint != null) { builder.field(AggregationBuilder.CommonFields.VALUE_TYPE.getPreferredName(), userValueTypeHint.getPreferredName()); } if (format != null) { @@ -302,15 +330,25 @@ public String getFormat() { public MultiValuesSourceFieldConfig build() { if (Strings.isNullOrEmpty(fieldName) && script == null) { - throw new IllegalArgumentException("[" + ParseField.CommonFields.FIELD.getPreferredName() - + "] and [" + Script.SCRIPT_PARSE_FIELD.getPreferredName() + "] cannot both be null. " + - "Please specify one or the other."); + throw new IllegalArgumentException( + "[" + + ParseField.CommonFields.FIELD.getPreferredName() + + "] and [" + + Script.SCRIPT_PARSE_FIELD.getPreferredName() + + "] cannot both be null. " + + "Please specify one or the other." + ); } if (Strings.isNullOrEmpty(fieldName) == false && script != null) { - throw new IllegalArgumentException("[" + ParseField.CommonFields.FIELD.getPreferredName() - + "] and [" + Script.SCRIPT_PARSE_FIELD.getPreferredName() + "] cannot both be configured. " + - "Please specify one or the other."); + throw new IllegalArgumentException( + "[" + + ParseField.CommonFields.FIELD.getPreferredName() + + "] and [" + + Script.SCRIPT_PARSE_FIELD.getPreferredName() + + "] cannot both be configured. " + + "Please specify one or the other." 
+ ); } return new MultiValuesSourceFieldConfig(fieldName, missing, script, timeZone, filter, userValueTypeHint, format); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/support/MultiValuesSourceParseHelper.java b/server/src/main/java/org/elasticsearch/search/aggregations/support/MultiValuesSourceParseHelper.java index 96434c57d3f46..9244afa39745f 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/support/MultiValuesSourceParseHelper.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/support/MultiValuesSourceParseHelper.java @@ -8,32 +8,44 @@ package org.elasticsearch.search.aggregations.support; -import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.xcontent.AbstractObjectParser; import org.elasticsearch.common.xcontent.ObjectParser; +import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.xcontent.XContentParser; public final class MultiValuesSourceParseHelper { public static void declareCommon( - AbstractObjectParser, T> objectParser, boolean formattable, - ValueType expectedValueType) { + AbstractObjectParser, T> objectParser, + boolean formattable, + ValueType expectedValueType + ) { objectParser.declareField(MultiValuesSourceAggregationBuilder::userValueTypeHint, p -> { ValueType valueType = ValueType.lenientParse(p.text()); if (expectedValueType != null && valueType.isNotA(expectedValueType)) { - throw new ParsingException(p.getTokenLocation(), - "Aggregation [" + objectParser.getName() + "] was configured with an incompatible value type [" - + valueType + "]. It can only work on value off type [" - + expectedValueType + "]"); + throw new ParsingException( + p.getTokenLocation(), + "Aggregation [" + + objectParser.getName() + + "] was configured with an incompatible value type [" + + valueType + + "]. 
It can only work on value off type [" + + expectedValueType + + "]" + ); } return valueType; }, ValueType.VALUE_TYPE, ObjectParser.ValueType.STRING); if (formattable) { - objectParser.declareField(MultiValuesSourceAggregationBuilder::format, XContentParser::text, - ParseField.CommonFields.FORMAT, ObjectParser.ValueType.STRING); + objectParser.declareField( + MultiValuesSourceAggregationBuilder::format, + XContentParser::text, + ParseField.CommonFields.FORMAT, + ObjectParser.ValueType.STRING + ); } } @@ -50,10 +62,17 @@ public static void declareCommon( public static void declareField( String fieldName, AbstractObjectParser, T> objectParser, - boolean scriptable, boolean timezoneAware, boolean filterable, boolean heterogeneous) { + boolean scriptable, + boolean timezoneAware, + boolean filterable, + boolean heterogeneous + ) { - objectParser.declareField((o, fieldConfig) -> o.field(fieldName, fieldConfig.build()), + objectParser.declareField( + (o, fieldConfig) -> o.field(fieldName, fieldConfig.build()), (p, c) -> MultiValuesSourceFieldConfig.parserBuilder(scriptable, timezoneAware, filterable, heterogeneous).parse(p, null), - new ParseField(fieldName), ObjectParser.ValueType.OBJECT); + new ParseField(fieldName), + ObjectParser.ValueType.OBJECT + ); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/support/ValueType.java b/server/src/main/java/org/elasticsearch/search/aggregations/support/ValueType.java index dd78988712f4a..83bfc3cfa50e5 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/support/ValueType.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/support/ValueType.java @@ -8,10 +8,10 @@ package org.elasticsearch.search.aggregations.support; -import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.index.mapper.DateFieldMapper; import org.elasticsearch.search.DocValueFormat; @@ -27,15 +27,18 @@ @Deprecated public enum ValueType implements Writeable { - STRING((byte) 1, "string", "string", CoreValuesSourceType.KEYWORD, - DocValueFormat.RAW), + STRING((byte) 1, "string", "string", CoreValuesSourceType.KEYWORD, DocValueFormat.RAW), LONG((byte) 2, "byte|short|integer|long", "long", CoreValuesSourceType.NUMERIC, DocValueFormat.RAW), DOUBLE((byte) 3, "float|double", "double", CoreValuesSourceType.NUMERIC, DocValueFormat.RAW), NUMBER((byte) 4, "number", "number", CoreValuesSourceType.NUMERIC, DocValueFormat.RAW), - DATE((byte) 5, "date", "date", CoreValuesSourceType.DATE, - new DocValueFormat.DateTime(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER, ZoneOffset.UTC, - DateFieldMapper.Resolution.MILLISECONDS)), + DATE( + (byte) 5, + "date", + "date", + CoreValuesSourceType.DATE, + new DocValueFormat.DateTime(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER, ZoneOffset.UTC, DateFieldMapper.Resolution.MILLISECONDS) + ), IP((byte) 6, "ip", "ip", CoreValuesSourceType.IP, DocValueFormat.IP), NUMERIC((byte) 7, "numeric", "numeric", CoreValuesSourceType.NUMERIC, DocValueFormat.RAW), GEOPOINT((byte) 8, "geo_point", "geo_point", CoreValuesSourceType.GEOPOINT, DocValueFormat.GEOHASH), @@ -50,8 +53,7 @@ public enum ValueType implements Writeable { public static final ParseField VALUE_TYPE = new ParseField("value_type", "valueType"); - ValueType(byte id, String description, String preferredName, ValuesSourceType 
valuesSourceType, - DocValueFormat defaultFormat) { + ValueType(byte id, String description, String preferredName, ValuesSourceType valuesSourceType, DocValueFormat defaultFormat) { this.id = id; this.description = description; this.preferredName = preferredName; @@ -67,8 +69,14 @@ public ValuesSourceType getValuesSourceType() { return valuesSourceType; } - private static Set numericValueTypes = Set.of(ValueType.DOUBLE, ValueType.DATE, ValueType.LONG, ValueType.NUMBER, - ValueType.NUMERIC, ValueType.BOOLEAN); + private static Set numericValueTypes = Set.of( + ValueType.DOUBLE, + ValueType.DATE, + ValueType.LONG, + ValueType.NUMBER, + ValueType.NUMERIC, + ValueType.BOOLEAN + ); private static Set stringValueTypes = Set.of(ValueType.STRING, ValueType.IP); /** @@ -99,18 +107,24 @@ public DocValueFormat defaultFormat() { public static ValueType lenientParse(String type) { switch (type) { - case "string": return STRING; + case "string": + return STRING; case "double": - case "float": return DOUBLE; + case "float": + return DOUBLE; case "number": case "numeric": case "long": case "integer": case "short": - case "byte": return LONG; - case "date": return DATE; - case "ip": return IP; - case "boolean": return BOOLEAN; + case "byte": + return LONG; + case "date": + return DATE; + case "ip": + return IP; + case "boolean": + return BOOLEAN; default: // TODO: do not be lenient here return null; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSource.java b/server/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSource.java index ab7dd5f7f9c5a..4e0e783375a8e 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSource.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSource.java @@ -38,12 +38,13 @@ import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.AggregationExecutionException; import org.elasticsearch.search.aggregations.Aggregator; +import org.elasticsearch.search.aggregations.bucket.geogrid.GeoTileCellIdSource; import org.elasticsearch.search.aggregations.bucket.range.RangeAggregator; import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregator; import org.elasticsearch.search.aggregations.support.values.ScriptBytesValues; import org.elasticsearch.search.aggregations.support.values.ScriptDoubleValues; import org.elasticsearch.search.aggregations.support.values.ScriptLongValues; -import org.elasticsearch.search.aggregations.bucket.geogrid.GeoTileCellIdSource; + import java.io.IOException; import java.util.function.Function; import java.util.function.LongUnaryOperator; @@ -279,7 +280,7 @@ public SortedSetDocValues ordinalsValues(LeafReaderContext context) { @Override public SortedSetDocValues globalOrdinalsValues(LeafReaderContext context) { - final IndexOrdinalsFieldData global = indexFieldData.loadGlobal((DirectoryReader)context.parent.reader()); + final IndexOrdinalsFieldData global = indexFieldData.loadGlobal((DirectoryReader) context.parent.reader()); final LeafOrdinalsFieldData atomicFieldData = global.load(context); return atomicFieldData.getOrdinalsValues(); } @@ -291,7 +292,7 @@ public boolean supportsGlobalOrdinalsMapping() { @Override public LongUnaryOperator globalOrdinalsMapping(LeafReaderContext context) throws IOException { - final IndexOrdinalsFieldData global = indexFieldData.loadGlobal((DirectoryReader)context.parent.reader()); + final IndexOrdinalsFieldData global = indexFieldData.loadGlobal((DirectoryReader) 
context.parent.reader()); final OrdinalMap map = global.getOrdinalMap(); if (map == null) { // segments and global ordinals are the same @@ -689,7 +690,9 @@ public Function roundingPreparer() throws IOException { return Rounding::prepareForUnknown; } - public RangeType rangeType() { return rangeType; } + public RangeType rangeType() { + return rangeType; + } } /** diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceAggregationBuilder.java index 3de13bb6a9132..5315ee6a76607 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceAggregationBuilder.java @@ -8,11 +8,11 @@ package org.elasticsearch.search.aggregations.support; import org.elasticsearch.Version; -import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.AbstractObjectParser; import org.elasticsearch.common.xcontent.ObjectParser; +import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.script.Script; @@ -27,48 +27,68 @@ import java.util.Map; import java.util.Objects; -public abstract class ValuesSourceAggregationBuilder> - extends AbstractAggregationBuilder { +public abstract class ValuesSourceAggregationBuilder> extends AbstractAggregationBuilder { public static void declareFields( AbstractObjectParser, T> objectParser, - boolean scriptable, boolean formattable, boolean timezoneAware) { + boolean scriptable, + boolean formattable, + boolean timezoneAware + ) { declareFields(objectParser, scriptable, formattable, timezoneAware, true); } public static void declareFields( AbstractObjectParser, T> objectParser, - boolean scriptable, boolean formattable, boolean timezoneAware, boolean fieldRequired) { - - - objectParser.declareField(ValuesSourceAggregationBuilder::field, XContentParser::text, - ParseField.CommonFields.FIELD, ObjectParser.ValueType.STRING); - - objectParser.declareField(ValuesSourceAggregationBuilder::missing, XContentParser::objectText, - ParseField.CommonFields.MISSING, ObjectParser.ValueType.VALUE); + boolean scriptable, + boolean formattable, + boolean timezoneAware, + boolean fieldRequired + ) { + + objectParser.declareField( + ValuesSourceAggregationBuilder::field, + XContentParser::text, + ParseField.CommonFields.FIELD, + ObjectParser.ValueType.STRING + ); + + objectParser.declareField( + ValuesSourceAggregationBuilder::missing, + XContentParser::objectText, + ParseField.CommonFields.MISSING, + ObjectParser.ValueType.VALUE + ); objectParser.declareField(ValuesSourceAggregationBuilder::userValueTypeHint, p -> { - ValueType type = ValueType.lenientParse(p.text()); - if (type == null) { - throw new IllegalArgumentException("Unknown value type [" + p.text() + "]"); - } - return type; - }, - ValueType.VALUE_TYPE, ObjectParser.ValueType.STRING); + ValueType type = ValueType.lenientParse(p.text()); + if (type == null) { + throw new IllegalArgumentException("Unknown value type [" + p.text() + "]"); + } + return type; + }, ValueType.VALUE_TYPE, ObjectParser.ValueType.STRING); if (formattable) { - 
objectParser.declareField(ValuesSourceAggregationBuilder::format, XContentParser::text, - ParseField.CommonFields.FORMAT, ObjectParser.ValueType.STRING); + objectParser.declareField( + ValuesSourceAggregationBuilder::format, + XContentParser::text, + ParseField.CommonFields.FORMAT, + ObjectParser.ValueType.STRING + ); } if (scriptable) { - objectParser.declareField(ValuesSourceAggregationBuilder::script, + objectParser.declareField( + ValuesSourceAggregationBuilder::script, (parser, context) -> Script.parse(parser), - Script.SCRIPT_PARSE_FIELD, ObjectParser.ValueType.OBJECT_OR_STRING); + Script.SCRIPT_PARSE_FIELD, + ObjectParser.ValueType.OBJECT_OR_STRING + ); if (fieldRequired) { - String[] fields = new String[]{ParseField.CommonFields.FIELD.getPreferredName(), - Script.SCRIPT_PARSE_FIELD.getPreferredName()}; + String[] fields = new String[] { + ParseField.CommonFields.FIELD.getPreferredName(), + Script.SCRIPT_PARSE_FIELD.getPreferredName() }; objectParser.declareRequiredFieldSet(fields); } } else { @@ -88,8 +108,8 @@ public static void declareFields( } } - public abstract static class LeafOnly> - extends ValuesSourceAggregationBuilder { + public abstract static class LeafOnly> extends + ValuesSourceAggregationBuilder { protected LeafOnly(String name) { super(name); @@ -98,8 +118,9 @@ protected LeafOnly(String name) { protected LeafOnly(LeafOnly clone, Builder factoriesBuilder, Map metadata) { super(clone, factoriesBuilder, metadata); if (factoriesBuilder.count() > 0) { - throw new AggregationInitializationException("Aggregator [" + name + "] of type [" - + getType() + "] cannot accept sub-aggregations"); + throw new AggregationInitializationException( + "Aggregator [" + name + "] of type [" + getType() + "] cannot accept sub-aggregations" + ); } } @@ -112,8 +133,9 @@ protected LeafOnly(StreamInput in) throws IOException { @Override public final AB subAggregations(Builder subFactories) { - throw new AggregationInitializationException("Aggregator [" + name + "] of type [" - + getType() + "] cannot accept sub-aggregations"); + throw new AggregationInitializationException( + "Aggregator [" + name + "] of type [" + getType() + "] cannot accept sub-aggregations" + ); } @Override @@ -134,8 +156,11 @@ protected ValuesSourceAggregationBuilder(String name) { super(name); } - protected ValuesSourceAggregationBuilder(ValuesSourceAggregationBuilder clone, - Builder factoriesBuilder, Map metadata) { + protected ValuesSourceAggregationBuilder( + ValuesSourceAggregationBuilder clone, + Builder factoriesBuilder, + Map metadata + ) { super(clone, factoriesBuilder, metadata); this.field = clone.field; this.userValueTypeHint = clone.userValueTypeHint; @@ -149,8 +174,7 @@ protected ValuesSourceAggregationBuilder(ValuesSourceAggregationBuilder clon /** * Read from a stream. */ - protected ValuesSourceAggregationBuilder(StreamInput in) - throws IOException { + protected ValuesSourceAggregationBuilder(StreamInput in) throws IOException { super(in); if (serializeTargetValueType(in.getVersion())) { ValueType valueType = in.readOptionalWriteable(ValueType::readFromStream); @@ -262,9 +286,9 @@ public Script script() { @SuppressWarnings("unchecked") public AB userValueTypeHint(ValueType valueType) { if (valueType == null) { - // TODO: This is nonsense. We allow the value to be null (via constructor), but don't allow it to be set to null. This means - // thing looking to copy settings (like RollupRequestTranslator) need to check if userValueTypeHint is not null, and then - // set it if and only if it is non-null. 
+ // TODO: This is nonsense. We allow the value to be null (via constructor), but don't allow it to be set to null. This means + // thing looking to copy settings (like RollupRequestTranslator) need to check if userValueTypeHint is not null, and then + // set it if and only if it is non-null. throw new IllegalArgumentException("[userValueTypeHint] must not be null: [" + name + "]"); } this.userValueTypeHint = valueType; @@ -335,8 +359,8 @@ public ZoneId timeZone() { } @Override - protected final ValuesSourceAggregatorFactory doBuild(AggregationContext context, AggregatorFactory parent, - Builder subFactoriesBuilder) throws IOException { + protected final ValuesSourceAggregatorFactory doBuild(AggregationContext context, AggregatorFactory parent, Builder subFactoriesBuilder) + throws IOException { ValuesSourceConfig config = resolveConfig(context); ValuesSourceAggregatorFactory factory; @@ -375,14 +399,24 @@ protected final ValuesSourceAggregatorFactory doBuild(AggregationContext context * @return A {@link ValuesSourceConfig} configured based on the parsed field and/or script. */ protected ValuesSourceConfig resolveConfig(AggregationContext context) { - return ValuesSourceConfig.resolve(context, - this.userValueTypeHint, field, script, missing, timeZone, format, this.defaultValueSourceType()); + return ValuesSourceConfig.resolve( + context, + this.userValueTypeHint, + field, + script, + missing, + timeZone, + format, + this.defaultValueSourceType() + ); } - protected abstract ValuesSourceAggregatorFactory innerBuild(AggregationContext context, - ValuesSourceConfig config, - AggregatorFactory parent, - Builder subFactoriesBuilder) throws IOException; + protected abstract ValuesSourceAggregatorFactory innerBuild( + AggregationContext context, + ValuesSourceConfig config, + AggregatorFactory parent, + Builder subFactoriesBuilder + ) throws IOException; @Override public final XContentBuilder internalXContent(XContentBuilder builder, Params params) throws IOException { diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceAggregatorFactory.java b/server/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceAggregatorFactory.java index 631c475446a15..f62253944bda1 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceAggregatorFactory.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceAggregatorFactory.java @@ -20,16 +20,21 @@ public abstract class ValuesSourceAggregatorFactory extends AggregatorFactory { protected ValuesSourceConfig config; - public ValuesSourceAggregatorFactory(String name, ValuesSourceConfig config, AggregationContext context, - AggregatorFactory parent, AggregatorFactories.Builder subFactoriesBuilder, - Map metadata) throws IOException { + public ValuesSourceAggregatorFactory( + String name, + ValuesSourceConfig config, + AggregationContext context, + AggregatorFactory parent, + AggregatorFactories.Builder subFactoriesBuilder, + Map metadata + ) throws IOException { super(name, context, parent, subFactoriesBuilder, metadata); this.config = config; } @Override - public Aggregator createInternal(Aggregator parent, CardinalityUpperBound cardinality, - Map metadata) throws IOException { + public Aggregator createInternal(Aggregator parent, CardinalityUpperBound cardinality, Map metadata) + throws IOException { if (config.hasValues() == false) { return createUnmapped(parent, metadata); } diff --git 
a/server/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceConfig.java b/server/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceConfig.java index bed1262e9c0ca..687c73ebb1e1d 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceConfig.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceConfig.java @@ -7,8 +7,8 @@ */ package org.elasticsearch.search.aggregations.support; -import org.elasticsearch.core.Nullable; import org.elasticsearch.common.Rounding; +import org.elasticsearch.core.Nullable; import org.elasticsearch.index.fielddata.IndexFieldData; import org.elasticsearch.index.fielddata.IndexGeoPointFieldData; import org.elasticsearch.index.fielddata.IndexNumericFieldData; @@ -43,16 +43,26 @@ public class ValuesSourceConfig { * @param defaultValueSourceType - per-aggregation {@link ValuesSource} of last resort. * @return - An initialized {@link ValuesSourceConfig} that will yield the appropriate {@link ValuesSourceType} */ - public static ValuesSourceConfig resolve(AggregationContext context, - ValueType userValueTypeHint, - String field, - Script script, - Object missing, - ZoneId timeZone, - String format, - ValuesSourceType defaultValueSourceType) { - - return internalResolve(context, userValueTypeHint, field, script, missing, timeZone, format, defaultValueSourceType, + public static ValuesSourceConfig resolve( + AggregationContext context, + ValueType userValueTypeHint, + String field, + Script script, + Object missing, + ZoneId timeZone, + String format, + ValuesSourceType defaultValueSourceType + ) { + + return internalResolve( + context, + userValueTypeHint, + field, + script, + missing, + timeZone, + format, + defaultValueSourceType, ValuesSourceConfig::getMappingFromRegistry ); } @@ -73,14 +83,16 @@ public static ValuesSourceConfig resolve(AggregationContext context, * @param defaultValueSourceType - per-aggregation {@link ValuesSource} of last resort. 
* @return - An initialized {@link ValuesSourceConfig} that will yield the appropriate {@link ValuesSourceType} */ - public static ValuesSourceConfig resolveUnregistered(AggregationContext context, - ValueType userValueTypeHint, - String field, - Script script, - Object missing, - ZoneId timeZone, - String format, - ValuesSourceType defaultValueSourceType) { + public static ValuesSourceConfig resolveUnregistered( + AggregationContext context, + ValueType userValueTypeHint, + String field, + Script script, + Object missing, + ZoneId timeZone, + String format, + ValuesSourceType defaultValueSourceType + ) { return internalResolve( context, userValueTypeHint, @@ -90,19 +102,21 @@ public static ValuesSourceConfig resolveUnregistered(AggregationContext context, timeZone, format, defaultValueSourceType, - ValuesSourceConfig::getLegacyMapping); + ValuesSourceConfig::getLegacyMapping + ); } - private static ValuesSourceConfig internalResolve(AggregationContext context, - ValueType userValueTypeHint, - String field, - Script script, - Object missing, - ZoneId timeZone, - String format, - ValuesSourceType defaultValueSourceType, - FieldResolver fieldResolver - ) { + private static ValuesSourceConfig internalResolve( + AggregationContext context, + ValueType userValueTypeHint, + String field, + Script script, + Object missing, + ZoneId timeZone, + String format, + ValuesSourceType defaultValueSourceType, + FieldResolver fieldResolver + ) { ValuesSourceConfig config; ValuesSourceType valuesSourceType = null; ValueType scriptValueType = userValueTypeHint; @@ -115,8 +129,7 @@ private static ValuesSourceConfig internalResolve(AggregationContext context, } if (field == null) { if (script == null) { - throw new IllegalStateException( - "value source config is invalid; must have either a field or a script"); + throw new IllegalStateException("value source config is invalid; must have either a field or a script"); } } else { // Field case @@ -128,7 +141,7 @@ private static ValuesSourceConfig internalResolve(AggregationContext context, * specified missing value. */ unmapped = true; - aggregationScript = null; // Value scripts are not allowed on unmapped fields. What would that do, anyway? + aggregationScript = null; // Value scripts are not allowed on unmapped fields. What would that do, anyway? } else { if (valuesSourceType == null) { // We have a field, and the user didn't specify a type, so get the type from the field @@ -159,7 +172,8 @@ private interface FieldResolver { ValuesSourceType getValuesSourceType( FieldContext fieldContext, ValueType userValueTypeHint, - ValuesSourceType defaultValuesSourceType); + ValuesSourceType defaultValuesSourceType + ); } @@ -201,8 +215,12 @@ private static AggregationScript.LeafFactory createScript(Script script, Aggrega } } - private static DocValueFormat resolveFormat(@Nullable String format, @Nullable ValuesSourceType valuesSourceType, @Nullable ZoneId tz, - @Nullable FieldContext fieldContext) { + private static DocValueFormat resolveFormat( + @Nullable String format, + @Nullable ValuesSourceType valuesSourceType, + @Nullable ZoneId tz, + @Nullable FieldContext fieldContext + ) { if (fieldContext != null) { return fieldContext.fieldType().docValueFormat(format, tz); } @@ -266,18 +284,15 @@ public ValuesSourceConfig( this.format = format == null ? DocValueFormat.RAW : format; if (valid() == false) { - // TODO: resolve no longer generates invalid configs. Once VSConfig is immutable, we can drop this check + // TODO: resolve no longer generates invalid configs. 
Once VSConfig is immutable, we can drop this check throw new IllegalStateException( - "value source config is invalid; must have either a field context or a script or marked as unwrapped"); + "value source config is invalid; must have either a field context or a script or marked as unwrapped" + ); } valuesSource = constructValuesSource(missing, format, context); } - private ValuesSource constructValuesSource( - Object missing, - DocValueFormat format, - AggregationContext context - ) { + private ValuesSource constructValuesSource(Object missing, DocValueFormat format, AggregationContext context) { final ValuesSource vs; if (this.unmapped) { vs = valueSourceType().getEmpty(); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceRegistry.java b/server/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceRegistry.java index 32498ac01bd2f..3a43f147fefa3 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceRegistry.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceRegistry.java @@ -64,7 +64,6 @@ public Builder() { this.usageServiceBuilder = new AggregationUsageService.Builder(); } - /** * Register a ValuesSource to Aggregator mapping. This method registers mappings that only apply to a * single {@link ValuesSourceType} @@ -80,7 +79,8 @@ public void register( RegistryKey registryKey, ValuesSourceType valuesSourceType, T aggregatorSupplier, - boolean registerUsage) { + boolean registerUsage + ) { if (aggregatorRegistry.containsKey(registryKey) == false) { aggregatorRegistry.put(registryKey, new ArrayList<>()); } @@ -105,7 +105,8 @@ public void register( RegistryKey registryKey, List valuesSourceTypes, T aggregatorSupplier, - boolean registerUsage) { + boolean registerUsage + ) { for (ValuesSourceType valuesSourceType : valuesSourceTypes) { register(registryKey, valuesSourceType, aggregatorSupplier, registerUsage); } @@ -131,13 +132,13 @@ public ValuesSourceRegistry build() { Make an immutable copy of our input map. Since this is write once, read many, we'll spend a bit of extra time to shape this into a Map.of(), which is more read optimized than just using a hash map. 
*/ - @SuppressWarnings({"rawtypes", "unchecked"}) + @SuppressWarnings({ "rawtypes", "unchecked" }) Map.Entry, Map>[] copiedEntries = new Map.Entry[mutableMap.size()]; int i = 0; for (Map.Entry, List>> entry : mutableMap.entrySet()) { RegistryKey topKey = entry.getKey(); List> values = entry.getValue(); - @SuppressWarnings({"rawtypes", "unchecked"}) + @SuppressWarnings({ "rawtypes", "unchecked" }) Map.Entry, Map> newEntry = Map.entry( topKey, Map.ofEntries(values.toArray(new Map.Entry[0])) diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceType.java b/server/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceType.java index 4628a4f075131..ed7e786b10b34 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceType.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceType.java @@ -71,8 +71,7 @@ public interface ValuesSourceType { * @param context - Context for this aggregation used to handle {@link AggregationContext#nowInMillis() "now"} * @return - Wrapper over the provided {@link ValuesSource} to apply the given missing value */ - ValuesSource replaceMissing(ValuesSource valuesSource, Object rawMissing, DocValueFormat docValueFormat, - AggregationContext context); + ValuesSource replaceMissing(ValuesSource valuesSource, Object rawMissing, DocValueFormat docValueFormat, AggregationContext context); /** * This method provides a hook for specifying a type-specific formatter. When {@link ValuesSourceConfig} can resolve a diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/AdaptingAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/AdaptingAggregatorTests.java index a46cb423dec2c..da74f2d4e0b22 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/AdaptingAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/AdaptingAggregatorTests.java @@ -67,11 +67,8 @@ private static class DummyAdaptingAggregatorFactory extends AggregatorFactory { } @Override - protected Aggregator createInternal( - Aggregator parent, - CardinalityUpperBound cardinality, - Map metadata - ) throws IOException { + protected Aggregator createInternal(Aggregator parent, CardinalityUpperBound cardinality, Map metadata) + throws IOException { return new DummyAdaptingAggregator( parent, factories, @@ -117,7 +114,7 @@ protected LeafBucketCollector getLeafCollector(LeafReaderContext ctx, LeafBucket @Override public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException { - return new InternalAggregation[] {null}; + return new InternalAggregation[] { null }; } @Override diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/AggregationTestScriptsPlugin.java b/server/src/test/java/org/elasticsearch/search/aggregations/AggregationTestScriptsPlugin.java index f3a9500ef74bc..6edb1491bb86b 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/AggregationTestScriptsPlugin.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/AggregationTestScriptsPlugin.java @@ -30,7 +30,7 @@ public class AggregationTestScriptsPlugin extends MockScriptPlugin { // List values = doc['values']; // double[] res = new double[values.size()]; // for (int i = 0; i < res.length; i++) { - // res[i] = values.get(i) - dec; + // res[i] = values.get(i) - dec; // }; // return res; public static final Script DECREMENT_ALL_VALUES = new Script(ScriptType.INLINE, NAME, 
"decrement all values", singletonMap("dec", 1)); @@ -57,32 +57,32 @@ protected Map, Object>> pluginScripts() { }); scripts.put("doc['value'].value", vars -> { - Map doc = (Map) vars.get("doc"); + Map doc = (Map) vars.get("doc"); return doc.get("value"); }); scripts.put("doc['value'].value - dec", vars -> { int dec = (int) vars.get("dec"); - Map doc = (Map) vars.get("doc"); + Map doc = (Map) vars.get("doc"); ScriptDocValues.Longs value = (ScriptDocValues.Longs) doc.get("value"); return value.getValue() - dec; }); scripts.put("doc['value'].value + inc", vars -> { int inc = (int) vars.get("inc"); - Map doc = (Map) vars.get("doc"); + Map doc = (Map) vars.get("doc"); ScriptDocValues.Longs value = (ScriptDocValues.Longs) doc.get("value"); return value.getValue() + inc; }); scripts.put("doc['values']", vars -> { - Map doc = (Map) vars.get("doc"); + Map doc = (Map) vars.get("doc"); return doc.get("values"); }); scripts.put(DECREMENT_ALL_VALUES.getIdOrCode(), vars -> { int dec = (int) vars.get("dec"); - Map doc = (Map) vars.get("doc"); + Map doc = (Map) vars.get("doc"); ScriptDocValues.Longs values = (ScriptDocValues.Longs) doc.get("values"); double[] res = new double[values.size()]; @@ -95,13 +95,13 @@ protected Map, Object>> pluginScripts() { scripts.put("[ doc['value'].value, doc['value'].value - dec ]", vars -> { Long a = ((ScriptDocValues.Longs) scripts.get("doc['value'].value").apply(vars)).getValue(); Long b = (Long) scripts.get("doc['value'].value - dec").apply(vars); - return new Long[]{a, b}; + return new Long[] { a, b }; }); scripts.put("[ doc['value'].value, doc['value'].value + inc ]", vars -> { Long a = ((ScriptDocValues.Longs) scripts.get("doc['value'].value").apply(vars)).getValue(); Long b = (Long) scripts.get("doc['value'].value + inc").apply(vars); - return new Long[]{a, b}; + return new Long[] { a, b }; }); return scripts; diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/AggregationsTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/AggregationsTests.java index aa95678c2ff8f..aa5d7b6450584 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/AggregationsTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/AggregationsTests.java @@ -95,56 +95,57 @@ public class AggregationsTests extends ESTestCase { private static final List> aggsTests = List.of( - new InternalCardinalityTests(), - new InternalTDigestPercentilesTests(), - new InternalTDigestPercentilesRanksTests(), - new InternalHDRPercentilesTests(), - new InternalHDRPercentilesRanksTests(), - new InternalPercentilesBucketTests(), - new InternalMinTests(), - new InternalMaxTests(), - new InternalAvgTests(), - new InternalWeightedAvgTests(), - new InternalSumTests(), - new InternalValueCountTests(), - new InternalSimpleValueTests(), - new InternalDerivativeTests(), - new InternalBucketMetricValueTests(), - new InternalStatsTests(), - new InternalStatsBucketTests(), - new InternalExtendedStatsTests(), - new InternalExtendedStatsBucketTests(), - new InternalGeoBoundsTests(), - new InternalGeoCentroidTests(), - new InternalHistogramTests(), - new InternalDateHistogramTests(), - new InternalAutoDateHistogramTests(), - new InternalVariableWidthHistogramTests(), - new LongTermsTests(), - new DoubleTermsTests(), - new StringTermsTests(), - new LongRareTermsTests(), - new StringRareTermsTests(), - new InternalMissingTests(), - new InternalNestedTests(), - new InternalReverseNestedTests(), - new InternalGlobalTests(), - new InternalFilterTests(), - new 
InternalSamplerTests(), - new GeoHashGridTests(), - new GeoTileGridTests(), - new InternalRangeTests(), - new InternalDateRangeTests(), - new InternalGeoDistanceTests(), - new InternalFiltersTests(), - new InternalAdjacencyMatrixTests(), - new SignificantLongTermsTests(), - new SignificantStringTermsTests(), - new InternalScriptedMetricTests(), - new InternalBinaryRangeTests(), - new InternalTopHitsTests(), - new InternalCompositeTests(), - new InternalMedianAbsoluteDeviationTests()); + new InternalCardinalityTests(), + new InternalTDigestPercentilesTests(), + new InternalTDigestPercentilesRanksTests(), + new InternalHDRPercentilesTests(), + new InternalHDRPercentilesRanksTests(), + new InternalPercentilesBucketTests(), + new InternalMinTests(), + new InternalMaxTests(), + new InternalAvgTests(), + new InternalWeightedAvgTests(), + new InternalSumTests(), + new InternalValueCountTests(), + new InternalSimpleValueTests(), + new InternalDerivativeTests(), + new InternalBucketMetricValueTests(), + new InternalStatsTests(), + new InternalStatsBucketTests(), + new InternalExtendedStatsTests(), + new InternalExtendedStatsBucketTests(), + new InternalGeoBoundsTests(), + new InternalGeoCentroidTests(), + new InternalHistogramTests(), + new InternalDateHistogramTests(), + new InternalAutoDateHistogramTests(), + new InternalVariableWidthHistogramTests(), + new LongTermsTests(), + new DoubleTermsTests(), + new StringTermsTests(), + new LongRareTermsTests(), + new StringRareTermsTests(), + new InternalMissingTests(), + new InternalNestedTests(), + new InternalReverseNestedTests(), + new InternalGlobalTests(), + new InternalFilterTests(), + new InternalSamplerTests(), + new GeoHashGridTests(), + new GeoTileGridTests(), + new InternalRangeTests(), + new InternalDateRangeTests(), + new InternalGeoDistanceTests(), + new InternalFiltersTests(), + new InternalAdjacencyMatrixTests(), + new SignificantLongTermsTests(), + new SignificantStringTermsTests(), + new InternalScriptedMetricTests(), + new InternalBinaryRangeTests(), + new InternalTopHitsTests(), + new InternalCompositeTests(), + new InternalMedianAbsoluteDeviationTests() + ); @Override protected NamedXContentRegistry xContentRegistry() { @@ -223,13 +224,15 @@ private void parseAndAssert(boolean addRandomFields) throws IOException { * * - exclude "key", it can be an array of objects and we need strict values */ - Predicate excludes = path -> (path.isEmpty() || path.endsWith("aggregations") - || path.endsWith(Aggregation.CommonFields.META.getPreferredName()) - || path.endsWith(Aggregation.CommonFields.BUCKETS.getPreferredName()) - || path.endsWith(CommonFields.VALUES.getPreferredName()) || path.endsWith("covariance") || path.endsWith("correlation") - || path.contains(CommonFields.VALUE.getPreferredName()) - || path.endsWith(CommonFields.KEY.getPreferredName())) - || path.contains("top_hits"); + Predicate excludes = path -> (path.isEmpty() + || path.endsWith("aggregations") + || path.endsWith(Aggregation.CommonFields.META.getPreferredName()) + || path.endsWith(Aggregation.CommonFields.BUCKETS.getPreferredName()) + || path.endsWith(CommonFields.VALUES.getPreferredName()) + || path.endsWith("covariance") + || path.endsWith("correlation") + || path.contains(CommonFields.VALUE.getPreferredName()) + || path.endsWith(CommonFields.KEY.getPreferredName())) || path.contains("top_hits"); mutated = insertRandomFields(xContentType, originalBytes, excludes, random()); } else { mutated = originalBytes; @@ -273,13 +276,9 @@ private static InternalAggregations 
createTestInstance(final int minNumAggs, fin if (testCase instanceof InternalMultiBucketAggregationTestCase) { InternalMultiBucketAggregationTestCase multiBucketAggTestCase = (InternalMultiBucketAggregationTestCase) testCase; if (currentDepth < maxDepth) { - multiBucketAggTestCase.setSubAggregationsSupplier( - () -> createTestInstance(0, currentDepth + 1, maxDepth) - ); + multiBucketAggTestCase.setSubAggregationsSupplier(() -> createTestInstance(0, currentDepth + 1, maxDepth)); } else { - multiBucketAggTestCase.setSubAggregationsSupplier( - () -> InternalAggregations.EMPTY - ); + multiBucketAggTestCase.setSubAggregationsSupplier(() -> InternalAggregations.EMPTY); } } else if (testCase instanceof InternalSingleBucketAggregationTestCase) { InternalSingleBucketAggregationTestCase singleBucketAggTestCase = (InternalSingleBucketAggregationTestCase) testCase; diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/AggregatorBaseTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/AggregatorBaseTests.java index 297e702cde6d2..b864e213ae9c9 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/AggregatorBaseTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/AggregatorBaseTests.java @@ -118,35 +118,21 @@ public void testShortcutIsApplicable() throws IOException { MapperService mapperService = createMapperService(fieldMapping(b -> b.field("type", "keyword"))); withAggregationContext(mapperService, List.of(source(b -> b.field("field", "abc"))), context -> { for (NumberFieldMapper.NumberType type : NumberFieldMapper.NumberType.values()) { - assertNotNull( - pointReaderShim(context(new MatchAllDocsQuery()), null, getVSConfig("number", type, true, context)) - ); + assertNotNull(pointReaderShim(context(new MatchAllDocsQuery()), null, getVSConfig("number", type, true, context))); assertNotNull(pointReaderShim(context(null), null, getVSConfig("number", type, true, context))); assertNull(pointReaderShim(context(null), mockAggregator(), getVSConfig("number", type, true, context))); assertNull( - pointReaderShim( - context(new TermQuery(new Term("foo", "bar"))), - null, - getVSConfig("number", type, true, context) - ) + pointReaderShim(context(new TermQuery(new Term("foo", "bar"))), null, getVSConfig("number", type, true, context)) ); assertNull(pointReaderShim(context(null), mockAggregator(), getVSConfig("number", type, true, context))); assertNull(pointReaderShim(context(null), null, getVSConfig("number", type, false, context))); } for (DateFieldMapper.Resolution resolution : DateFieldMapper.Resolution.values()) { assertNull( - pointReaderShim( - context(new MatchAllDocsQuery()), - mockAggregator(), - getVSConfig("number", resolution, true, context) - ) + pointReaderShim(context(new MatchAllDocsQuery()), mockAggregator(), getVSConfig("number", resolution, true, context)) ); assertNull( - pointReaderShim( - context(new TermQuery(new Term("foo", "bar"))), - null, - getVSConfig("number", resolution, true, context) - ) + pointReaderShim(context(new TermQuery(new Term("foo", "bar"))), null, getVSConfig("number", resolution, true, context)) ); assertNull(pointReaderShim(context(null), mockAggregator(), getVSConfig("number", resolution, true, context))); assertNull(pointReaderShim(context(null), null, getVSConfig("number", resolution, false, context))); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/AggregatorFactoriesBuilderTests.java 
b/server/src/test/java/org/elasticsearch/search/aggregations/AggregatorFactoriesBuilderTests.java index 82f5cf086041f..87b8026d8b143 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/AggregatorFactoriesBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/AggregatorFactoriesBuilderTests.java @@ -127,14 +127,14 @@ private static AggregationBuilder getRandomAggregation() { // just a couple of aggregations, sufficient for the purpose of this test final int randomAggregatorPoolSize = 4; switch (randomIntBetween(1, randomAggregatorPoolSize)) { - case 1: - return AggregationBuilders.avg(randomAlphaOfLengthBetween(3, 10)).field("foo"); - case 2: - return AggregationBuilders.min(randomAlphaOfLengthBetween(3, 10)).field("foo"); - case 3: - return AggregationBuilders.max(randomAlphaOfLengthBetween(3, 10)).field("foo"); - case 4: - return AggregationBuilders.sum(randomAlphaOfLengthBetween(3, 10)).field("foo"); + case 1: + return AggregationBuilders.avg(randomAlphaOfLengthBetween(3, 10)).field("foo"); + case 2: + return AggregationBuilders.min(randomAlphaOfLengthBetween(3, 10)).field("foo"); + case 3: + return AggregationBuilders.max(randomAlphaOfLengthBetween(3, 10)).field("foo"); + case 4: + return AggregationBuilders.sum(randomAlphaOfLengthBetween(3, 10)).field("foo"); } // never reached diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/AggregatorFactoriesTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/AggregatorFactoriesTests.java index 7e2895ad38b09..7e26488821e88 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/AggregatorFactoriesTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/AggregatorFactoriesTests.java @@ -58,8 +58,10 @@ public class AggregatorFactoriesTests extends ESTestCase { @Override public void setUp() throws Exception { super.setUp(); - Settings settings = Settings.builder().put("node.name", AbstractQueryTestCase.class.toString()) - .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()).build(); + Settings settings = Settings.builder() + .put("node.name", AbstractQueryTestCase.class.toString()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) + .build(); xContentRegistry = new NamedXContentRegistry(new SearchModule(settings, emptyList()).getNamedXContents()); } @@ -72,29 +74,32 @@ public void testGetAggregatorFactories_returnsUnmodifiableList() { public void testGetPipelineAggregatorFactories_returnsUnmodifiableList() { AggregatorFactories.Builder builder = new AggregatorFactories.Builder().addPipelineAggregator( - PipelineAggregatorBuilders.avgBucket("foo", "path1")); + PipelineAggregatorBuilders.avgBucket("foo", "path1") + ); Collection pipelineAggregatorFactories = builder.getPipelineAggregatorFactories(); assertThat(pipelineAggregatorFactories.size(), equalTo(1)); - expectThrows(UnsupportedOperationException.class, - () -> pipelineAggregatorFactories.add(PipelineAggregatorBuilders.avgBucket("bar", "path2"))); + expectThrows( + UnsupportedOperationException.class, + () -> pipelineAggregatorFactories.add(PipelineAggregatorBuilders.avgBucket("bar", "path2")) + ); } public void testTwoTypes() throws Exception { XContentBuilder source = JsonXContent.contentBuilder() - .startObject() - .startObject("in_stock") - .startObject("filter") - .startObject("range") - .startObject("stock") - .field("gt", 0) - .endObject() - .endObject() - .endObject() - .startObject("terms") - .field("field", "stock") - .endObject() - 
.endObject() - .endObject(); + .startObject() + .startObject("in_stock") + .startObject("filter") + .startObject("range") + .startObject("stock") + .field("gt", 0) + .endObject() + .endObject() + .endObject() + .startObject("terms") + .field("field", "stock") + .endObject() + .endObject() + .endObject(); XContentParser parser = createParser(source); assertSame(XContentParser.Token.START_OBJECT, parser.nextToken()); Exception e = expectThrows(ParsingException.class, () -> AggregatorFactories.parseAggregators(parser)); @@ -118,17 +123,17 @@ public void testInvalidAggregationName() throws Exception { } XContentBuilder source = JsonXContent.contentBuilder() - .startObject() - .startObject(name) - .startObject("filter") - .startObject("range") - .startObject("stock") - .field("gt", 0) - .endObject() - .endObject() - .endObject() - .endObject() - .endObject(); + .startObject() + .startObject(name) + .startObject("filter") + .startObject("range") + .startObject("stock") + .field("gt", 0) + .endObject() + .endObject() + .endObject() + .endObject() + .endObject(); XContentParser parser = createParser(source); assertSame(XContentParser.Token.START_OBJECT, parser.nextToken()); Exception e = expectThrows(ParsingException.class, () -> AggregatorFactories.parseAggregators(parser)); @@ -137,22 +142,22 @@ public void testInvalidAggregationName() throws Exception { public void testMissingName() throws Exception { XContentBuilder source = JsonXContent.contentBuilder() - .startObject() - .startObject("by_date") - .startObject("date_histogram") - .field("field", "timestamp") - .field("calendar_interval", "month") - .endObject() - .startObject("aggs") - // the aggregation name is missing - //.startObject("tag_count") - .startObject("cardinality") - .field("field", "tag") - .endObject() - //.endObject() - .endObject() - .endObject() - .endObject(); + .startObject() + .startObject("by_date") + .startObject("date_histogram") + .field("field", "timestamp") + .field("calendar_interval", "month") + .endObject() + .startObject("aggs") + // the aggregation name is missing + // .startObject("tag_count") + .startObject("cardinality") + .field("field", "tag") + .endObject() + // .endObject() + .endObject() + .endObject() + .endObject(); XContentParser parser = createParser(source); assertSame(XContentParser.Token.START_OBJECT, parser.nextToken()); Exception e = expectThrows(ParsingException.class, () -> AggregatorFactories.parseAggregators(parser)); @@ -161,22 +166,22 @@ public void testMissingName() throws Exception { public void testMissingType() throws Exception { XContentBuilder source = JsonXContent.contentBuilder() - .startObject() - .startObject("by_date") - .startObject("date_histogram") - .field("field", "timestamp") - .field("calendar_interval", "month") - .endObject() - .startObject("aggs") - .startObject("tag_count") - // the aggregation type is missing - //.startObject("cardinality") - .field("field", "tag") - //.endObject() - .endObject() - .endObject() - .endObject() - .endObject(); + .startObject() + .startObject("by_date") + .startObject("date_histogram") + .field("field", "timestamp") + .field("calendar_interval", "month") + .endObject() + .startObject("aggs") + .startObject("tag_count") + // the aggregation type is missing + // .startObject("cardinality") + .field("field", "tag") + // .endObject() + .endObject() + .endObject() + .endObject() + .endObject(); XContentParser parser = createParser(source); assertSame(XContentParser.Token.START_OBJECT, parser.nextToken()); Exception e = 
expectThrows(ParsingException.class, () -> AggregatorFactories.parseAggregators(parser)); @@ -185,22 +190,22 @@ public void testMissingType() throws Exception { public void testInvalidType() throws Exception { XContentBuilder source = JsonXContent.contentBuilder() - .startObject() - .startObject("by_date") - .startObject("date_histogram") - .field("field", "timestamp") - .field("calendar_interval", "month") - .endObject() - .startObject("aggs") - .startObject("tags") - // the aggregation type is invalid - .startObject("term") - .field("field", "tag") - .endObject() - .endObject() - .endObject() - .endObject() - .endObject(); + .startObject() + .startObject("by_date") + .startObject("date_histogram") + .field("field", "timestamp") + .field("calendar_interval", "month") + .endObject() + .startObject("aggs") + .startObject("tags") + // the aggregation type is invalid + .startObject("term") + .field("field", "tag") + .endObject() + .endObject() + .endObject() + .endObject() + .endObject(); XContentParser parser = createParser(source); assertSame(XContentParser.Token.START_OBJECT, parser.nextToken()); Exception e = expectThrows(ParsingException.class, () -> AggregatorFactories.parseAggregators(parser)); @@ -225,9 +230,8 @@ public void testRewriteAggregation() throws Exception { FilterAggregationBuilder filterAggBuilder = new FilterAggregationBuilder("titles", new WrapperQueryBuilder(bytesReference)); BucketScriptPipelineAggregationBuilder pipelineAgg = new BucketScriptPipelineAggregationBuilder("const", new Script("1")); AggregatorFactories.Builder builder = new AggregatorFactories.Builder().addAggregator(filterAggBuilder) - .addPipelineAggregator(pipelineAgg); - AggregatorFactories.Builder rewritten = builder - .rewrite(new QueryRewriteContext(xContentRegistry, null, null, () -> 0L)); + .addPipelineAggregator(pipelineAgg); + AggregatorFactories.Builder rewritten = builder.rewrite(new QueryRewriteContext(xContentRegistry, null, null, () -> 0L)); assertNotSame(builder, rewritten); Collection aggregatorFactories = rewritten.getAggregatorFactories(); assertEquals(1, aggregatorFactories.size()); @@ -240,14 +244,14 @@ public void testRewriteAggregation() throws Exception { assertThat(rewrittenFilter, instanceOf(TermsQueryBuilder.class)); // Check that a further rewrite returns the same aggregation factories builder - AggregatorFactories.Builder secondRewritten = rewritten - .rewrite(new QueryRewriteContext(xContentRegistry, null, null, () -> 0L)); + AggregatorFactories.Builder secondRewritten = rewritten.rewrite(new QueryRewriteContext(xContentRegistry, null, null, () -> 0L)); assertSame(rewritten, secondRewritten); } public void testRewritePipelineAggregationUnderAggregation() throws Exception { - FilterAggregationBuilder filterAggBuilder = new FilterAggregationBuilder("titles", new MatchAllQueryBuilder()) - .subAggregation(new RewrittenPipelineAggregationBuilder()); + FilterAggregationBuilder filterAggBuilder = new FilterAggregationBuilder("titles", new MatchAllQueryBuilder()).subAggregation( + new RewrittenPipelineAggregationBuilder() + ); AggregatorFactories.Builder builder = new AggregatorFactories.Builder().addAggregator(filterAggBuilder); QueryRewriteContext context = new QueryRewriteContext(xContentRegistry, null, null, () -> 0L); AggregatorFactories.Builder rewritten = builder.rewrite(context); @@ -302,8 +306,7 @@ public void testBuildPipelineTreeResolvesPipelineOrder() { builder.addPipelineAggregator(PipelineAggregatorBuilders.avgBucket("foo", "real")); 
builder.addAggregator(AggregationBuilders.avg("real").field("target")); PipelineTree tree = builder.buildPipelineTree(); - assertThat(tree.aggregators().stream().map(PipelineAggregator::name).collect(toList()), - equalTo(List.of("foo", "bar"))); + assertThat(tree.aggregators().stream().map(PipelineAggregator::name).collect(toList()), equalTo(List.of("foo", "bar"))); } @Override diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/InternalAggregationsTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/InternalAggregationsTests.java index 4d19e76d4a605..d13cfa2d12984 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/InternalAggregationsTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/InternalAggregationsTests.java @@ -38,7 +38,8 @@ public class InternalAggregationsTests extends ESTestCase { private final NamedWriteableRegistry registry = new NamedWriteableRegistry( - new SearchModule(Settings.EMPTY, Collections.emptyList()).getNamedWriteables()); + new SearchModule(Settings.EMPTY, Collections.emptyList()).getNamedWriteables() + ); public void testReduceEmptyAggs() { List aggs = Collections.emptyList(); @@ -47,28 +48,56 @@ public void testReduceEmptyAggs() { assertNull(InternalAggregations.reduce(aggs, reduceContext)); } - public void testNonFinalReduceTopLevelPipelineAggs() { - InternalAggregation terms = new StringTerms("name", BucketOrder.key(true), BucketOrder.key(true), - 10, 1, Collections.emptyMap(), DocValueFormat.RAW, 25, false, 10, Collections.emptyList(), 0L); + public void testNonFinalReduceTopLevelPipelineAggs() { + InternalAggregation terms = new StringTerms( + "name", + BucketOrder.key(true), + BucketOrder.key(true), + 10, + 1, + Collections.emptyMap(), + DocValueFormat.RAW, + 25, + false, + 10, + Collections.emptyList(), + 0L + ); List aggs = singletonList(InternalAggregations.from(Collections.singletonList(terms))); InternalAggregations reducedAggs = InternalAggregations.topLevelReduce(aggs, maxBucketReduceContext().forPartialReduction()); assertEquals(1, reducedAggs.aggregations.size()); } - public void testFinalReduceTopLevelPipelineAggs() { - InternalAggregation terms = new StringTerms("name", BucketOrder.key(true), BucketOrder.key(true), - 10, 1, Collections.emptyMap(), DocValueFormat.RAW, 25, false, 10, Collections.emptyList(), 0L); + public void testFinalReduceTopLevelPipelineAggs() { + InternalAggregation terms = new StringTerms( + "name", + BucketOrder.key(true), + BucketOrder.key(true), + 10, + 1, + Collections.emptyMap(), + DocValueFormat.RAW, + 25, + false, + 10, + Collections.emptyList(), + 0L + ); InternalAggregations aggs = InternalAggregations.from(Collections.singletonList(terms)); - InternalAggregations reducedAggs = InternalAggregations.topLevelReduce(Collections.singletonList(aggs), - maxBucketReduceContext().forFinalReduction()); + InternalAggregations reducedAggs = InternalAggregations.topLevelReduce( + Collections.singletonList(aggs), + maxBucketReduceContext().forFinalReduction() + ); assertEquals(2, reducedAggs.aggregations.size()); } private InternalAggregation.ReduceContextBuilder maxBucketReduceContext() { MaxBucketPipelineAggregationBuilder maxBucketPipelineAggregationBuilder = new MaxBucketPipelineAggregationBuilder("test", "test"); - PipelineAggregator.PipelineTree tree = - new PipelineAggregator.PipelineTree(emptyMap(), singletonList(maxBucketPipelineAggregationBuilder.create())); + PipelineAggregator.PipelineTree tree = new PipelineAggregator.PipelineTree( + 
emptyMap(), + singletonList(maxBucketPipelineAggregationBuilder.create()) + ); return InternalAggregationTestCase.emptyReduceContextBuilder(tree); } @@ -99,8 +128,7 @@ public void testSerialization() throws Exception { public void testSerializedSize() throws Exception { InternalAggregations aggregations = createTestInstance(); - assertThat(DelayableWriteable.getSerializedSize(aggregations), - equalTo((long) serialize(aggregations, Version.CURRENT).length)); + assertThat(DelayableWriteable.getSerializedSize(aggregations), equalTo((long) serialize(aggregations, Version.CURRENT).length)); } private void writeToAndReadFrom(InternalAggregations aggregations, Version version, int iteration) throws IOException { diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/InternalMultiBucketAggregationTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/InternalMultiBucketAggregationTests.java index d8c2679693e78..2724e7a9bf7b7 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/InternalMultiBucketAggregationTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/InternalMultiBucketAggregationTests.java @@ -62,8 +62,10 @@ public void testResolveToNothing() { LongTerms.Bucket bucket = new LongTerms.Bucket(1, 1, internalAggregations, false, 0, DocValueFormat.RAW); buckets.add(bucket); - InvalidAggregationPathException e = expectThrows(InvalidAggregationPathException.class, - () -> resolvePropertyFromPath(path.getPathElementsAsStringList(), buckets, "the_long_terms")); + InvalidAggregationPathException e = expectThrows( + InvalidAggregationPathException.class, + () -> resolvePropertyFromPath(path.getPathElementsAsStringList(), buckets, "the_long_terms") + ); assertThat(e.getMessage(), equalTo("Cannot find an aggregation named [foo] in [the_long_terms]")); } @@ -76,8 +78,10 @@ public void testResolveToUnknown() { LongTerms.Bucket bucket = new LongTerms.Bucket(1, 1, internalAggregations, false, 0, DocValueFormat.RAW); buckets.add(bucket); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, - () -> resolvePropertyFromPath(path.getPathElementsAsStringList(), buckets, "the_long_terms")); + IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, + () -> resolvePropertyFromPath(path.getPathElementsAsStringList(), buckets, "the_long_terms") + ); assertThat(e.getMessage(), equalTo("path not supported for [the_avg]: [unknown]")); } @@ -126,12 +130,31 @@ public void testResolveToSpecificBucket() { List buckets = new ArrayList<>(); InternalAggregation agg = new InternalAvg("the_avg", 2, 1, DocValueFormat.RAW, Collections.emptyMap()); InternalAggregations internalStringAggs = InternalAggregations.from(Collections.singletonList(agg)); - List stringBuckets = Collections.singletonList(new StringTerms.Bucket( - new BytesRef("foo".getBytes(StandardCharsets.UTF_8), 0, "foo".getBytes(StandardCharsets.UTF_8).length), 1, - internalStringAggs, false, 0, DocValueFormat.RAW)); - - InternalTerms termsAgg = new StringTerms("string_terms", BucketOrder.count(false), BucketOrder.count(false), 1, 0, - Collections.emptyMap(), DocValueFormat.RAW, 1, false, 0, stringBuckets, 0L); + List stringBuckets = Collections.singletonList( + new StringTerms.Bucket( + new BytesRef("foo".getBytes(StandardCharsets.UTF_8), 0, "foo".getBytes(StandardCharsets.UTF_8).length), + 1, + internalStringAggs, + false, + 0, + DocValueFormat.RAW + ) + ); + + InternalTerms termsAgg = new StringTerms( + "string_terms", + BucketOrder.count(false), + 
BucketOrder.count(false), + 1, + 0, + Collections.emptyMap(), + DocValueFormat.RAW, + 1, + false, + 0, + stringBuckets, + 0L + ); InternalAggregations internalAggregations = InternalAggregations.from(Collections.singletonList(termsAgg)); LongTerms.Bucket bucket = new LongTerms.Bucket(19, 1, internalAggregations, false, 0, DocValueFormat.RAW); buckets.add(bucket); @@ -146,18 +169,39 @@ public void testResolveToMissingSpecificBucket() { List buckets = new ArrayList<>(); InternalAggregation agg = new InternalAvg("the_avg", 2, 1, DocValueFormat.RAW, Collections.emptyMap()); InternalAggregations internalStringAggs = InternalAggregations.from(Collections.singletonList(agg)); - List stringBuckets = Collections.singletonList(new StringTerms.Bucket( - new BytesRef("foo".getBytes(StandardCharsets.UTF_8), 0, "foo".getBytes(StandardCharsets.UTF_8).length), 1, - internalStringAggs, false, 0, DocValueFormat.RAW)); - - InternalTerms termsAgg = new StringTerms("string_terms", BucketOrder.count(false), BucketOrder.count(false), 1, 0, - Collections.emptyMap(), DocValueFormat.RAW, 1, false, 0, stringBuckets, 0L); + List stringBuckets = Collections.singletonList( + new StringTerms.Bucket( + new BytesRef("foo".getBytes(StandardCharsets.UTF_8), 0, "foo".getBytes(StandardCharsets.UTF_8).length), + 1, + internalStringAggs, + false, + 0, + DocValueFormat.RAW + ) + ); + + InternalTerms termsAgg = new StringTerms( + "string_terms", + BucketOrder.count(false), + BucketOrder.count(false), + 1, + 0, + Collections.emptyMap(), + DocValueFormat.RAW, + 1, + false, + 0, + stringBuckets, + 0L + ); InternalAggregations internalAggregations = InternalAggregations.from(Collections.singletonList(termsAgg)); LongTerms.Bucket bucket = new LongTerms.Bucket(19, 1, internalAggregations, false, 0, DocValueFormat.RAW); buckets.add(bucket); - InvalidAggregationPathException e = expectThrows(InvalidAggregationPathException.class, - () -> resolvePropertyFromPath(path.getPathElementsAsStringList(), buckets, "the_long_terms")); + InvalidAggregationPathException e = expectThrows( + InvalidAggregationPathException.class, + () -> resolvePropertyFromPath(path.getPathElementsAsStringList(), buckets, "the_long_terms") + ); assertThat(e.getMessage(), equalTo("Cannot find an key ['bar'] in [string_terms]")); } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/InternalOrderTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/InternalOrderTests.java index cc3c849db38f3..b8de1e26e2cb8 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/InternalOrderTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/InternalOrderTests.java @@ -33,10 +33,13 @@ protected BucketOrder createTestInstance() { } private BucketOrder getRandomOrder() { - switch(randomInt(2)) { - case 0: return BucketOrder.key(randomBoolean()); - case 1: return BucketOrder.count(randomBoolean()); - default: return BucketOrder.aggregation(randomAlphaOfLength(10), randomBoolean()); + switch (randomInt(2)) { + case 0: + return BucketOrder.key(randomBoolean()); + case 1: + return BucketOrder.count(randomBoolean()); + default: + return BucketOrder.aggregation(randomAlphaOfLength(10), randomBoolean()); } } @@ -67,7 +70,7 @@ protected void assertEqualInstances(BucketOrder expectedInstance, BucketOrder ne // compound and aggregation order because _key and _count orders are static instances. 
assertEquals(expectedInstance, newInstance); assertEquals(expectedInstance.hashCode(), newInstance.hashCode()); - if(expectedInstance instanceof CompoundOrder || expectedInstance instanceof InternalOrder.Aggregation) { + if (expectedInstance instanceof CompoundOrder || expectedInstance instanceof InternalOrder.Aggregation) { assertNotSame(newInstance, expectedInstance); } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/MultiBucketCollectorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/MultiBucketCollectorTests.java index d6b0d11ff15c0..5313f39aa942f 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/MultiBucketCollectorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/MultiBucketCollectorTests.java @@ -31,7 +31,7 @@ import static org.hamcrest.Matchers.equalTo; -public class MultiBucketCollectorTests extends ESTestCase { +public class MultiBucketCollectorTests extends ESTestCase { private static class ScoreAndDoc extends Scorable { float score; int doc = -1; @@ -92,8 +92,7 @@ private static class TotalHitCountBucketCollector extends BucketCollector { private int count = 0; - TotalHitCountBucketCollector() { - } + TotalHitCountBucketCollector() {} @Override public LeafBucketCollector getLeafCollector(LeafReaderContext context) { @@ -267,9 +266,7 @@ public void testSetScorerAfterCollectionTerminated() throws IOException { assertFalse(setScorerCalled1.get()); assertTrue(setScorerCalled2.get()); - expectThrows(CollectionTerminatedException.class, () -> { - leafCollector.collect(1); - }); + expectThrows(CollectionTerminatedException.class, () -> { leafCollector.collect(1); }); setScorerCalled1.set(false); setScorerCalled2.set(false); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/SubAggCollectionModeTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/SubAggCollectionModeTests.java index 500f4350fad61..317c41100cebf 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/SubAggCollectionModeTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/SubAggCollectionModeTests.java @@ -62,7 +62,7 @@ public void testInvalidReadFrom() throws Exception { try (StreamInput in = out.bytes().streamInput()) { SubAggCollectionMode.readFromStream(in); fail("Expected IOException"); - } catch(IOException e) { + } catch (IOException e) { assertThat(e.getMessage(), containsString("Unknown SubAggCollectionMode ordinal [")); } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/AutoDateHistogramTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/AutoDateHistogramTests.java index 135e8b81ad4f9..08e9998388451 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/AutoDateHistogramTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/AutoDateHistogramTests.java @@ -18,7 +18,7 @@ protected AutoDateHistogramAggregationBuilder createTestAggregatorBuilder() { AutoDateHistogramAggregationBuilder builder = new AutoDateHistogramAggregationBuilder(randomAlphaOfLengthBetween(1, 10)); builder.field(INT_FIELD_NAME); builder.setNumBuckets(randomIntBetween(1, 100000)); - //TODO[PCS]: add builder pattern here + // TODO[PCS]: add builder pattern here if (randomBoolean()) { builder.format("###.##"); } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/BestBucketsDeferringCollectorTests.java 
b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/BestBucketsDeferringCollectorTests.java index 060257d9f211f..0bba25d48a2f6 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/BestBucketsDeferringCollectorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/BestBucketsDeferringCollectorTests.java @@ -161,7 +161,7 @@ public void collect(int doc, long owningBucketOrd) throws IOException { }); } - @AwaitsFix(bugUrl="https://github.com/elastic/elasticsearch/issues/60021") + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/60021") public void testBucketMergeAndDeleteLastEntry() throws Exception { testCase((deferringCollector, delegate) -> new LeafBucketCollector() { @Override diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/BucketUtilsTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/BucketUtilsTests.java index 929125ef91b1e..a02acc798adbf 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/BucketUtilsTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/BucketUtilsTests.java @@ -15,15 +15,14 @@ public class BucketUtilsTests extends ESTestCase { public void testBadInput() { - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, - () -> BucketUtils.suggestShardSideQueueSize(0)); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> BucketUtils.suggestShardSideQueueSize(0)); assertEquals(e.getMessage(), "size must be positive, got 0"); } public void testOverFlow() { for (int iter = 0; iter < 10; ++iter) { final int size = Integer.MAX_VALUE - randomInt(10); - final int shardSize = BucketUtils.suggestShardSideQueueSize( size); + final int shardSize = BucketUtils.suggestShardSideQueueSize(size); assertThat(shardSize, greaterThanOrEqualTo(shardSize)); } } @@ -31,12 +30,12 @@ public void testOverFlow() { public void testShardSizeIsGreaterThanGlobalSize() { for (int iter = 0; iter < 10; ++iter) { final int size = randomIntBetween(1, Integer.MAX_VALUE); - final int shardSize = BucketUtils.suggestShardSideQueueSize( size); + final int shardSize = BucketUtils.suggestShardSideQueueSize(size); assertThat(shardSize, greaterThanOrEqualTo(size)); } } - /*// You may use the code below to evaluate the impact of the BucketUtils.suggestShardSideQueueSize + /*// You may use the code below to evaluate the impact of the BucketUtils.suggestShardSideQueueSize // heuristic public static void main(String[] args) { final int numberOfUniqueTerms = 10000; diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/BucketsAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/BucketsAggregatorTests.java index d00dd83dae838..b91fd78c12ec7 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/BucketsAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/BucketsAggregatorTests.java @@ -25,10 +25,10 @@ import java.io.IOException; -public class BucketsAggregatorTests extends AggregatorTestCase{ +public class BucketsAggregatorTests extends AggregatorTestCase { - public BucketsAggregator buildMergeAggregator() throws IOException{ - try(Directory directory = newDirectory()) { + public BucketsAggregator buildMergeAggregator() throws IOException { + try (Directory directory = newDirectory()) { try (RandomIndexWriter indexWriter = new 
RandomIndexWriter(random(), directory)) { Document document = new Document(); document.add(new SortedNumericDocValuesField("numeric", 0)); @@ -64,35 +64,35 @@ public InternalAggregation buildEmptyAggregation() { } } - public void testBucketMergeNoDelete() throws IOException{ + public void testBucketMergeNoDelete() throws IOException { BucketsAggregator mergeAggregator = buildMergeAggregator(); mergeAggregator.grow(10); - for(int i = 0; i < 10; i++){ + for (int i = 0; i < 10; i++) { mergeAggregator.incrementBucketDocCount(i, i); } mergeAggregator.rewriteBuckets(10, bucket -> bucket % 5); - for(int i=0; i<5; i++) { + for (int i = 0; i < 5; i++) { // The i'th bucket should now have all docs whose index % 5 = i // This is buckets i and i + 5 // i + (i+5) = 2*i + 5 assertEquals(mergeAggregator.getDocCounts().get(i), (2 * i) + 5); } - for(int i=5; i<10; i++){ - assertEquals(mergeAggregator.getDocCounts().get(i), 0); + for (int i = 5; i < 10; i++) { + assertEquals(mergeAggregator.getDocCounts().get(i), 0); } } - public void testBucketMergeAndDelete() throws IOException{ + public void testBucketMergeAndDelete() throws IOException { BucketsAggregator mergeAggregator = buildMergeAggregator(); mergeAggregator.grow(10); int sum = 0; - for(int i = 0; i < 20; i++){ + for (int i = 0; i < 20; i++) { mergeAggregator.incrementBucketDocCount(i, i); - if(5 <= i && i < 15) { + if (5 <= i && i < 15) { sum += i; } } @@ -101,7 +101,7 @@ public void testBucketMergeAndDelete() throws IOException{ mergeAggregator.rewriteBuckets(10, bucket -> (5 <= bucket && bucket < 15) ? 5 : -1); assertEquals(mergeAggregator.getDocCounts().size(), 10); // Confirm that the 10 other buckets were deleted - for(int i=0; i<10; i++){ + for (int i = 0; i < 10; i++) { assertEquals(mergeAggregator.getDocCounts().get(i), i == 5 ? sum : 0); } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/DateRangeTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/DateRangeTests.java index 9ccdcbf24405a..4d477cfcadb2e 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/DateRangeTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/DateRangeTests.java @@ -32,9 +32,11 @@ protected DateRangeAggregationBuilder createTestAggregatorBuilder() { key = randomAlphaOfLengthBetween(1, 20); } double from = randomBoolean() ? Double.NEGATIVE_INFINITY : randomIntBetween(Integer.MIN_VALUE, Integer.MAX_VALUE - 1000); - double to = randomBoolean() ? Double.POSITIVE_INFINITY - : (Double.isInfinite(from) ? randomIntBetween(Integer.MIN_VALUE, Integer.MAX_VALUE) - : randomIntBetween((int) from, Integer.MAX_VALUE)); + double to = randomBoolean() + ? Double.POSITIVE_INFINITY + : (Double.isInfinite(from) + ? 
randomIntBetween(Integer.MIN_VALUE, Integer.MAX_VALUE) + : randomIntBetween((int) from, Integer.MAX_VALUE)); if (randomBoolean()) { factory.addRange(new Range(key, from, to)); } else { @@ -60,16 +62,18 @@ protected DateRangeAggregationBuilder createTestAggregatorBuilder() { } public void testParsingRangeStrict() throws IOException { - final String rangeAggregation = "{\n" + - "\"field\" : \"date\",\n" + - "\"format\" : \"yyyy-MM-dd\",\n" + - "\"ranges\" : [\n" + - " { \"from\" : \"2017-01-01\", \"to\" : \"2017-01-02\", \"badField\" : \"abcd\" }\n" + - "]\n" + - "}"; + final String rangeAggregation = "{\n" + + "\"field\" : \"date\",\n" + + "\"format\" : \"yyyy-MM-dd\",\n" + + "\"ranges\" : [\n" + + " { \"from\" : \"2017-01-01\", \"to\" : \"2017-01-02\", \"badField\" : \"abcd\" }\n" + + "]\n" + + "}"; XContentParser parser = createParser(JsonXContent.jsonXContent, rangeAggregation); - XContentParseException ex = expectThrows(XContentParseException.class, - () -> DateRangeAggregationBuilder.PARSER.parse(parser, "aggregationName")); + XContentParseException ex = expectThrows( + XContentParseException.class, + () -> DateRangeAggregationBuilder.PARSER.parse(parser, "aggregationName") + ); assertThat(ex.getCause(), notNullValue()); assertThat(ex.getCause().getMessage(), containsString("badField")); } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/DateScriptMocksPlugin.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/DateScriptMocksPlugin.java index 567c954005b3b..13a9906ec2d39 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/DateScriptMocksPlugin.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/DateScriptMocksPlugin.java @@ -37,10 +37,11 @@ public Map, Object>> pluginScripts() { String fieldname = (String) params.get("fieldname"); return docLookup.get(fieldname); }); - scripts.put(DOUBLE_PLUS_ONE_MONTH, params -> - new DateTime(Double.valueOf((double) params.get("_value")).longValue(), DateTimeZone.UTC).plusMonths(1).getMillis()); - scripts.put(LONG_PLUS_ONE_MONTH, params -> - new DateTime((long) params.get("_value"), DateTimeZone.UTC).plusMonths(1).getMillis()); + scripts.put( + DOUBLE_PLUS_ONE_MONTH, + params -> new DateTime(Double.valueOf((double) params.get("_value")).longValue(), DateTimeZone.UTC).plusMonths(1).getMillis() + ); + scripts.put(LONG_PLUS_ONE_MONTH, params -> new DateTime((long) params.get("_value"), DateTimeZone.UTC).plusMonths(1).getMillis()); return scripts; } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/DocCountProviderTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/DocCountProviderTests.java index 13e931562ced2..7b09ad5178ff1 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/DocCountProviderTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/DocCountProviderTests.java @@ -30,7 +30,6 @@ import static java.util.Collections.singleton; - public class DocCountProviderTests extends AggregatorTestCase { private static final String DOC_COUNT_FIELD = DocCountFieldMapper.NAME; @@ -38,21 +37,19 @@ public class DocCountProviderTests extends AggregatorTestCase { public void testDocsWithDocCount() throws IOException { testAggregation(new MatchAllDocsQuery(), iw -> { - iw.addDocument(List.of( - new CustomTermFreqField(DOC_COUNT_FIELD, DOC_COUNT_FIELD, 4), - new SortedNumericDocValuesField(NUMBER_FIELD, 1) - )); - iw.addDocument(List.of( - new 
CustomTermFreqField(DOC_COUNT_FIELD, DOC_COUNT_FIELD, 5), - new SortedNumericDocValuesField(NUMBER_FIELD, 7) - )); - iw.addDocument(List.of( - // Intentionally omit doc_count field - new SortedNumericDocValuesField(NUMBER_FIELD, 1) - )); - }, global -> { - assertEquals(10, global.getDocCount()); - }); + iw.addDocument( + List.of(new CustomTermFreqField(DOC_COUNT_FIELD, DOC_COUNT_FIELD, 4), new SortedNumericDocValuesField(NUMBER_FIELD, 1)) + ); + iw.addDocument( + List.of(new CustomTermFreqField(DOC_COUNT_FIELD, DOC_COUNT_FIELD, 5), new SortedNumericDocValuesField(NUMBER_FIELD, 7)) + ); + iw.addDocument( + List.of( + // Intentionally omit doc_count field + new SortedNumericDocValuesField(NUMBER_FIELD, 1) + ) + ); + }, global -> { assertEquals(10, global.getDocCount()); }); } public void testDocsWithoutDocCount() throws IOException { @@ -60,37 +57,30 @@ public void testDocsWithoutDocCount() throws IOException { iw.addDocument(singleton(new SortedNumericDocValuesField(NUMBER_FIELD, 1))); iw.addDocument(singleton(new SortedNumericDocValuesField(NUMBER_FIELD, 7))); iw.addDocument(singleton(new SortedNumericDocValuesField(NUMBER_FIELD, 1))); - }, global -> { - assertEquals(3, global.getDocCount()); - }); + }, global -> { assertEquals(3, global.getDocCount()); }); } public void testQueryFiltering() throws IOException { testAggregation(IntPoint.newRangeQuery(NUMBER_FIELD, 4, 5), iw -> { - iw.addDocument(List.of( - new CustomTermFreqField(DOC_COUNT_FIELD, DOC_COUNT_FIELD, 4), - new IntPoint(NUMBER_FIELD, 6) - )); - iw.addDocument(List.of( - new CustomTermFreqField(DOC_COUNT_FIELD, DOC_COUNT_FIELD, 2), - new IntPoint(NUMBER_FIELD, 5) - )); - iw.addDocument(List.of( - // Intentionally omit doc_count field - new IntPoint(NUMBER_FIELD, 1) - )); - iw.addDocument(List.of( - // Intentionally omit doc_count field - new IntPoint(NUMBER_FIELD, 5) - )); - }, global -> { - assertEquals(3, global.getDocCount()); - }); + iw.addDocument(List.of(new CustomTermFreqField(DOC_COUNT_FIELD, DOC_COUNT_FIELD, 4), new IntPoint(NUMBER_FIELD, 6))); + iw.addDocument(List.of(new CustomTermFreqField(DOC_COUNT_FIELD, DOC_COUNT_FIELD, 2), new IntPoint(NUMBER_FIELD, 5))); + iw.addDocument( + List.of( + // Intentionally omit doc_count field + new IntPoint(NUMBER_FIELD, 1) + ) + ); + iw.addDocument( + List.of( + // Intentionally omit doc_count field + new IntPoint(NUMBER_FIELD, 5) + ) + ); + }, global -> { assertEquals(3, global.getDocCount()); }); } - private void testAggregation(Query query, - CheckedConsumer indexer, - Consumer verify) throws IOException { + private void testAggregation(Query query, CheckedConsumer indexer, Consumer verify) + throws IOException { AggregationBuilder builder = new FilterAggregationBuilder("f", new MatchAllQueryBuilder()); MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(NUMBER_FIELD, NumberFieldMapper.NumberType.LONG); MappedFieldType docCountFieldType = new DocCountFieldMapper.DocCountFieldType(); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/FilterTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/FilterTests.java index 1a848b808efaf..d5949ae3114f7 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/FilterTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/FilterTests.java @@ -16,8 +16,10 @@ public class FilterTests extends BaseAggregationTestCase randomAlphaOfLengthBetween(1, 20), size)) { - filters[i++] = new KeyedFilter(key, - 
QueryBuilders.termQuery(randomAlphaOfLengthBetween(5, 20), randomAlphaOfLengthBetween(5, 20))); + filters[i++] = new KeyedFilter( + key, + QueryBuilders.termQuery(randomAlphaOfLengthBetween(5, 20), randomAlphaOfLengthBetween(5, 20)) + ); } factory = new FiltersAggregationBuilder(randomAlphaOfLengthBetween(1, 20), filters); } else { @@ -67,8 +69,9 @@ protected FiltersAggregationBuilder createTestAggregatorBuilder() { * Also check the list passed in is not modified by this but rather copied */ public void testFiltersSortedByKey() { - KeyedFilter[] original = new KeyedFilter[]{new KeyedFilter("bbb", new MatchNoneQueryBuilder()), - new KeyedFilter("aaa", new MatchNoneQueryBuilder())}; + KeyedFilter[] original = new KeyedFilter[] { + new KeyedFilter("bbb", new MatchNoneQueryBuilder()), + new KeyedFilter("aaa", new MatchNoneQueryBuilder()) }; FiltersAggregationBuilder builder; builder = new FiltersAggregationBuilder("my-agg", original); assertEquals("aaa", builder.filters().get(0).key()); @@ -81,9 +84,7 @@ public void testFiltersSortedByKey() { public void testOtherBucket() throws IOException { XContentBuilder builder = XContentFactory.contentBuilder(randomFrom(XContentType.values())); builder.startObject(); - builder.startArray("filters") - .startObject().startObject("term").field("field", "foo").endObject().endObject() - .endArray(); + builder.startArray("filters").startObject().startObject("term").field("field", "foo").endObject().endObject().endArray(); builder.endObject(); try (XContentParser parser = createParser(shuffleXContent(builder))) { parser.nextToken(); @@ -93,9 +94,7 @@ public void testOtherBucket() throws IOException { builder = XContentFactory.contentBuilder(randomFrom(XContentType.values())); builder.startObject(); - builder.startArray("filters") - .startObject().startObject("term").field("field", "foo").endObject().endObject() - .endArray(); + builder.startArray("filters").startObject().startObject("term").field("field", "foo").endObject().endObject().endArray(); builder.field("other_bucket_key", "some_key"); builder.endObject(); } @@ -107,9 +106,7 @@ public void testOtherBucket() throws IOException { builder = XContentFactory.contentBuilder(randomFrom(XContentType.values())); builder.startObject(); - builder.startArray("filters") - .startObject().startObject("term").field("field", "foo").endObject().endObject() - .endArray(); + builder.startArray("filters").startObject().startObject("term").field("field", "foo").endObject().endObject().endArray(); builder.field("other_bucket", false); builder.field("other_bucket_key", "some_key"); builder.endObject(); @@ -141,12 +138,12 @@ public void testRewrite() throws IOException { assertFalse(((FiltersAggregationBuilder) rewritten).isKeyed()); // test keyed filter that doesn't rewrite - original = new FiltersAggregationBuilder("my-agg", new KeyedFilter("my-filter", new MatchAllQueryBuilder())); + original = new FiltersAggregationBuilder("my-agg", new KeyedFilter("my-filter", new MatchAllQueryBuilder())); rewritten = original.rewrite(new QueryRewriteContext(xContentRegistry(), null, null, () -> 0L)); assertSame(original, rewritten); // test non-keyed filter that does rewrite - original = new FiltersAggregationBuilder("my-agg", new KeyedFilter("my-filter", new BoolQueryBuilder())); + original = new FiltersAggregationBuilder("my-agg", new KeyedFilter("my-filter", new BoolQueryBuilder())); rewritten = original.rewrite(new QueryRewriteContext(xContentRegistry(), null, null, () -> 0L)); assertNotSame(original, rewritten); assertThat(rewritten, 
instanceOf(FiltersAggregationBuilder.class)); @@ -158,9 +155,7 @@ public void testRewrite() throws IOException { // test sub-agg filter that does rewrite original = new TermsAggregationBuilder("terms").userValueTypeHint(ValueType.BOOLEAN) - .subAggregation( - new FiltersAggregationBuilder("my-agg", new KeyedFilter("my-filter", new BoolQueryBuilder())) - ); + .subAggregation(new FiltersAggregationBuilder("my-agg", new KeyedFilter("my-filter", new BoolQueryBuilder()))); rewritten = original.rewrite(new QueryRewriteContext(xContentRegistry(), null, null, () -> 0L)); assertNotSame(original, rewritten); assertNotEquals(original, rewritten); @@ -170,8 +165,7 @@ public void testRewrite() throws IOException { assertThat(subAgg, instanceOf(FiltersAggregationBuilder.class)); assertNotSame(original.getSubAggregations().iterator().next(), subAgg); assertEquals("my-agg", subAgg.getName()); - assertSame(rewritten, - rewritten.rewrite(new QueryRewriteContext(xContentRegistry(), null, null, () -> 0L))); + assertSame(rewritten, rewritten.rewrite(new QueryRewriteContext(xContentRegistry(), null, null, () -> 0L))); } public void testRewritePreservesOtherBucket() throws IOException { @@ -179,8 +173,7 @@ public void testRewritePreservesOtherBucket() throws IOException { originalFilters.otherBucket(randomBoolean()); originalFilters.otherBucketKey(randomAlphaOfLength(10)); - AggregationBuilder rewritten = originalFilters.rewrite(new QueryRewriteContext(xContentRegistry(), - null, null, () -> 0L)); + AggregationBuilder rewritten = originalFilters.rewrite(new QueryRewriteContext(xContentRegistry(), null, null, () -> 0L)); assertThat(rewritten, instanceOf(FiltersAggregationBuilder.class)); FiltersAggregationBuilder rewrittenFilters = (FiltersAggregationBuilder) rewritten; @@ -196,8 +189,10 @@ public void testEmptyFilters() throws IOException { builder.endObject(); XContentParser parser = createParser(shuffleXContent(builder)); parser.nextToken(); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, - () -> FiltersAggregationBuilder.parse("agg_name", parser)); + IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, + () -> FiltersAggregationBuilder.parse("agg_name", parser) + ); assertThat(e.getMessage(), equalTo("[filters] cannot be empty.")); } @@ -208,8 +203,10 @@ public void testEmptyFilters() throws IOException { builder.endObject(); XContentParser parser = createParser(shuffleXContent(builder)); parser.nextToken(); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, - () -> FiltersAggregationBuilder.parse("agg_name", parser)); + IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, + () -> FiltersAggregationBuilder.parse("agg_name", parser) + ); assertThat(e.getMessage(), equalTo("[filters] cannot be empty.")); } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/GeoDistanceRangeTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/GeoDistanceRangeTests.java index afe7858846992..fd106ae5303a5 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/GeoDistanceRangeTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/GeoDistanceRangeTests.java @@ -31,17 +31,19 @@ public class GeoDistanceRangeTests extends BaseAggregationTestCase GeoDistanceAggregationBuilder.parse("aggregationName", parser)); + XContentParseException ex = expectThrows( + XContentParseException.class, + () -> 
GeoDistanceAggregationBuilder.parse("aggregationName", parser) + ); assertThat(ex.getCause(), notNullValue()); assertThat(ex.getCause().getMessage(), containsString("badField")); } @@ -80,17 +84,19 @@ public void testParsingRangeStrict() throws IOException { * We never render "null" values to xContent, but we should test that we can parse them (and they return correct defaults) */ public void testParsingNull() throws IOException { - final String rangeAggregation = "{\n" + - "\"field\" : \"location\",\n" + - "\"origin\" : \"52.3760, 4.894\",\n" + - "\"unit\" : \"m\",\n" + - "\"ranges\" : [\n" + - " { \"from\" : null, \"to\" : null }\n" + - "]\n" + - "}"; + final String rangeAggregation = "{\n" + + "\"field\" : \"location\",\n" + + "\"origin\" : \"52.3760, 4.894\",\n" + + "\"unit\" : \"m\",\n" + + "\"ranges\" : [\n" + + " { \"from\" : null, \"to\" : null }\n" + + "]\n" + + "}"; XContentParser parser = createParser(JsonXContent.jsonXContent, rangeAggregation); - GeoDistanceAggregationBuilder aggregationBuilder = (GeoDistanceAggregationBuilder) GeoDistanceAggregationBuilder - .parse("aggregationName", parser); + GeoDistanceAggregationBuilder aggregationBuilder = (GeoDistanceAggregationBuilder) GeoDistanceAggregationBuilder.parse( + "aggregationName", + parser + ); assertEquals(1, aggregationBuilder.range().size()); assertEquals(0.0, aggregationBuilder.range().get(0).getFrom(), 0.0); assertEquals(Double.POSITIVE_INFINITY, aggregationBuilder.range().get(0).getTo(), 0.0); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/GeoHashGridTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/GeoHashGridTests.java index e979a4c212d54..481d8d00e616e 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/GeoHashGridTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/GeoHashGridTests.java @@ -54,12 +54,18 @@ public void testSerializationPreBounds() throws Exception { try (BytesStreamOutput output = new BytesStreamOutput()) { output.setVersion(Version.V_7_6_0); builder.writeTo(output); - try (StreamInput in = new NamedWriteableAwareStreamInput(output.bytes().streamInput(), - new NamedWriteableRegistry(Collections.emptyList()))) { + try ( + StreamInput in = new NamedWriteableAwareStreamInput( + output.bytes().streamInput(), + new NamedWriteableRegistry(Collections.emptyList()) + ) + ) { in.setVersion(noBoundsSupportVersion); GeoHashGridAggregationBuilder readBuilder = new GeoHashGridAggregationBuilder(in); - assertThat(readBuilder.geoBoundingBox(), equalTo(new GeoBoundingBox( - new GeoPoint(Double.NaN, Double.NaN), new GeoPoint(Double.NaN, Double.NaN)))); + assertThat( + readBuilder.geoBoundingBox(), + equalTo(new GeoBoundingBox(new GeoPoint(Double.NaN, Double.NaN), new GeoPoint(Double.NaN, Double.NaN))) + ); } } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/GeoTileGridTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/GeoTileGridTests.java index 60c80c53a9d16..01a6b91ab26e9 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/GeoTileGridTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/GeoTileGridTests.java @@ -54,12 +54,18 @@ public void testSerializationPreBounds() throws Exception { try (BytesStreamOutput output = new BytesStreamOutput()) { output.setVersion(Version.V_7_6_0); builder.writeTo(output); - try (StreamInput in = new 
NamedWriteableAwareStreamInput(output.bytes().streamInput(), - new NamedWriteableRegistry(Collections.emptyList()))) { + try ( + StreamInput in = new NamedWriteableAwareStreamInput( + output.bytes().streamInput(), + new NamedWriteableRegistry(Collections.emptyList()) + ) + ) { in.setVersion(noBoundsSupportVersion); GeoTileGridAggregationBuilder readBuilder = new GeoTileGridAggregationBuilder(in); - assertThat(readBuilder.geoBoundingBox(), equalTo(new GeoBoundingBox( - new GeoPoint(Double.NaN, Double.NaN), new GeoPoint(Double.NaN, Double.NaN)))); + assertThat( + readBuilder.geoBoundingBox(), + equalTo(new GeoBoundingBox(new GeoPoint(Double.NaN, Double.NaN), new GeoPoint(Double.NaN, Double.NaN))) + ); } } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/HistogramTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/HistogramTests.java index f2a2e1cb04536..9f8c8bcd14a90 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/HistogramTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/HistogramTests.java @@ -9,8 +9,8 @@ package org.elasticsearch.search.aggregations.bucket; import org.elasticsearch.search.aggregations.BaseAggregationTestCase; -import org.elasticsearch.search.aggregations.bucket.histogram.HistogramAggregationBuilder; import org.elasticsearch.search.aggregations.BucketOrder; +import org.elasticsearch.search.aggregations.bucket.histogram.HistogramAggregationBuilder; import java.util.ArrayList; import java.util.List; @@ -47,7 +47,7 @@ protected HistogramAggregationBuilder createTestAggregatorBuilder() { } if (randomBoolean()) { List order = randomOrder(); - if(order.size() == 1 && randomBoolean()) { + if (order.size() == 1 && randomBoolean()) { factory.order(order.get(0)); } else { factory.order(order); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/IpRangeTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/IpRangeTests.java index 33354c933c029..c7c987d4a95a1 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/IpRangeTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/IpRangeTests.java @@ -24,28 +24,28 @@ protected IpRangeAggregationBuilder createTestAggregatorBuilder() { key = randomAlphaOfLengthBetween(1, 20); } switch (randomInt(3)) { - case 0: - boolean v4 = randomBoolean(); - int prefixLength; - if (v4) { - prefixLength = randomInt(32); - } else { - prefixLength = randomInt(128); - } - factory.addMaskRange(key, NetworkAddress.format(randomIp(v4)) + "/" + prefixLength); - break; - case 1: - factory.addUnboundedFrom(key, NetworkAddress.format(randomIp(randomBoolean()))); - break; - case 2: - factory.addUnboundedTo(key, NetworkAddress.format(randomIp(randomBoolean()))); - break; - case 3: - v4 = randomBoolean(); - factory.addRange(key, NetworkAddress.format(randomIp(v4)), NetworkAddress.format(randomIp(v4))); - break; - default: - fail(); + case 0: + boolean v4 = randomBoolean(); + int prefixLength; + if (v4) { + prefixLength = randomInt(32); + } else { + prefixLength = randomInt(128); + } + factory.addMaskRange(key, NetworkAddress.format(randomIp(v4)) + "/" + prefixLength); + break; + case 1: + factory.addUnboundedFrom(key, NetworkAddress.format(randomIp(randomBoolean()))); + break; + case 2: + factory.addUnboundedTo(key, NetworkAddress.format(randomIp(randomBoolean()))); + break; + case 3: + v4 = randomBoolean(); + factory.addRange(key, 
NetworkAddress.format(randomIp(v4)), NetworkAddress.format(randomIp(v4))); + break; + default: + fail(); } } factory.field(IP_FIELD_NAME); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/RangeTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/RangeTests.java index 998e85c450c1f..8c2b6751177ea 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/RangeTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/RangeTests.java @@ -32,9 +32,11 @@ protected RangeAggregationBuilder createTestAggregatorBuilder() { key = randomAlphaOfLengthBetween(1, 20); } double from = randomBoolean() ? Double.NEGATIVE_INFINITY : randomIntBetween(Integer.MIN_VALUE, Integer.MAX_VALUE - 1000); - double to = randomBoolean() ? Double.POSITIVE_INFINITY - : (Double.isInfinite(from) ? randomIntBetween(Integer.MIN_VALUE, Integer.MAX_VALUE) - : randomIntBetween((int) from, Integer.MAX_VALUE)); + double to = randomBoolean() + ? Double.POSITIVE_INFINITY + : (Double.isInfinite(from) + ? randomIntBetween(Integer.MIN_VALUE, Integer.MAX_VALUE) + : randomIntBetween((int) from, Integer.MAX_VALUE)); if (randomBoolean()) { factory.addRange(new Range(key, from, to)); } else { @@ -57,15 +59,17 @@ protected RangeAggregationBuilder createTestAggregatorBuilder() { } public void testParsingRangeStrict() throws IOException { - final String rangeAggregation = "{\n" + - "\"field\" : \"price\",\n" + - "\"ranges\" : [\n" + - " { \"from\" : 50, \"to\" : 100, \"badField\" : \"abcd\" }\n" + - "]\n" + - "}"; + final String rangeAggregation = "{\n" + + "\"field\" : \"price\",\n" + + "\"ranges\" : [\n" + + " { \"from\" : 50, \"to\" : 100, \"badField\" : \"abcd\" }\n" + + "]\n" + + "}"; XContentParser parser = createParser(JsonXContent.jsonXContent, rangeAggregation); - XContentParseException ex = expectThrows(XContentParseException.class, - () -> RangeAggregationBuilder.PARSER.parse(parser, "aggregationName")); + XContentParseException ex = expectThrows( + XContentParseException.class, + () -> RangeAggregationBuilder.PARSER.parse(parser, "aggregationName") + ); assertThat(ex.getCause(), notNullValue()); assertThat(ex.getCause().getMessage(), containsString("badField")); } @@ -74,12 +78,12 @@ public void testParsingRangeStrict() throws IOException { * We never render "null" values to xContent, but we should test that we can parse them (and they return correct defaults) */ public void testParsingNull() throws IOException { - final String rangeAggregation = "{\n" + - "\"field\" : \"price\",\n" + - "\"ranges\" : [\n" + - " { \"from\" : null, \"to\" : null }\n" + - "]\n" + - "}"; + final String rangeAggregation = "{\n" + + "\"field\" : \"price\",\n" + + "\"ranges\" : [\n" + + " { \"from\" : null, \"to\" : null }\n" + + "]\n" + + "}"; XContentParser parser = createParser(JsonXContent.jsonXContent, rangeAggregation); RangeAggregationBuilder aggregationBuilder = RangeAggregationBuilder.PARSER.parse(parser, "aggregationName"); assertEquals(1, aggregationBuilder.ranges().size()); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/ShardSizeTestCase.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/ShardSizeTestCase.java index fcbb4ebf7259b..98c6cfee17b05 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/ShardSizeTestCase.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/ShardSizeTestCase.java @@ -31,8 +31,7 @@ protected int numberOfShards() { } 
protected void createIdx(String keyFieldMapping) { - assertAcked(prepareCreate("idx") - .setMapping("key", keyFieldMapping)); + assertAcked(prepareCreate("idx").setMapping("key", keyFieldMapping)); } protected static String routing1; // routing key to shard 1 @@ -75,7 +74,7 @@ protected void indexData() throws Exception { docs.addAll(indexDoc(routing2, "4", 2)); docs.addAll(indexDoc(routing2, "5", 1)); - // total docs in shard "2" = 12 + // total docs in shard "2" = 12 indexRandom(true, docs); @@ -92,11 +91,9 @@ protected void indexData() throws Exception { protected List indexDoc(String shard, String key, int times) throws Exception { IndexRequestBuilder[] builders = new IndexRequestBuilder[times]; for (int i = 0; i < times; i++) { - builders[i] = client().prepareIndex("idx").setRouting(shard).setSource(jsonBuilder() - .startObject() - .field("key", key) - .field("value", 1) - .endObject()); + builders[i] = client().prepareIndex("idx") + .setRouting(shard) + .setSource(jsonBuilder().startObject().field("key", key).field("value", 1).endObject()); } return Arrays.asList(builders); } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/SignificantTermsTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/SignificantTermsTests.java index c36fb191f4207..0fcd5381c1a15 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/SignificantTermsTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/SignificantTermsTests.java @@ -58,30 +58,30 @@ protected SignificantTermsAggregationBuilder createTestAggregatorBuilder() { if (randomBoolean()) { int minDocCount = randomInt(4); switch (minDocCount) { - case 0: - break; - case 1: - case 2: - case 3: - case 4: - minDocCount = randomIntBetween(0, Integer.MAX_VALUE); - break; + case 0: + break; + case 1: + case 2: + case 3: + case 4: + minDocCount = randomIntBetween(0, Integer.MAX_VALUE); + break; } factory.bucketCountThresholds().setMinDocCount(minDocCount); } if (randomBoolean()) { int shardMinDocCount = randomInt(4); switch (shardMinDocCount) { - case 0: - break; - case 1: - case 2: - case 3: - case 4: - shardMinDocCount = randomIntBetween(0, Integer.MAX_VALUE); - break; - default: - fail(); + case 0: + break; + case 1: + case 2: + case 3: + case 4: + shardMinDocCount = randomIntBetween(0, Integer.MAX_VALUE); + break; + default: + fail(); } factory.bucketCountThresholds().setShardMinDocCount(shardMinDocCount); } @@ -108,26 +108,26 @@ protected SignificantTermsAggregationBuilder createTestAggregatorBuilder() { static SignificanceHeuristic getSignificanceHeuristic() { SignificanceHeuristic significanceHeuristic = null; switch (randomInt(5)) { - case 0: - significanceHeuristic = new PercentageScore(); - break; - case 1: - significanceHeuristic = new ChiSquare(randomBoolean(), randomBoolean()); - break; - case 2: - significanceHeuristic = new GND(randomBoolean()); - break; - case 3: - significanceHeuristic = new MutualInformation(randomBoolean(), randomBoolean()); - break; - case 4: - significanceHeuristic = new ScriptHeuristic(mockScript("foo")); - break; - case 5: - significanceHeuristic = new JLHScore(); - break; - default: - fail(); + case 0: + significanceHeuristic = new PercentageScore(); + break; + case 1: + significanceHeuristic = new ChiSquare(randomBoolean(), randomBoolean()); + break; + case 2: + significanceHeuristic = new GND(randomBoolean()); + break; + case 3: + significanceHeuristic = new MutualInformation(randomBoolean(), randomBoolean()); + 
break; + case 4: + significanceHeuristic = new ScriptHeuristic(mockScript("foo")); + break; + case 5: + significanceHeuristic = new JLHScore(); + break; + default: + fail(); } return significanceHeuristic; } @@ -135,48 +135,48 @@ static SignificanceHeuristic getSignificanceHeuristic() { static IncludeExclude getIncludeExclude() { IncludeExclude incExc = null; switch (randomInt(5)) { - case 0: - incExc = new IncludeExclude(new RegExp("foobar"), null); - break; - case 1: - incExc = new IncludeExclude(null, new RegExp("foobaz")); - break; - case 2: - incExc = new IncludeExclude(new RegExp("foobar"), new RegExp("foobaz")); - break; - case 3: - SortedSet includeValues = new TreeSet<>(); - int numIncs = randomIntBetween(1, 20); - for (int i = 0; i < numIncs; i++) { - includeValues.add(new BytesRef(randomAlphaOfLengthBetween(1, 30))); - } - SortedSet excludeValues = null; - incExc = new IncludeExclude(includeValues, excludeValues); - break; - case 4: - SortedSet includeValues2 = null; - SortedSet excludeValues2 = new TreeSet<>(); - int numExcs2 = randomIntBetween(1, 20); - for (int i = 0; i < numExcs2; i++) { - excludeValues2.add(new BytesRef(randomAlphaOfLengthBetween(1, 30))); - } - incExc = new IncludeExclude(includeValues2, excludeValues2); - break; - case 5: - SortedSet includeValues3 = new TreeSet<>(); - int numIncs3 = randomIntBetween(1, 20); - for (int i = 0; i < numIncs3; i++) { - includeValues3.add(new BytesRef(randomAlphaOfLengthBetween(1, 30))); - } - SortedSet excludeValues3 = new TreeSet<>(); - int numExcs3 = randomIntBetween(1, 20); - for (int i = 0; i < numExcs3; i++) { - excludeValues3.add(new BytesRef(randomAlphaOfLengthBetween(1, 30))); - } - incExc = new IncludeExclude(includeValues3, excludeValues3); - break; - default: - fail(); + case 0: + incExc = new IncludeExclude(new RegExp("foobar"), null); + break; + case 1: + incExc = new IncludeExclude(null, new RegExp("foobaz")); + break; + case 2: + incExc = new IncludeExclude(new RegExp("foobar"), new RegExp("foobaz")); + break; + case 3: + SortedSet includeValues = new TreeSet<>(); + int numIncs = randomIntBetween(1, 20); + for (int i = 0; i < numIncs; i++) { + includeValues.add(new BytesRef(randomAlphaOfLengthBetween(1, 30))); + } + SortedSet excludeValues = null; + incExc = new IncludeExclude(includeValues, excludeValues); + break; + case 4: + SortedSet includeValues2 = null; + SortedSet excludeValues2 = new TreeSet<>(); + int numExcs2 = randomIntBetween(1, 20); + for (int i = 0; i < numExcs2; i++) { + excludeValues2.add(new BytesRef(randomAlphaOfLengthBetween(1, 30))); + } + incExc = new IncludeExclude(includeValues2, excludeValues2); + break; + case 5: + SortedSet includeValues3 = new TreeSet<>(); + int numIncs3 = randomIntBetween(1, 20); + for (int i = 0; i < numIncs3; i++) { + includeValues3.add(new BytesRef(randomAlphaOfLengthBetween(1, 30))); + } + SortedSet excludeValues3 = new TreeSet<>(); + int numExcs3 = randomIntBetween(1, 20); + for (int i = 0; i < numExcs3; i++) { + excludeValues3.add(new BytesRef(randomAlphaOfLengthBetween(1, 30))); + } + incExc = new IncludeExclude(includeValues3, excludeValues3); + break; + default: + fail(); } return incExc; } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/SignificantTextTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/SignificantTextTests.java index fda97f7dca7bb..42282b73e779b 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/SignificantTextTests.java +++ 
b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/SignificantTextTests.java @@ -27,7 +27,7 @@ protected SignificantTextAggregationBuilder createTestAggregatorBuilder() { factory.bucketCountThresholds().setRequiredSize(randomIntBetween(1, Integer.MAX_VALUE)); } if (randomBoolean()) { - factory.sourceFieldNames(Arrays.asList(new String []{"foo", "bar"})); + factory.sourceFieldNames(Arrays.asList(new String[] { "foo", "bar" })); } if (randomBoolean()) { @@ -36,30 +36,30 @@ protected SignificantTextAggregationBuilder createTestAggregatorBuilder() { if (randomBoolean()) { int minDocCount = randomInt(4); switch (minDocCount) { - case 0: - break; - case 1: - case 2: - case 3: - case 4: - minDocCount = randomIntBetween(0, Integer.MAX_VALUE); - break; + case 0: + break; + case 1: + case 2: + case 3: + case 4: + minDocCount = randomIntBetween(0, Integer.MAX_VALUE); + break; } factory.bucketCountThresholds().setMinDocCount(minDocCount); } if (randomBoolean()) { int shardMinDocCount = randomInt(4); switch (shardMinDocCount) { - case 0: - break; - case 1: - case 2: - case 3: - case 4: - shardMinDocCount = randomIntBetween(0, Integer.MAX_VALUE); - break; - default: - fail(); + case 0: + break; + case 1: + case 2: + case 3: + case 4: + shardMinDocCount = randomIntBetween(0, Integer.MAX_VALUE); + break; + default: + fail(); } factory.bucketCountThresholds().setShardMinDocCount(shardMinDocCount); } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/TermsTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/TermsTests.java index 9c61d7af6e94e..a3c81f1574ce2 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/TermsTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/TermsTests.java @@ -52,32 +52,32 @@ protected TermsAggregationBuilder createTestAggregatorBuilder() { if (randomBoolean()) { int minDocCount = randomInt(4); switch (minDocCount) { - case 0: - break; - case 1: - case 2: - case 3: - case 4: - minDocCount = randomIntBetween(0, Integer.MAX_VALUE); - break; - default: - fail(); + case 0: + break; + case 1: + case 2: + case 3: + case 4: + minDocCount = randomIntBetween(0, Integer.MAX_VALUE); + break; + default: + fail(); } factory.minDocCount(minDocCount); } if (randomBoolean()) { int shardMinDocCount = randomInt(4); switch (shardMinDocCount) { - case 0: - break; - case 1: - case 2: - case 3: - case 4: - shardMinDocCount = randomIntBetween(0, Integer.MAX_VALUE); - break; - default: - fail(); + case 0: + break; + case 1: + case 2: + case 3: + case 4: + shardMinDocCount = randomIntBetween(0, Integer.MAX_VALUE); + break; + default: + fail(); } factory.shardMinDocCount(shardMinDocCount); } @@ -133,7 +133,7 @@ protected TermsAggregationBuilder createTestAggregatorBuilder() { } if (randomBoolean()) { List order = randomOrder(); - if(order.size() == 1 && randomBoolean()) { + if (order.size() == 1 && randomBoolean()) { factory.order(order.get(0)); } else { factory.order(order); @@ -148,26 +148,26 @@ protected TermsAggregationBuilder createTestAggregatorBuilder() { private List randomOrder() { List orders = new ArrayList<>(); switch (randomInt(4)) { - case 0: - orders.add(BucketOrder.key(randomBoolean())); - break; - case 1: - orders.add(BucketOrder.count(randomBoolean())); - break; - case 2: - orders.add(BucketOrder.aggregation(randomAlphaOfLengthBetween(3, 20), randomBoolean())); - break; - case 3: - orders.add(BucketOrder.aggregation(randomAlphaOfLengthBetween(3, 20), 
randomAlphaOfLengthBetween(3, 20), randomBoolean())); - break; - case 4: - int numOrders = randomIntBetween(1, 3); - for (int i = 0; i < numOrders; i++) { - orders.addAll(randomOrder()); - } - break; - default: - fail(); + case 0: + orders.add(BucketOrder.key(randomBoolean())); + break; + case 1: + orders.add(BucketOrder.count(randomBoolean())); + break; + case 2: + orders.add(BucketOrder.aggregation(randomAlphaOfLengthBetween(3, 20), randomBoolean())); + break; + case 3: + orders.add(BucketOrder.aggregation(randomAlphaOfLengthBetween(3, 20), randomAlphaOfLengthBetween(3, 20), randomBoolean())); + break; + case 4: + int numOrders = randomIntBetween(1, 3); + for (int i = 0; i < numOrders; i++) { + orders.addAll(randomOrder()); + } + break; + default: + fail(); } return orders; } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/adjacency/InternalAdjacencyMatrixTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/adjacency/InternalAdjacencyMatrixTests.java index 942606b793327..87a9f51db4187 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/adjacency/InternalAdjacencyMatrixTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/adjacency/InternalAdjacencyMatrixTests.java @@ -68,15 +68,16 @@ protected void assertReduced(InternalAdjacencyMatrix reduced, List 0) { - expectedCounts.compute(bucket.getKeyAsString(), - (key, oldValue) -> (oldValue == null ? 0 : oldValue) + bucket.getDocCount()); + expectedCounts.compute( + bucket.getKeyAsString(), + (key, oldValue) -> (oldValue == null ? 0 : oldValue) + bucket.getDocCount() + ); } } } final Map actualCounts = new TreeMap<>(); for (InternalAdjacencyMatrix.InternalBucket bucket : reduced.getBuckets()) { - actualCounts.compute(bucket.getKeyAsString(), - (key, oldValue) -> (oldValue == null ? 0 : oldValue) + bucket.getDocCount()); + actualCounts.compute(bucket.getKeyAsString(), (key, oldValue) -> (oldValue == null ? 
0 : oldValue) + bucket.getDocCount()); } assertEquals(expectedCounts, actualCounts); } @@ -92,24 +93,25 @@ protected InternalAdjacencyMatrix mutateInstance(InternalAdjacencyMatrix instanc List buckets = instance.getBuckets(); Map metadata = instance.getMetadata(); switch (between(0, 2)) { - case 0: - name += randomAlphaOfLength(5); - break; - case 1: - buckets = new ArrayList<>(buckets); - buckets.add(new InternalAdjacencyMatrix.InternalBucket(randomAlphaOfLength(10), randomNonNegativeLong(), - InternalAggregations.EMPTY)); - break; - case 2: - if (metadata == null) { - metadata = new HashMap<>(1); - } else { - metadata = new HashMap<>(instance.getMetadata()); - } - metadata.put(randomAlphaOfLength(15), randomInt()); - break; - default: - throw new AssertionError("Illegal randomisation branch"); + case 0: + name += randomAlphaOfLength(5); + break; + case 1: + buckets = new ArrayList<>(buckets); + buckets.add( + new InternalAdjacencyMatrix.InternalBucket(randomAlphaOfLength(10), randomNonNegativeLong(), InternalAggregations.EMPTY) + ); + break; + case 2: + if (metadata == null) { + metadata = new HashMap<>(1); + } else { + metadata = new HashMap<>(instance.getMetadata()); + } + metadata.put(randomAlphaOfLength(15), randomInt()); + break; + default: + throw new AssertionError("Illegal randomisation branch"); } return new InternalAdjacencyMatrix(name, buckets, metadata); } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregationBuilderTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregationBuilderTests.java index 70d41a6420c9e..de1a900a45800 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregationBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregationBuilderTests.java @@ -27,11 +27,17 @@ private DateHistogramValuesSourceBuilder randomDateHistogramSourceBuilder() { histo.script(new Script(randomAlphaOfLengthBetween(10, 20))); } if (randomBoolean()) { - histo.calendarInterval(randomFrom(DateHistogramInterval.days(1), - DateHistogramInterval.minutes(1), DateHistogramInterval.weeks(1))); + histo.calendarInterval( + randomFrom(DateHistogramInterval.days(1), DateHistogramInterval.minutes(1), DateHistogramInterval.weeks(1)) + ); } else { - histo.fixedInterval(randomFrom(new DateHistogramInterval(randomNonNegativeLong() + "ms"), - DateHistogramInterval.days(10), DateHistogramInterval.hours(10))); + histo.fixedInterval( + randomFrom( + new DateHistogramInterval(randomNonNegativeLong() + "ms"), + DateHistogramInterval.days(10), + DateHistogramInterval.hours(10) + ) + ); } if (randomBoolean()) { histo.timeZone(randomZone()); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregatorTests.java index d9566566d3a25..f9d87a8ae409e 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregatorTests.java @@ -98,7 +98,7 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; -public class CompositeAggregatorTests extends AggregatorTestCase { +public class CompositeAggregatorTests extends AggregatorTestCase { private static MappedFieldType[] 
FIELD_TYPES; private List objectMappers; @@ -137,7 +137,7 @@ protected IndexReader wrapDirectoryReader(DirectoryReader reader) throws IOExcep if (false == objectMappers().isEmpty()) { return wrapInMockESDirectoryReader(reader); } - return reader; + return reader; } public void testUnmappedFieldWithTerms() throws Exception { @@ -152,22 +152,19 @@ public void testUnmappedFieldWithTerms() throws Exception { createDocument("keyword", "c") ) ); - testSearchCase(Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery("keyword")), dataset, - () -> new CompositeAggregationBuilder("name", - Arrays.asList( - new TermsValuesSourceBuilder("unmapped").field("unmapped") - ) - ), - (result) -> { - assertEquals(0, result.getBuckets().size()); - } + testSearchCase( + Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery("keyword")), + dataset, + () -> new CompositeAggregationBuilder("name", Arrays.asList(new TermsValuesSourceBuilder("unmapped").field("unmapped"))), + (result) -> { assertEquals(0, result.getBuckets().size()); } ); - testSearchCase(Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery("keyword")), dataset, - () -> new CompositeAggregationBuilder("name", - Arrays.asList( - new TermsValuesSourceBuilder("unmapped").field("unmapped").missingBucket(true) - ) + testSearchCase( + Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery("keyword")), + dataset, + () -> new CompositeAggregationBuilder( + "name", + Arrays.asList(new TermsValuesSourceBuilder("unmapped").field("unmapped").missingBucket(true)) ), (result) -> { assertEquals(1, result.getBuckets().size()); @@ -177,30 +174,34 @@ public void testUnmappedFieldWithTerms() throws Exception { } ); - testSearchCase(Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery("keyword")), dataset, - () -> new CompositeAggregationBuilder("name", - Arrays.asList( - new TermsValuesSourceBuilder("unmapped").field("unmapped").missingBucket(true) - )).aggregateAfter(Collections.singletonMap("unmapped", null)), - (result) -> { - assertEquals(0, result.getBuckets().size()); - } + testSearchCase( + Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery("keyword")), + dataset, + () -> new CompositeAggregationBuilder( + "name", + Arrays.asList(new TermsValuesSourceBuilder("unmapped").field("unmapped").missingBucket(true)) + ).aggregateAfter(Collections.singletonMap("unmapped", null)), + (result) -> { assertEquals(0, result.getBuckets().size()); } ); - testSearchCase(Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery("keyword")), dataset, - () -> new CompositeAggregationBuilder("name", + testSearchCase( + Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery("keyword")), + dataset, + () -> new CompositeAggregationBuilder( + "name", Arrays.asList( new TermsValuesSourceBuilder("keyword").field("keyword"), new TermsValuesSourceBuilder("unmapped").field("unmapped") ) ), - (result) -> { - assertEquals(0, result.getBuckets().size()); - } + (result) -> { assertEquals(0, result.getBuckets().size()); } ); - testSearchCase(Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery("keyword")), dataset, - () -> new CompositeAggregationBuilder("name", + testSearchCase( + Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery("keyword")), + dataset, + () -> new CompositeAggregationBuilder( + "name", Arrays.asList( new TermsValuesSourceBuilder("keyword").field("keyword"), new 
TermsValuesSourceBuilder("unmapped").field("unmapped").missingBucket(true) @@ -236,11 +237,7 @@ public void testUnmappedFieldWithGeopoint() throws Exception { testSearchCase( Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery(mappedFieldName)), dataset, - () -> new CompositeAggregationBuilder("name", - Arrays.asList( - new GeoTileGridValuesSourceBuilder("unmapped") .field("unmapped") - ) - ), + () -> new CompositeAggregationBuilder("name", Arrays.asList(new GeoTileGridValuesSourceBuilder("unmapped").field("unmapped"))), (result) -> assertEquals(0, result.getBuckets().size()) ); @@ -248,10 +245,9 @@ public void testUnmappedFieldWithGeopoint() throws Exception { testSearchCase( Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery(mappedFieldName)), dataset, - () -> new CompositeAggregationBuilder("name", - Arrays.asList( - new GeoTileGridValuesSourceBuilder("unmapped") .field("unmapped").missingBucket(true) - ) + () -> new CompositeAggregationBuilder( + "name", + Arrays.asList(new GeoTileGridValuesSourceBuilder("unmapped").field("unmapped").missingBucket(true)) ), (result) -> { assertEquals(1, result.getBuckets().size()); @@ -265,10 +261,11 @@ public void testUnmappedFieldWithGeopoint() throws Exception { testSearchCase( Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery(mappedFieldName)), dataset, - () -> new CompositeAggregationBuilder("name", + () -> new CompositeAggregationBuilder( + "name", Arrays.asList( new GeoTileGridValuesSourceBuilder(mappedFieldName).field(mappedFieldName), - new GeoTileGridValuesSourceBuilder("unmapped") .field("unmapped") + new GeoTileGridValuesSourceBuilder("unmapped").field("unmapped") ) ), (result) -> assertEquals(0, result.getBuckets().size()) @@ -278,10 +275,11 @@ public void testUnmappedFieldWithGeopoint() throws Exception { testSearchCase( Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery(mappedFieldName)), dataset, - () -> new CompositeAggregationBuilder("name", + () -> new CompositeAggregationBuilder( + "name", Arrays.asList( new GeoTileGridValuesSourceBuilder(mappedFieldName).field(mappedFieldName), - new GeoTileGridValuesSourceBuilder("unmapped") .field("unmapped").missingBucket(true) + new GeoTileGridValuesSourceBuilder("unmapped").field("unmapped").missingBucket(true) ) ), (result) -> { @@ -470,22 +468,19 @@ public void testUnmappedFieldWithLongs() throws Exception { ) ); - testSearchCase(Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery("long")), dataset, - () -> new CompositeAggregationBuilder("name", - Arrays.asList( - new TermsValuesSourceBuilder("unmapped").field("unmapped") - ) - ), - (result) -> { - assertEquals(0, result.getBuckets().size()); - } + testSearchCase( + Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery("long")), + dataset, + () -> new CompositeAggregationBuilder("name", Arrays.asList(new TermsValuesSourceBuilder("unmapped").field("unmapped"))), + (result) -> { assertEquals(0, result.getBuckets().size()); } ); - testSearchCase(Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery("long")), dataset, - () -> new CompositeAggregationBuilder("name", - Arrays.asList( - new TermsValuesSourceBuilder("unmapped").field("unmapped").missingBucket(true) - ) + testSearchCase( + Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery("long")), + dataset, + () -> new CompositeAggregationBuilder( + "name", + Arrays.asList(new TermsValuesSourceBuilder("unmapped").field("unmapped").missingBucket(true)) ), 
(result) -> { assertEquals(1, result.getBuckets().size()); @@ -495,30 +490,34 @@ public void testUnmappedFieldWithLongs() throws Exception { } ); - testSearchCase(Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery("long")), dataset, - () -> new CompositeAggregationBuilder("name", - Arrays.asList( - new TermsValuesSourceBuilder("unmapped").field("unmapped").missingBucket(true) - )).aggregateAfter(Collections.singletonMap("unmapped", null)), - (result) -> { - assertEquals(0, result.getBuckets().size()); - } + testSearchCase( + Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery("long")), + dataset, + () -> new CompositeAggregationBuilder( + "name", + Arrays.asList(new TermsValuesSourceBuilder("unmapped").field("unmapped").missingBucket(true)) + ).aggregateAfter(Collections.singletonMap("unmapped", null)), + (result) -> { assertEquals(0, result.getBuckets().size()); } ); - testSearchCase(Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery("long")), dataset, - () -> new CompositeAggregationBuilder("name", + testSearchCase( + Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery("long")), + dataset, + () -> new CompositeAggregationBuilder( + "name", Arrays.asList( new TermsValuesSourceBuilder("long").field("long"), new TermsValuesSourceBuilder("unmapped").field("unmapped") ) ), - (result) -> { - assertEquals(0, result.getBuckets().size()); - } + (result) -> { assertEquals(0, result.getBuckets().size()); } ); - testSearchCase(Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery("long")), dataset, - () -> new CompositeAggregationBuilder("name", + testSearchCase( + Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery("long")), + dataset, + () -> new CompositeAggregationBuilder( + "name", Arrays.asList( new TermsValuesSourceBuilder("long").field("long"), new TermsValuesSourceBuilder("unmapped").field("unmapped").missingBucket(true) @@ -536,8 +535,11 @@ public void testUnmappedFieldWithLongs() throws Exception { } ); - testSearchCase(Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery("long")), dataset, - () -> new CompositeAggregationBuilder("name", + testSearchCase( + Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery("long")), + dataset, + () -> new CompositeAggregationBuilder( + "name", Arrays.asList( new TermsValuesSourceBuilder("long").field("long"), new TermsValuesSourceBuilder("unmapped").field("unmapped").missingBucket(true) @@ -565,38 +567,33 @@ public void testWithKeyword() throws Exception { createDocument("keyword", "c") ) ); - testSearchCase(Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery("keyword")), dataset, - () -> { - TermsValuesSourceBuilder terms = new TermsValuesSourceBuilder("keyword") - .field("keyword"); - return new CompositeAggregationBuilder("name", Collections.singletonList(terms)); - }, (result) -> { - assertEquals(3, result.getBuckets().size()); - assertEquals("{keyword=d}", result.afterKey().toString()); - assertEquals("{keyword=a}", result.getBuckets().get(0).getKeyAsString()); - assertEquals(2L, result.getBuckets().get(0).getDocCount()); - assertEquals("{keyword=c}", result.getBuckets().get(1).getKeyAsString()); - assertEquals(2L, result.getBuckets().get(1).getDocCount()); - assertEquals("{keyword=d}", result.getBuckets().get(2).getKeyAsString()); - assertEquals(1L, result.getBuckets().get(2).getDocCount()); - } - ); + testSearchCase(Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery("keyword")), 
dataset, () -> { + TermsValuesSourceBuilder terms = new TermsValuesSourceBuilder("keyword").field("keyword"); + return new CompositeAggregationBuilder("name", Collections.singletonList(terms)); + }, (result) -> { + assertEquals(3, result.getBuckets().size()); + assertEquals("{keyword=d}", result.afterKey().toString()); + assertEquals("{keyword=a}", result.getBuckets().get(0).getKeyAsString()); + assertEquals(2L, result.getBuckets().get(0).getDocCount()); + assertEquals("{keyword=c}", result.getBuckets().get(1).getKeyAsString()); + assertEquals(2L, result.getBuckets().get(1).getDocCount()); + assertEquals("{keyword=d}", result.getBuckets().get(2).getKeyAsString()); + assertEquals(1L, result.getBuckets().get(2).getDocCount()); + }); - testSearchCase(Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery("keyword")), dataset, - () -> { - TermsValuesSourceBuilder terms = new TermsValuesSourceBuilder("keyword") - .field("keyword"); - return new CompositeAggregationBuilder("name", Collections.singletonList(terms)) - .aggregateAfter(Collections.singletonMap("keyword", "a")); - }, (result) -> { - assertEquals(2, result.getBuckets().size()); - assertEquals("{keyword=d}", result.afterKey().toString()); - assertEquals("{keyword=c}", result.getBuckets().get(0).getKeyAsString()); - assertEquals(2L, result.getBuckets().get(0).getDocCount()); - assertEquals("{keyword=d}", result.getBuckets().get(1).getKeyAsString()); - assertEquals(1L, result.getBuckets().get(1).getDocCount()); - } - ); + testSearchCase(Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery("keyword")), dataset, () -> { + TermsValuesSourceBuilder terms = new TermsValuesSourceBuilder("keyword").field("keyword"); + return new CompositeAggregationBuilder("name", Collections.singletonList(terms)).aggregateAfter( + Collections.singletonMap("keyword", "a") + ); + }, (result) -> { + assertEquals(2, result.getBuckets().size()); + assertEquals("{keyword=d}", result.afterKey().toString()); + assertEquals("{keyword=c}", result.getBuckets().get(0).getKeyAsString()); + assertEquals(2L, result.getBuckets().get(0).getDocCount()); + assertEquals("{keyword=d}", result.getBuckets().get(1).getKeyAsString()); + assertEquals(1L, result.getBuckets().get(1).getDocCount()); + }); } /** @@ -614,31 +611,25 @@ public void testUsingTestCase() throws Exception { createDocument("keyword", "c") ) ); - testCase( - new CompositeAggregationBuilder("name", Collections.singletonList(terms)), - new MatchAllDocsQuery(), - iw -> { - Document document = new Document(); - int id = 0; - for (Map> fields : dataset) { - document.clear(); - addToDocument(id, document, fields); - iw.addDocument(document); - id++; - } - }, - (InternalComposite result) -> { - assertEquals(3, result.getBuckets().size()); - assertEquals("{keyword=d}", result.afterKey().toString()); - assertEquals("{keyword=a}", result.getBuckets().get(0).getKeyAsString()); - assertEquals(2L, result.getBuckets().get(0).getDocCount()); - assertEquals("{keyword=c}", result.getBuckets().get(1).getKeyAsString()); - assertEquals(2L, result.getBuckets().get(1).getDocCount()); - assertEquals("{keyword=d}", result.getBuckets().get(2).getKeyAsString()); - assertEquals(1L, result.getBuckets().get(2).getDocCount()); - }, - FIELD_TYPES - ); + testCase(new CompositeAggregationBuilder("name", Collections.singletonList(terms)), new MatchAllDocsQuery(), iw -> { + Document document = new Document(); + int id = 0; + for (Map> fields : dataset) { + document.clear(); + addToDocument(id, document, fields); + 
iw.addDocument(document); + id++; + } + }, (InternalComposite result) -> { + assertEquals(3, result.getBuckets().size()); + assertEquals("{keyword=d}", result.afterKey().toString()); + assertEquals("{keyword=a}", result.getBuckets().get(0).getKeyAsString()); + assertEquals(2L, result.getBuckets().get(0).getDocCount()); + assertEquals("{keyword=c}", result.getBuckets().get(1).getKeyAsString()); + assertEquals(2L, result.getBuckets().get(1).getDocCount()); + assertEquals("{keyword=d}", result.getBuckets().get(2).getKeyAsString()); + assertEquals(1L, result.getBuckets().get(2).getDocCount()); + }, FIELD_TYPES); } /** @@ -654,43 +645,39 @@ public void testSubAggregationOfNested() throws Exception { NestedAggregationBuilder builder = new NestedAggregationBuilder("nestedAggName", nestedPath); builder.subAggregation(new CompositeAggregationBuilder("compositeAggName", Collections.singletonList(terms))); // Without after - testCase( - builder, - new MatchAllDocsQuery(), - iw -> { - // Sub-Docs - List documents = new ArrayList<>(); - documents.add(createNestedDocument("1", nestedPath, leafNameField, "Pens and Stuff", "price" , 10L)); - documents.add(createNestedDocument("1", nestedPath, leafNameField, "Pen World", "price" , 9L)); - documents.add(createNestedDocument("2", nestedPath, leafNameField, "Pens and Stuff", "price" , 5L)); - documents.add(createNestedDocument("2", nestedPath, leafNameField, "Stationary", "price" , 7L)); - // Root docs - Document root; - root = new Document(); - root.add(new Field(IdFieldMapper.NAME, Uid.encodeId("1"), IdFieldMapper.Defaults.FIELD_TYPE)); - root.add(sequenceIDFields.primaryTerm); - root.add(new StringField(rootNameField, new BytesRef("Ballpoint"), Field.Store.NO)); - documents.add(root); - - root = new Document(); - root.add(new Field(IdFieldMapper.NAME, Uid.encodeId("2"), IdFieldMapper.Defaults.FIELD_TYPE)); - root.add(new StringField(rootNameField, new BytesRef("Notebook"), Field.Store.NO)); - root.add(sequenceIDFields.primaryTerm); - documents.add(root); - iw.addDocuments(documents); - }, - (InternalSingleBucketAggregation parent) -> { - assertEquals(1, parent.getAggregations().asList().size()); - InternalComposite result = (InternalComposite) parent.getProperty("compositeAggName"); - assertEquals(3, result.getBuckets().size()); - assertEquals("{keyword=Stationary}", result.afterKey().toString()); - assertEquals("{keyword=Pen World}", result.getBuckets().get(0).getKeyAsString()); - assertEquals(1L, result.getBuckets().get(0).getDocCount()); - assertEquals("{keyword=Pens and Stuff}", result.getBuckets().get(1).getKeyAsString()); - assertEquals(2L, result.getBuckets().get(1).getDocCount()); - assertEquals("{keyword=Stationary}", result.getBuckets().get(2).getKeyAsString()); - assertEquals(1L, result.getBuckets().get(2).getDocCount()); - }, + testCase(builder, new MatchAllDocsQuery(), iw -> { + // Sub-Docs + List documents = new ArrayList<>(); + documents.add(createNestedDocument("1", nestedPath, leafNameField, "Pens and Stuff", "price", 10L)); + documents.add(createNestedDocument("1", nestedPath, leafNameField, "Pen World", "price", 9L)); + documents.add(createNestedDocument("2", nestedPath, leafNameField, "Pens and Stuff", "price", 5L)); + documents.add(createNestedDocument("2", nestedPath, leafNameField, "Stationary", "price", 7L)); + // Root docs + Document root; + root = new Document(); + root.add(new Field(IdFieldMapper.NAME, Uid.encodeId("1"), IdFieldMapper.Defaults.FIELD_TYPE)); + root.add(sequenceIDFields.primaryTerm); + root.add(new 
StringField(rootNameField, new BytesRef("Ballpoint"), Field.Store.NO)); + documents.add(root); + + root = new Document(); + root.add(new Field(IdFieldMapper.NAME, Uid.encodeId("2"), IdFieldMapper.Defaults.FIELD_TYPE)); + root.add(new StringField(rootNameField, new BytesRef("Notebook"), Field.Store.NO)); + root.add(sequenceIDFields.primaryTerm); + documents.add(root); + iw.addDocuments(documents); + }, (InternalSingleBucketAggregation parent) -> { + assertEquals(1, parent.getAggregations().asList().size()); + InternalComposite result = (InternalComposite) parent.getProperty("compositeAggName"); + assertEquals(3, result.getBuckets().size()); + assertEquals("{keyword=Stationary}", result.afterKey().toString()); + assertEquals("{keyword=Pen World}", result.getBuckets().get(0).getKeyAsString()); + assertEquals(1L, result.getBuckets().get(0).getDocCount()); + assertEquals("{keyword=Pens and Stuff}", result.getBuckets().get(1).getKeyAsString()); + assertEquals(2L, result.getBuckets().get(1).getDocCount()); + assertEquals("{keyword=Stationary}", result.getBuckets().get(2).getKeyAsString()); + assertEquals(1L, result.getBuckets().get(2).getDocCount()); + }, new KeywordFieldMapper.KeywordFieldType(nestedPath + "." + leafNameField), new NumberFieldMapper.NumberFieldType("price", NumberFieldMapper.NumberType.LONG) ); @@ -708,41 +695,39 @@ public void testSubAggregationOfNestedAggregateAfter() throws Exception { TermsValuesSourceBuilder terms = new TermsValuesSourceBuilder("keyword").field(nestedPath + "." + leafNameField); NestedAggregationBuilder builder = new NestedAggregationBuilder("nestedAggName", nestedPath); builder.subAggregation( - new CompositeAggregationBuilder("compositeAggName", Collections.singletonList(terms)) - .aggregateAfter(createAfterKey("keyword", "Pens and Stuff"))); - testCase( - builder, - new MatchAllDocsQuery(), - iw -> { - // Sub-Docs - List documents = new ArrayList<>(); - documents.add(createNestedDocument("1", nestedPath, leafNameField, "Pens and Stuff", "price" , 10L)); - documents.add(createNestedDocument("1", nestedPath, leafNameField, "Pen World", "price" , 9L)); - documents.add(createNestedDocument("2", nestedPath, leafNameField, "Pens and Stuff", "price" , 5L)); - documents.add(createNestedDocument("2", nestedPath, leafNameField, "Stationary", "price" , 7L)); - // Root docs - Document root; - root = new Document(); - root.add(new Field(IdFieldMapper.NAME, Uid.encodeId("1"), IdFieldMapper.Defaults.FIELD_TYPE)); - root.add(sequenceIDFields.primaryTerm); - root.add(new StringField(rootNameField, new BytesRef("Ballpoint"), Field.Store.NO)); - documents.add(root); - - root = new Document(); - root.add(new Field(IdFieldMapper.NAME, Uid.encodeId("2"), IdFieldMapper.Defaults.FIELD_TYPE)); - root.add(new StringField(rootNameField, new BytesRef("Notebook"), Field.Store.NO)); - root.add(sequenceIDFields.primaryTerm); - documents.add(root); - iw.addDocuments(documents); - }, - (InternalSingleBucketAggregation parent) -> { - assertEquals(1, parent.getAggregations().asList().size()); - InternalComposite result = (InternalComposite) parent.getProperty("compositeAggName"); - assertEquals(1, result.getBuckets().size()); - assertEquals("{keyword=Stationary}", result.afterKey().toString()); - assertEquals("{keyword=Stationary}", result.getBuckets().get(0).getKeyAsString()); - assertEquals(1L, result.getBuckets().get(0).getDocCount()); - }, + new CompositeAggregationBuilder("compositeAggName", Collections.singletonList(terms)).aggregateAfter( + createAfterKey("keyword", "Pens and 
Stuff") + ) + ); + testCase(builder, new MatchAllDocsQuery(), iw -> { + // Sub-Docs + List documents = new ArrayList<>(); + documents.add(createNestedDocument("1", nestedPath, leafNameField, "Pens and Stuff", "price", 10L)); + documents.add(createNestedDocument("1", nestedPath, leafNameField, "Pen World", "price", 9L)); + documents.add(createNestedDocument("2", nestedPath, leafNameField, "Pens and Stuff", "price", 5L)); + documents.add(createNestedDocument("2", nestedPath, leafNameField, "Stationary", "price", 7L)); + // Root docs + Document root; + root = new Document(); + root.add(new Field(IdFieldMapper.NAME, Uid.encodeId("1"), IdFieldMapper.Defaults.FIELD_TYPE)); + root.add(sequenceIDFields.primaryTerm); + root.add(new StringField(rootNameField, new BytesRef("Ballpoint"), Field.Store.NO)); + documents.add(root); + + root = new Document(); + root.add(new Field(IdFieldMapper.NAME, Uid.encodeId("2"), IdFieldMapper.Defaults.FIELD_TYPE)); + root.add(new StringField(rootNameField, new BytesRef("Notebook"), Field.Store.NO)); + root.add(sequenceIDFields.primaryTerm); + documents.add(root); + iw.addDocuments(documents); + }, (InternalSingleBucketAggregation parent) -> { + assertEquals(1, parent.getAggregations().asList().size()); + InternalComposite result = (InternalComposite) parent.getProperty("compositeAggName"); + assertEquals(1, result.getBuckets().size()); + assertEquals("{keyword=Stationary}", result.afterKey().toString()); + assertEquals("{keyword=Stationary}", result.getBuckets().get(0).getKeyAsString()); + assertEquals(1L, result.getBuckets().get(0).getDocCount()); + }, new KeywordFieldMapper.KeywordFieldType(nestedPath + "." + leafNameField), new NumberFieldMapper.NumberFieldType("price", NumberFieldMapper.NumberType.LONG) ); @@ -763,80 +748,68 @@ public void testWithKeywordAndMissingBucket() throws Exception { ); // sort ascending, null bucket is first - testSearchCase(Arrays.asList(new MatchAllDocsQuery()), dataset, - () -> { - TermsValuesSourceBuilder terms = new TermsValuesSourceBuilder("keyword") - .field("keyword") - .missingBucket(true); - return new CompositeAggregationBuilder("name", Collections.singletonList(terms)); - }, (result) -> { - assertEquals(4, result.getBuckets().size()); - assertEquals("{keyword=d}", result.afterKey().toString()); - assertEquals("{keyword=null}", result.getBuckets().get(0).getKeyAsString()); - assertEquals(2L, result.getBuckets().get(0).getDocCount()); - assertEquals("{keyword=a}", result.getBuckets().get(1).getKeyAsString()); - assertEquals(2L, result.getBuckets().get(1).getDocCount()); - assertEquals("{keyword=c}", result.getBuckets().get(2).getKeyAsString()); - assertEquals(2L, result.getBuckets().get(2).getDocCount()); - assertEquals("{keyword=d}", result.getBuckets().get(3).getKeyAsString()); - assertEquals(1L, result.getBuckets().get(3).getDocCount()); - } - ); + testSearchCase(Arrays.asList(new MatchAllDocsQuery()), dataset, () -> { + TermsValuesSourceBuilder terms = new TermsValuesSourceBuilder("keyword").field("keyword").missingBucket(true); + return new CompositeAggregationBuilder("name", Collections.singletonList(terms)); + }, (result) -> { + assertEquals(4, result.getBuckets().size()); + assertEquals("{keyword=d}", result.afterKey().toString()); + assertEquals("{keyword=null}", result.getBuckets().get(0).getKeyAsString()); + assertEquals(2L, result.getBuckets().get(0).getDocCount()); + assertEquals("{keyword=a}", result.getBuckets().get(1).getKeyAsString()); + assertEquals(2L, result.getBuckets().get(1).getDocCount()); + 
assertEquals("{keyword=c}", result.getBuckets().get(2).getKeyAsString()); + assertEquals(2L, result.getBuckets().get(2).getDocCount()); + assertEquals("{keyword=d}", result.getBuckets().get(3).getKeyAsString()); + assertEquals(1L, result.getBuckets().get(3).getDocCount()); + }); // sort descending, null bucket is last - testSearchCase(Arrays.asList(new MatchAllDocsQuery()), dataset, - () -> { - TermsValuesSourceBuilder terms = new TermsValuesSourceBuilder("keyword") - .field("keyword") - .missingBucket(true) - .order(SortOrder.DESC); - return new CompositeAggregationBuilder("name", Collections.singletonList(terms)); - }, (result) -> { - assertEquals(4, result.getBuckets().size()); - assertEquals("{keyword=null}", result.afterKey().toString()); - assertEquals("{keyword=null}", result.getBuckets().get(3).getKeyAsString()); - assertEquals(2L, result.getBuckets().get(3).getDocCount()); - assertEquals("{keyword=a}", result.getBuckets().get(2).getKeyAsString()); - assertEquals(2L, result.getBuckets().get(2).getDocCount()); - assertEquals("{keyword=c}", result.getBuckets().get(1).getKeyAsString()); - assertEquals(2L, result.getBuckets().get(1).getDocCount()); - assertEquals("{keyword=d}", result.getBuckets().get(0).getKeyAsString()); - assertEquals(1L, result.getBuckets().get(0).getDocCount()); - } - ); + testSearchCase(Arrays.asList(new MatchAllDocsQuery()), dataset, () -> { + TermsValuesSourceBuilder terms = new TermsValuesSourceBuilder("keyword").field("keyword") + .missingBucket(true) + .order(SortOrder.DESC); + return new CompositeAggregationBuilder("name", Collections.singletonList(terms)); + }, (result) -> { + assertEquals(4, result.getBuckets().size()); + assertEquals("{keyword=null}", result.afterKey().toString()); + assertEquals("{keyword=null}", result.getBuckets().get(3).getKeyAsString()); + assertEquals(2L, result.getBuckets().get(3).getDocCount()); + assertEquals("{keyword=a}", result.getBuckets().get(2).getKeyAsString()); + assertEquals(2L, result.getBuckets().get(2).getDocCount()); + assertEquals("{keyword=c}", result.getBuckets().get(1).getKeyAsString()); + assertEquals(2L, result.getBuckets().get(1).getDocCount()); + assertEquals("{keyword=d}", result.getBuckets().get(0).getKeyAsString()); + assertEquals(1L, result.getBuckets().get(0).getDocCount()); + }); - testSearchCase(Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery("keyword")), dataset, - () -> { - TermsValuesSourceBuilder terms = new TermsValuesSourceBuilder("keyword") - .field("keyword") - .missingBucket(true); - return new CompositeAggregationBuilder("name", Collections.singletonList(terms)) - .aggregateAfter(Collections.singletonMap("keyword", null)); - }, (result) -> { - assertEquals(3, result.getBuckets().size()); - assertEquals("{keyword=d}", result.afterKey().toString()); - assertEquals("{keyword=a}", result.getBuckets().get(0).getKeyAsString()); - assertEquals(2L, result.getBuckets().get(0).getDocCount()); - assertEquals("{keyword=c}", result.getBuckets().get(1).getKeyAsString()); - assertEquals(2L, result.getBuckets().get(1).getDocCount()); - assertEquals("{keyword=d}", result.getBuckets().get(2).getKeyAsString()); - assertEquals(1L, result.getBuckets().get(2).getDocCount()); - } - ); + testSearchCase(Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery("keyword")), dataset, () -> { + TermsValuesSourceBuilder terms = new TermsValuesSourceBuilder("keyword").field("keyword").missingBucket(true); + return new CompositeAggregationBuilder("name", 
Collections.singletonList(terms)).aggregateAfter( + Collections.singletonMap("keyword", null) + ); + }, (result) -> { + assertEquals(3, result.getBuckets().size()); + assertEquals("{keyword=d}", result.afterKey().toString()); + assertEquals("{keyword=a}", result.getBuckets().get(0).getKeyAsString()); + assertEquals(2L, result.getBuckets().get(0).getDocCount()); + assertEquals("{keyword=c}", result.getBuckets().get(1).getKeyAsString()); + assertEquals(2L, result.getBuckets().get(1).getDocCount()); + assertEquals("{keyword=d}", result.getBuckets().get(2).getKeyAsString()); + assertEquals(1L, result.getBuckets().get(2).getDocCount()); + }); - testSearchCase(Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery("keyword")), dataset, - () -> { - TermsValuesSourceBuilder terms = new TermsValuesSourceBuilder("keyword") - .field("keyword") - .missingBucket(true) - .order(SortOrder.DESC); - return new CompositeAggregationBuilder("name", Collections.singletonList(terms)) - .aggregateAfter(Collections.singletonMap("keyword", null)); - }, (result) -> { - assertEquals(0, result.getBuckets().size()); - assertNull(result.afterKey()); - } - ); + testSearchCase(Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery("keyword")), dataset, () -> { + TermsValuesSourceBuilder terms = new TermsValuesSourceBuilder("keyword").field("keyword") + .missingBucket(true) + .order(SortOrder.DESC); + return new CompositeAggregationBuilder("name", Collections.singletonList(terms)).aggregateAfter( + Collections.singletonMap("keyword", null) + ); + }, (result) -> { + assertEquals(0, result.getBuckets().size()); + assertNull(result.afterKey()); + }); } public void testWithKeywordMissingAfter() throws Exception { @@ -851,60 +824,53 @@ public void testWithKeywordMissingAfter() throws Exception { createDocument("keyword", "delta") ) ); - testSearchCase(Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery("keyword")), dataset, - () -> { - TermsValuesSourceBuilder terms = new TermsValuesSourceBuilder("keyword") - .field("keyword"); - return new CompositeAggregationBuilder("name", Collections.singletonList(terms)); - }, (result) -> { - assertEquals(4, result.getBuckets().size()); - assertEquals("{keyword=zoo}", result.afterKey().toString()); - assertEquals("{keyword=bar}", result.getBuckets().get(0).getKeyAsString()); - assertEquals(2L, result.getBuckets().get(0).getDocCount()); - assertEquals("{keyword=delta}", result.getBuckets().get(1).getKeyAsString()); - assertEquals(1L, result.getBuckets().get(1).getDocCount()); - assertEquals("{keyword=foo}", result.getBuckets().get(2).getKeyAsString()); - assertEquals(2L, result.getBuckets().get(2).getDocCount()); - assertEquals("{keyword=zoo}", result.getBuckets().get(3).getKeyAsString()); - assertEquals(1L, result.getBuckets().get(3).getDocCount()); - } - ); + testSearchCase(Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery("keyword")), dataset, () -> { + TermsValuesSourceBuilder terms = new TermsValuesSourceBuilder("keyword").field("keyword"); + return new CompositeAggregationBuilder("name", Collections.singletonList(terms)); + }, (result) -> { + assertEquals(4, result.getBuckets().size()); + assertEquals("{keyword=zoo}", result.afterKey().toString()); + assertEquals("{keyword=bar}", result.getBuckets().get(0).getKeyAsString()); + assertEquals(2L, result.getBuckets().get(0).getDocCount()); + assertEquals("{keyword=delta}", result.getBuckets().get(1).getKeyAsString()); + assertEquals(1L, 
result.getBuckets().get(1).getDocCount()); + assertEquals("{keyword=foo}", result.getBuckets().get(2).getKeyAsString()); + assertEquals(2L, result.getBuckets().get(2).getDocCount()); + assertEquals("{keyword=zoo}", result.getBuckets().get(3).getKeyAsString()); + assertEquals(1L, result.getBuckets().get(3).getDocCount()); + }); - testSearchCase(Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery("keyword")), dataset, - () -> { - TermsValuesSourceBuilder terms = new TermsValuesSourceBuilder("keyword") - .field("keyword"); - return new CompositeAggregationBuilder("name", Collections.singletonList(terms)) - .aggregateAfter(Collections.singletonMap("keyword", "car")); - }, (result) -> { - assertEquals(3, result.getBuckets().size()); - assertEquals("{keyword=zoo}", result.afterKey().toString()); - assertEquals("{keyword=delta}", result.getBuckets().get(0).getKeyAsString()); - assertEquals(1L, result.getBuckets().get(0).getDocCount()); - assertEquals("{keyword=foo}", result.getBuckets().get(1).getKeyAsString()); - assertEquals(2L, result.getBuckets().get(1).getDocCount()); - assertEquals("{keyword=zoo}", result.getBuckets().get(2).getKeyAsString()); - assertEquals(1L, result.getBuckets().get(2).getDocCount()); - } - ); + testSearchCase(Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery("keyword")), dataset, () -> { + TermsValuesSourceBuilder terms = new TermsValuesSourceBuilder("keyword").field("keyword"); + return new CompositeAggregationBuilder("name", Collections.singletonList(terms)).aggregateAfter( + Collections.singletonMap("keyword", "car") + ); + }, (result) -> { + assertEquals(3, result.getBuckets().size()); + assertEquals("{keyword=zoo}", result.afterKey().toString()); + assertEquals("{keyword=delta}", result.getBuckets().get(0).getKeyAsString()); + assertEquals(1L, result.getBuckets().get(0).getDocCount()); + assertEquals("{keyword=foo}", result.getBuckets().get(1).getKeyAsString()); + assertEquals(2L, result.getBuckets().get(1).getDocCount()); + assertEquals("{keyword=zoo}", result.getBuckets().get(2).getKeyAsString()); + assertEquals(1L, result.getBuckets().get(2).getDocCount()); + }); - testSearchCase(Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery("keyword")), dataset, - () -> { - TermsValuesSourceBuilder terms = new TermsValuesSourceBuilder("keyword") - .field("keyword").order(SortOrder.DESC); - return new CompositeAggregationBuilder("name", Collections.singletonList(terms)) - .aggregateAfter(Collections.singletonMap("keyword", "mar")); - }, (result) -> { - assertEquals(3, result.getBuckets().size()); - assertEquals("{keyword=bar}", result.afterKey().toString()); - assertEquals("{keyword=foo}", result.getBuckets().get(0).getKeyAsString()); - assertEquals(2L, result.getBuckets().get(0).getDocCount()); - assertEquals("{keyword=delta}", result.getBuckets().get(1).getKeyAsString()); - assertEquals(1L, result.getBuckets().get(1).getDocCount()); - assertEquals("{keyword=bar}", result.getBuckets().get(2).getKeyAsString()); - assertEquals(2L, result.getBuckets().get(2).getDocCount()); - } - ); + testSearchCase(Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery("keyword")), dataset, () -> { + TermsValuesSourceBuilder terms = new TermsValuesSourceBuilder("keyword").field("keyword").order(SortOrder.DESC); + return new CompositeAggregationBuilder("name", Collections.singletonList(terms)).aggregateAfter( + Collections.singletonMap("keyword", "mar") + ); + }, (result) -> { + assertEquals(3, result.getBuckets().size()); + 
assertEquals("{keyword=bar}", result.afterKey().toString()); + assertEquals("{keyword=foo}", result.getBuckets().get(0).getKeyAsString()); + assertEquals(2L, result.getBuckets().get(0).getDocCount()); + assertEquals("{keyword=delta}", result.getBuckets().get(1).getKeyAsString()); + assertEquals(1L, result.getBuckets().get(1).getDocCount()); + assertEquals("{keyword=bar}", result.getBuckets().get(2).getKeyAsString()); + assertEquals(2L, result.getBuckets().get(2).getDocCount()); + }); } public void testWithKeywordDesc() throws Exception { @@ -918,40 +884,33 @@ public void testWithKeywordDesc() throws Exception { createDocument("keyword", "c") ) ); - testSearchCase(Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery("keyword")), dataset, - () -> { - TermsValuesSourceBuilder terms = new TermsValuesSourceBuilder("keyword") - .field("keyword") - .order(SortOrder.DESC); - return new CompositeAggregationBuilder("name", Collections.singletonList(terms)); - }, (result) -> { - assertEquals(3, result.getBuckets().size()); - assertEquals("{keyword=a}", result.afterKey().toString()); - assertEquals("{keyword=a}", result.getBuckets().get(2).getKeyAsString()); - assertEquals(2L, result.getBuckets().get(2).getDocCount()); - assertEquals("{keyword=c}", result.getBuckets().get(1).getKeyAsString()); - assertEquals(2L, result.getBuckets().get(1).getDocCount()); - assertEquals("{keyword=d}", result.getBuckets().get(0).getKeyAsString()); - assertEquals(1L, result.getBuckets().get(0).getDocCount()); - } - ); + testSearchCase(Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery("keyword")), dataset, () -> { + TermsValuesSourceBuilder terms = new TermsValuesSourceBuilder("keyword").field("keyword").order(SortOrder.DESC); + return new CompositeAggregationBuilder("name", Collections.singletonList(terms)); + }, (result) -> { + assertEquals(3, result.getBuckets().size()); + assertEquals("{keyword=a}", result.afterKey().toString()); + assertEquals("{keyword=a}", result.getBuckets().get(2).getKeyAsString()); + assertEquals(2L, result.getBuckets().get(2).getDocCount()); + assertEquals("{keyword=c}", result.getBuckets().get(1).getKeyAsString()); + assertEquals(2L, result.getBuckets().get(1).getDocCount()); + assertEquals("{keyword=d}", result.getBuckets().get(0).getKeyAsString()); + assertEquals(1L, result.getBuckets().get(0).getDocCount()); + }); - testSearchCase(Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery("keyword")), dataset, - () -> { - TermsValuesSourceBuilder terms = new TermsValuesSourceBuilder("keyword") - .field("keyword") - .order(SortOrder.DESC); - return new CompositeAggregationBuilder("name", Collections.singletonList(terms)) - .aggregateAfter(Collections.singletonMap("keyword", "c")); + testSearchCase(Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery("keyword")), dataset, () -> { + TermsValuesSourceBuilder terms = new TermsValuesSourceBuilder("keyword").field("keyword").order(SortOrder.DESC); + return new CompositeAggregationBuilder("name", Collections.singletonList(terms)).aggregateAfter( + Collections.singletonMap("keyword", "c") + ); - }, (result) -> { - assertEquals(result.afterKey().toString(), "{keyword=a}"); - assertEquals("{keyword=a}", result.afterKey().toString()); - assertEquals(1, result.getBuckets().size()); - assertEquals("{keyword=a}", result.getBuckets().get(0).getKeyAsString()); - assertEquals(2L, result.getBuckets().get(0).getDocCount()); - } - ); + }, (result) -> { + assertEquals(result.afterKey().toString(), 
"{keyword=a}"); + assertEquals("{keyword=a}", result.afterKey().toString()); + assertEquals(1, result.getBuckets().size()); + assertEquals("{keyword=a}", result.getBuckets().get(0).getKeyAsString()); + assertEquals(2L, result.getBuckets().get(0).getDocCount()); + }); } public void testMultiValuedWithKeyword() throws Exception { @@ -966,46 +925,41 @@ public void testMultiValuedWithKeyword() throws Exception { ) ); - testSearchCase(Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery("keyword")), dataset, - () -> { - TermsValuesSourceBuilder terms = new TermsValuesSourceBuilder("keyword") - .field("keyword"); - return new CompositeAggregationBuilder("name", Collections.singletonList(terms)); - - }, (result) -> { - assertEquals(5, result.getBuckets().size()); - assertEquals("{keyword=z}", result.afterKey().toString()); - assertEquals("{keyword=a}", result.getBuckets().get(0).getKeyAsString()); - assertEquals(2L, result.getBuckets().get(0).getDocCount()); - assertEquals("{keyword=b}", result.getBuckets().get(1).getKeyAsString()); - assertEquals(2L, result.getBuckets().get(1).getDocCount()); - assertEquals("{keyword=c}", result.getBuckets().get(2).getKeyAsString()); - assertEquals(1L, result.getBuckets().get(2).getDocCount()); - assertEquals("{keyword=d}", result.getBuckets().get(3).getKeyAsString()); - assertEquals(1L, result.getBuckets().get(3).getDocCount()); - assertEquals("{keyword=z}", result.getBuckets().get(4).getKeyAsString()); - assertEquals(1L, result.getBuckets().get(4).getDocCount()); - } - ); + testSearchCase(Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery("keyword")), dataset, () -> { + TermsValuesSourceBuilder terms = new TermsValuesSourceBuilder("keyword").field("keyword"); + return new CompositeAggregationBuilder("name", Collections.singletonList(terms)); + + }, (result) -> { + assertEquals(5, result.getBuckets().size()); + assertEquals("{keyword=z}", result.afterKey().toString()); + assertEquals("{keyword=a}", result.getBuckets().get(0).getKeyAsString()); + assertEquals(2L, result.getBuckets().get(0).getDocCount()); + assertEquals("{keyword=b}", result.getBuckets().get(1).getKeyAsString()); + assertEquals(2L, result.getBuckets().get(1).getDocCount()); + assertEquals("{keyword=c}", result.getBuckets().get(2).getKeyAsString()); + assertEquals(1L, result.getBuckets().get(2).getDocCount()); + assertEquals("{keyword=d}", result.getBuckets().get(3).getKeyAsString()); + assertEquals(1L, result.getBuckets().get(3).getDocCount()); + assertEquals("{keyword=z}", result.getBuckets().get(4).getKeyAsString()); + assertEquals(1L, result.getBuckets().get(4).getDocCount()); + }); - testSearchCase(Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery("keyword")), dataset, - () -> { - TermsValuesSourceBuilder terms = new TermsValuesSourceBuilder("keyword") - .field("keyword"); - return new CompositeAggregationBuilder("name", Collections.singletonList(terms)) - .aggregateAfter(Collections.singletonMap("keyword", "b")); + testSearchCase(Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery("keyword")), dataset, () -> { + TermsValuesSourceBuilder terms = new TermsValuesSourceBuilder("keyword").field("keyword"); + return new CompositeAggregationBuilder("name", Collections.singletonList(terms)).aggregateAfter( + Collections.singletonMap("keyword", "b") + ); - }, (result) -> { - assertEquals(3, result.getBuckets().size()); - assertEquals("{keyword=z}", result.afterKey().toString()); - assertEquals("{keyword=c}", 
result.getBuckets().get(0).getKeyAsString()); - assertEquals(1L, result.getBuckets().get(0).getDocCount()); - assertEquals("{keyword=d}", result.getBuckets().get(1).getKeyAsString()); - assertEquals(1L, result.getBuckets().get(1).getDocCount()); - assertEquals("{keyword=z}", result.getBuckets().get(2).getKeyAsString()); - assertEquals(1L, result.getBuckets().get(2).getDocCount()); - } - ); + }, (result) -> { + assertEquals(3, result.getBuckets().size()); + assertEquals("{keyword=z}", result.afterKey().toString()); + assertEquals("{keyword=c}", result.getBuckets().get(0).getKeyAsString()); + assertEquals(1L, result.getBuckets().get(0).getDocCount()); + assertEquals("{keyword=d}", result.getBuckets().get(1).getKeyAsString()); + assertEquals(1L, result.getBuckets().get(1).getDocCount()); + assertEquals("{keyword=z}", result.getBuckets().get(2).getKeyAsString()); + assertEquals(1L, result.getBuckets().get(2).getDocCount()); + }); } public void testMultiValuedWithKeywordDesc() throws Exception { @@ -1020,46 +974,39 @@ public void testMultiValuedWithKeywordDesc() throws Exception { ) ); - testSearchCase(Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery("keyword")), dataset, - () -> { - TermsValuesSourceBuilder terms = new TermsValuesSourceBuilder("keyword") - .field("keyword") - .order(SortOrder.DESC); - return new CompositeAggregationBuilder("name", Collections.singletonList(terms)); - - }, (result) -> { - assertEquals(5, result.getBuckets().size()); - assertEquals("{keyword=a}", result.afterKey().toString()); - assertEquals("{keyword=a}", result.getBuckets().get(4).getKeyAsString()); - assertEquals(2L, result.getBuckets().get(4).getDocCount()); - assertEquals("{keyword=b}", result.getBuckets().get(3).getKeyAsString()); - assertEquals(2L, result.getBuckets().get(3).getDocCount()); - assertEquals("{keyword=c}", result.getBuckets().get(2).getKeyAsString()); - assertEquals(1L, result.getBuckets().get(2).getDocCount()); - assertEquals("{keyword=d}", result.getBuckets().get(1).getKeyAsString()); - assertEquals(1L, result.getBuckets().get(1).getDocCount()); - assertEquals("{keyword=z}", result.getBuckets().get(0).getKeyAsString()); - assertEquals(1L, result.getBuckets().get(0).getDocCount()); - } - ); + testSearchCase(Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery("keyword")), dataset, () -> { + TermsValuesSourceBuilder terms = new TermsValuesSourceBuilder("keyword").field("keyword").order(SortOrder.DESC); + return new CompositeAggregationBuilder("name", Collections.singletonList(terms)); + + }, (result) -> { + assertEquals(5, result.getBuckets().size()); + assertEquals("{keyword=a}", result.afterKey().toString()); + assertEquals("{keyword=a}", result.getBuckets().get(4).getKeyAsString()); + assertEquals(2L, result.getBuckets().get(4).getDocCount()); + assertEquals("{keyword=b}", result.getBuckets().get(3).getKeyAsString()); + assertEquals(2L, result.getBuckets().get(3).getDocCount()); + assertEquals("{keyword=c}", result.getBuckets().get(2).getKeyAsString()); + assertEquals(1L, result.getBuckets().get(2).getDocCount()); + assertEquals("{keyword=d}", result.getBuckets().get(1).getKeyAsString()); + assertEquals(1L, result.getBuckets().get(1).getDocCount()); + assertEquals("{keyword=z}", result.getBuckets().get(0).getKeyAsString()); + assertEquals(1L, result.getBuckets().get(0).getDocCount()); + }); - testSearchCase(Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery("keyword")), dataset, - () -> { - TermsValuesSourceBuilder terms = new 
TermsValuesSourceBuilder("keyword") - .field("keyword") - .order(SortOrder.DESC); - return new CompositeAggregationBuilder("name", Collections.singletonList(terms)) - .aggregateAfter(Collections.singletonMap("keyword", "c")); + testSearchCase(Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery("keyword")), dataset, () -> { + TermsValuesSourceBuilder terms = new TermsValuesSourceBuilder("keyword").field("keyword").order(SortOrder.DESC); + return new CompositeAggregationBuilder("name", Collections.singletonList(terms)).aggregateAfter( + Collections.singletonMap("keyword", "c") + ); - }, (result) -> { - assertEquals(2, result.getBuckets().size()); - assertEquals("{keyword=a}", result.afterKey().toString()); - assertEquals("{keyword=a}", result.getBuckets().get(1).getKeyAsString()); - assertEquals(2L, result.getBuckets().get(1).getDocCount()); - assertEquals("{keyword=b}", result.getBuckets().get(0).getKeyAsString()); - assertEquals(2L, result.getBuckets().get(1).getDocCount()); - } - ); + }, (result) -> { + assertEquals(2, result.getBuckets().size()); + assertEquals("{keyword=a}", result.afterKey().toString()); + assertEquals("{keyword=a}", result.getBuckets().get(1).getKeyAsString()); + assertEquals(2L, result.getBuckets().get(1).getDocCount()); + assertEquals("{keyword=b}", result.getBuckets().get(0).getKeyAsString()); + assertEquals(2L, result.getBuckets().get(1).getDocCount()); + }); } public void testWithKeywordAndLong() throws Exception { @@ -1075,12 +1022,12 @@ public void testWithKeywordAndLong() throws Exception { createDocument("long", 100L) ) ); - testSearchCase(Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery("keyword")), dataset, - () -> new CompositeAggregationBuilder("name", - Arrays.asList( - new TermsValuesSourceBuilder("keyword").field("keyword"), - new TermsValuesSourceBuilder("long").field("long") - ) + testSearchCase( + Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery("keyword")), + dataset, + () -> new CompositeAggregationBuilder( + "name", + Arrays.asList(new TermsValuesSourceBuilder("keyword").field("keyword"), new TermsValuesSourceBuilder("long").field("long")) ), (result) -> { assertEquals(4, result.getBuckets().size()); @@ -1096,14 +1043,13 @@ public void testWithKeywordAndLong() throws Exception { } ); - testSearchCase(Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery("keyword")), dataset, - () -> new CompositeAggregationBuilder("name", - Arrays.asList( - new TermsValuesSourceBuilder("keyword").field("keyword"), - new TermsValuesSourceBuilder("long").field("long") - ) - ).aggregateAfter(createAfterKey("keyword", "a", "long", 100L) - ), + testSearchCase( + Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery("keyword")), + dataset, + () -> new CompositeAggregationBuilder( + "name", + Arrays.asList(new TermsValuesSourceBuilder("keyword").field("keyword"), new TermsValuesSourceBuilder("long").field("long")) + ).aggregateAfter(createAfterKey("keyword", "a", "long", 100L)), (result) -> { assertEquals(2, result.getBuckets().size()); assertEquals("{keyword=d, long=10}", result.afterKey().toString()); @@ -1114,20 +1060,28 @@ public void testWithKeywordAndLong() throws Exception { } ); - Exception exc = expectThrows(ElasticsearchParseException.class, - () -> testSearchCase(Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery("date")), Collections.emptyList(), - () -> new CompositeAggregationBuilder("test", + Exception exc = expectThrows( + 
ElasticsearchParseException.class, + () -> testSearchCase( + Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery("date")), + Collections.emptyList(), + () -> new CompositeAggregationBuilder( + "test", Arrays.asList( new TermsValuesSourceBuilder("keyword").field("keyword"), new TermsValuesSourceBuilder("long").field("long") ) - ).aggregateAfter(createAfterKey("keyword", 0L, "long", 100L) - ), - (result) -> { - } - )); - assertThat(exc.getMessage(), containsString("Cannot set after key in the composite aggregation [test] - incompatible value in " + - "the position 0: invalid value, expected string, got Long")); + ).aggregateAfter(createAfterKey("keyword", 0L, "long", 100L)), + (result) -> {} + ) + ); + assertThat( + exc.getMessage(), + containsString( + "Cannot set after key in the composite aggregation [test] - incompatible value in " + + "the position 0: invalid value, expected string, got Long" + ) + ); } public void testWithKeywordAndLongDesc() throws Exception { @@ -1143,14 +1097,16 @@ public void testWithKeywordAndLongDesc() throws Exception { createDocument("long", 100L) ) ); - testSearchCase(Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery("keyword")), dataset, - () -> - new CompositeAggregationBuilder("name", - Arrays.asList( - new TermsValuesSourceBuilder("keyword").field("keyword").order(SortOrder.DESC), - new TermsValuesSourceBuilder("long").field("long").order(SortOrder.DESC) - ) - ), + testSearchCase( + Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery("keyword")), + dataset, + () -> new CompositeAggregationBuilder( + "name", + Arrays.asList( + new TermsValuesSourceBuilder("keyword").field("keyword").order(SortOrder.DESC), + new TermsValuesSourceBuilder("long").field("long").order(SortOrder.DESC) + ) + ), (result) -> { assertEquals(4, result.getBuckets().size()); assertEquals("{keyword=a, long=0}", result.afterKey().toString()); @@ -1165,14 +1121,17 @@ public void testWithKeywordAndLongDesc() throws Exception { } ); - testSearchCase(Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery("keyword")), dataset, - () -> - new CompositeAggregationBuilder("name", - Arrays.asList( - new TermsValuesSourceBuilder("keyword").field("keyword").order(SortOrder.DESC), - new TermsValuesSourceBuilder("long").field("long").order(SortOrder.DESC) - )).aggregateAfter(createAfterKey("keyword", "d", "long", 10L) - ), (result) -> { + testSearchCase( + Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery("keyword")), + dataset, + () -> new CompositeAggregationBuilder( + "name", + Arrays.asList( + new TermsValuesSourceBuilder("keyword").field("keyword").order(SortOrder.DESC), + new TermsValuesSourceBuilder("long").field("long").order(SortOrder.DESC) + ) + ).aggregateAfter(createAfterKey("keyword", "d", "long", 10L)), + (result) -> { assertEquals(3, result.getBuckets().size()); assertEquals("{keyword=a, long=0}", result.afterKey().toString()); assertEquals("{keyword=a, long=0}", result.getBuckets().get(2).getKeyAsString()); @@ -1200,8 +1159,11 @@ public void testWithKeywordLongAndMissingBucket() throws Exception { createDocument("double", 0d) ) ); - testSearchCase(Arrays.asList(new MatchAllDocsQuery()), dataset, - () -> new CompositeAggregationBuilder("name", + testSearchCase( + Arrays.asList(new MatchAllDocsQuery()), + dataset, + () -> new CompositeAggregationBuilder( + "name", Arrays.asList( new TermsValuesSourceBuilder("keyword").field("keyword").missingBucket(true), new 
TermsValuesSourceBuilder("long").field("long").missingBucket(true) @@ -1227,14 +1189,16 @@ public void testWithKeywordLongAndMissingBucket() throws Exception { } ); - testSearchCase(Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery("keyword")), dataset, - () -> new CompositeAggregationBuilder("name", + testSearchCase( + Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery("keyword")), + dataset, + () -> new CompositeAggregationBuilder( + "name", Arrays.asList( new TermsValuesSourceBuilder("keyword").field("keyword").missingBucket(true), new TermsValuesSourceBuilder("long").field("long").missingBucket(true) ) - ).aggregateAfter(createAfterKey("keyword", "c", "long", null) - ), + ).aggregateAfter(createAfterKey("keyword", "c", "long", null)), (result) -> { assertEquals(2, result.getBuckets().size()); assertEquals("{keyword=d, long=10}", result.afterKey().toString()); @@ -1259,14 +1223,14 @@ public void testMultiValuedWithKeywordAndLong() throws Exception { ) ); - testSearchCase(Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery("keyword")), dataset, - () -> - new CompositeAggregationBuilder("name", - Arrays.asList( - new TermsValuesSourceBuilder("keyword").field("keyword"), - new TermsValuesSourceBuilder("long").field("long") - )) - , (result) -> { + testSearchCase( + Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery("keyword")), + dataset, + () -> new CompositeAggregationBuilder( + "name", + Arrays.asList(new TermsValuesSourceBuilder("keyword").field("keyword"), new TermsValuesSourceBuilder("long").field("long")) + ), + (result) -> { assertEquals(10, result.getBuckets().size()); assertEquals("{keyword=z, long=0}", result.afterKey().toString()); assertEquals("{keyword=a, long=0}", result.getBuckets().get(0).getKeyAsString()); @@ -1292,15 +1256,14 @@ public void testMultiValuedWithKeywordAndLong() throws Exception { } ); - testSearchCase(Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery("keyword")), dataset, - () -> - new CompositeAggregationBuilder("name", - Arrays.asList( - new TermsValuesSourceBuilder("keyword").field("keyword"), - new TermsValuesSourceBuilder("long").field("long") - ) - ).aggregateAfter(createAfterKey("keyword", "c", "long", 10L)) - , (result) -> { + testSearchCase( + Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery("keyword")), + dataset, + () -> new CompositeAggregationBuilder( + "name", + Arrays.asList(new TermsValuesSourceBuilder("keyword").field("keyword"), new TermsValuesSourceBuilder("long").field("long")) + ).aggregateAfter(createAfterKey("keyword", "c", "long", 10L)), + (result) -> { assertEquals(6, result.getBuckets().size()); assertEquals("{keyword=z, long=100}", result.afterKey().toString()); assertEquals("{keyword=c, long=100}", result.getBuckets().get(0).getKeyAsString()); @@ -1332,15 +1295,16 @@ public void testMultiValuedWithKeywordAndLongDesc() throws Exception { ) ); - testSearchCase(Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery("keyword")), dataset, - () -> - new CompositeAggregationBuilder("name", - Arrays.asList( - new TermsValuesSourceBuilder("keyword").field("keyword").order(SortOrder.DESC), - new TermsValuesSourceBuilder("long").field("long").order(SortOrder.DESC) - ) - ).aggregateAfter(createAfterKey("keyword", "z", "long", 100L) - ), + testSearchCase( + Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery("keyword")), + dataset, + () -> new CompositeAggregationBuilder( + "name", + Arrays.asList( + 
new TermsValuesSourceBuilder("keyword").field("keyword").order(SortOrder.DESC), + new TermsValuesSourceBuilder("long").field("long").order(SortOrder.DESC) + ) + ).aggregateAfter(createAfterKey("keyword", "z", "long", 100L)), (result) -> { assertEquals(10, result.getBuckets().size()); assertEquals("{keyword=a, long=0}", result.afterKey().toString()); @@ -1367,15 +1331,16 @@ public void testMultiValuedWithKeywordAndLongDesc() throws Exception { } ); - testSearchCase(Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery("keyword")), dataset, - () -> - new CompositeAggregationBuilder("name", - Arrays.asList( - new TermsValuesSourceBuilder("keyword").field("keyword").order(SortOrder.DESC), - new TermsValuesSourceBuilder("long").field("long").order(SortOrder.DESC) - ) - ).aggregateAfter(createAfterKey("keyword", "b", "long", 100L) - ), + testSearchCase( + Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery("keyword")), + dataset, + () -> new CompositeAggregationBuilder( + "name", + Arrays.asList( + new TermsValuesSourceBuilder("keyword").field("keyword").order(SortOrder.DESC), + new TermsValuesSourceBuilder("long").field("long").order(SortOrder.DESC) + ) + ).aggregateAfter(createAfterKey("keyword", "b", "long", 100L)), (result) -> { assertEquals(2, result.getBuckets().size()); assertEquals("{keyword=a, long=0}", result.afterKey().toString()); @@ -1393,24 +1358,32 @@ public void testMultiValuedWithKeywordLongAndDouble() throws Exception { Arrays.asList( createDocument("keyword", Arrays.asList("a", "b", "c"), "long", 100L, "double", 0.4d), createDocument("keyword", "c", "long", Arrays.asList(100L, 0L, 10L), "double", 0.09d), - createDocument("keyword", Arrays.asList("a", "z", "c"), "long", Arrays.asList(0L, 100L), - "double", Arrays.asList(0.4d, 0.09d)), + createDocument( + "keyword", + Arrays.asList("a", "z", "c"), + "long", + Arrays.asList(0L, 100L), + "double", + Arrays.asList(0.4d, 0.09d) + ), createDocument("keyword", Arrays.asList("d", "d"), "long", Arrays.asList(10L, 100L, 1000L), "double", 1.0d), createDocument("keyword", "c"), createDocument("long", 100L) ) ); - testSearchCase(Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery("keyword")), dataset, - () -> - new CompositeAggregationBuilder("name", - Arrays.asList( - new TermsValuesSourceBuilder("keyword").field("keyword"), - new TermsValuesSourceBuilder("long").field("long"), - new TermsValuesSourceBuilder("double").field("double") - ) + testSearchCase( + Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery("keyword")), + dataset, + () -> new CompositeAggregationBuilder( + "name", + Arrays.asList( + new TermsValuesSourceBuilder("keyword").field("keyword"), + new TermsValuesSourceBuilder("long").field("long"), + new TermsValuesSourceBuilder("double").field("double") ) - , (result) -> { + ), + (result) -> { assertEquals(10, result.getBuckets().size()); assertEquals("{keyword=c, long=100, double=0.4}", result.afterKey().toString()); assertEquals("{keyword=a, long=0, double=0.09}", result.getBuckets().get(0).getKeyAsString()); @@ -1436,16 +1409,18 @@ public void testMultiValuedWithKeywordLongAndDouble() throws Exception { } ); - testSearchCase(Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery("keyword")), dataset, - () -> - new CompositeAggregationBuilder("name", - Arrays.asList( - new TermsValuesSourceBuilder("keyword").field("keyword"), - new TermsValuesSourceBuilder("long").field("long"), - new TermsValuesSourceBuilder("double").field("double") - ) - 
).aggregateAfter(createAfterKey("keyword", "a", "long", 100L, "double", 0.4d)) - , (result) -> { + testSearchCase( + Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery("keyword")), + dataset, + () -> new CompositeAggregationBuilder( + "name", + Arrays.asList( + new TermsValuesSourceBuilder("keyword").field("keyword"), + new TermsValuesSourceBuilder("long").field("long"), + new TermsValuesSourceBuilder("double").field("double") + ) + ).aggregateAfter(createAfterKey("keyword", "a", "long", 100L, "double", 0.4d)), + (result) -> { assertEquals(10, result.getBuckets().size()); assertEquals("{keyword=z, long=0, double=0.09}", result.afterKey().toString()); assertEquals("{keyword=b, long=100, double=0.4}", result.getBuckets().get(0).getKeyAsString()); @@ -1471,16 +1446,18 @@ public void testMultiValuedWithKeywordLongAndDouble() throws Exception { } ); - testSearchCase(Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery("keyword")), dataset, - () -> - new CompositeAggregationBuilder("name", - Arrays.asList( - new TermsValuesSourceBuilder("keyword").field("keyword"), - new TermsValuesSourceBuilder("long").field("long"), - new TermsValuesSourceBuilder("double").field("double") - ) - ).aggregateAfter(createAfterKey("keyword", "z", "long", 100L, "double", 0.4d)) - , (result) -> { + testSearchCase( + Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery("keyword")), + dataset, + () -> new CompositeAggregationBuilder( + "name", + Arrays.asList( + new TermsValuesSourceBuilder("keyword").field("keyword"), + new TermsValuesSourceBuilder("long").field("long"), + new TermsValuesSourceBuilder("double").field("double") + ) + ).aggregateAfter(createAfterKey("keyword", "z", "long", 100L, "double", 0.4d)), + (result) -> { assertEquals(0, result.getBuckets().size()); assertNull(result.afterKey()); } @@ -1499,15 +1476,15 @@ public void testWithDateHistogram() throws IOException { createDocument("long", 4L) ) ); - testSearchCase(Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery("date"), - LongPoint.newRangeQuery( - "date", - asLong("2016-09-20T09:00:34"), - asLong("2017-10-20T06:09:24") - )), dataset, + testSearchCase( + Arrays.asList( + new MatchAllDocsQuery(), + new DocValuesFieldExistsQuery("date"), + LongPoint.newRangeQuery("date", asLong("2016-09-20T09:00:34"), asLong("2017-10-20T06:09:24")) + ), + dataset, () -> { - DateHistogramValuesSourceBuilder histo = new DateHistogramValuesSourceBuilder("date") - .field("date") + DateHistogramValuesSourceBuilder histo = new DateHistogramValuesSourceBuilder("date").field("date") .calendarInterval(DateHistogramInterval.days(1)); return new CompositeAggregationBuilder("name", Collections.singletonList(histo)); }, @@ -1523,20 +1500,22 @@ public void testWithDateHistogram() throws IOException { } ); - testSearchCase(Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery("date"), - LongPoint.newRangeQuery( - "date", - asLong("2016-09-20T11:34:00"), - asLong("2017-10-20T06:09:24") - )), dataset, + testSearchCase( + Arrays.asList( + new MatchAllDocsQuery(), + new DocValuesFieldExistsQuery("date"), + LongPoint.newRangeQuery("date", asLong("2016-09-20T11:34:00"), asLong("2017-10-20T06:09:24")) + ), + dataset, () -> { - DateHistogramValuesSourceBuilder histo = new DateHistogramValuesSourceBuilder("date") - .field("date") + DateHistogramValuesSourceBuilder histo = new DateHistogramValuesSourceBuilder("date").field("date") .calendarInterval(DateHistogramInterval.days(1)); - return new 
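For the date-histogram cases that follow, a sketch of the single-source form being built (helper name mine, calls from the diff): with no format configured, the after key for a date source is epoch milliseconds, and 1474329600000L used in these tests is 2016-09-20T00:00:00Z, the bucket that holds the two September documents.

static CompositeAggregationBuilder dailyDateHistogramComposite() {
    DateHistogramValuesSourceBuilder date = new DateHistogramValuesSourceBuilder("date")
        .field("date")
        .calendarInterval(DateHistogramInterval.days(1));
    return new CompositeAggregationBuilder("name", Collections.singletonList(date))
        // resume after the 2016-09-20T00:00:00Z bucket (epoch millis, since no format is set)
        .aggregateAfter(Collections.singletonMap("date", 1474329600000L));
}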
CompositeAggregationBuilder("name", Collections.singletonList(histo)) - .aggregateAfter(createAfterKey("date", 1474329600000L)); + return new CompositeAggregationBuilder("name", Collections.singletonList(histo)).aggregateAfter( + createAfterKey("date", 1474329600000L) + ); - }, (result) -> { + }, + (result) -> { assertEquals(2, result.getBuckets().size()); assertEquals("{date=1508457600000}", result.afterKey().toString()); assertEquals("{date=1508371200000}", result.getBuckets().get(0).getKeyAsString()); @@ -1550,21 +1529,23 @@ public void testWithDateHistogram() throws IOException { * Tests a four hour offset, which moves the document with * date 2017-10-20T03:08:45 into 2017-10-19's bucket. */ - testSearchCase(Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery("date"), - LongPoint.newRangeQuery( - "date", - asLong("2016-09-20T09:00:34"), - asLong("2017-10-20T06:09:24") - )), dataset, + testSearchCase( + Arrays.asList( + new MatchAllDocsQuery(), + new DocValuesFieldExistsQuery("date"), + LongPoint.newRangeQuery("date", asLong("2016-09-20T09:00:34"), asLong("2017-10-20T06:09:24")) + ), + dataset, () -> { - DateHistogramValuesSourceBuilder histo = new DateHistogramValuesSourceBuilder("date") - .field("date") + DateHistogramValuesSourceBuilder histo = new DateHistogramValuesSourceBuilder("date").field("date") .calendarInterval(DateHistogramInterval.days(1)) .offset(TimeUnit.HOURS.toMillis(4)); - return new CompositeAggregationBuilder("name", Collections.singletonList(histo)) - .aggregateAfter(createAfterKey("date", 1474329600000L)); + return new CompositeAggregationBuilder("name", Collections.singletonList(histo)).aggregateAfter( + createAfterKey("date", 1474329600000L) + ); - }, (result) -> { + }, + (result) -> { assertEquals(3, result.getBuckets().size()); assertEquals("{date=1508472000000}", result.afterKey().toString()); assertEquals("{date=1474344000000}", result.getBuckets().get(0).getKeyAsString()); @@ -1580,21 +1561,23 @@ public void testWithDateHistogram() throws IOException { * Tests the -04:00 time zone. This functions identically to * the four hour offset. */ - testSearchCase(Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery("date"), - LongPoint.newRangeQuery( - "date", - asLong("2016-09-20T09:00:34"), - asLong("2017-10-20T06:09:24") - )), dataset, + testSearchCase( + Arrays.asList( + new MatchAllDocsQuery(), + new DocValuesFieldExistsQuery("date"), + LongPoint.newRangeQuery("date", asLong("2016-09-20T09:00:34"), asLong("2017-10-20T06:09:24")) + ), + dataset, () -> { - DateHistogramValuesSourceBuilder histo = new DateHistogramValuesSourceBuilder("date") - .field("date") + DateHistogramValuesSourceBuilder histo = new DateHistogramValuesSourceBuilder("date").field("date") .calendarInterval(DateHistogramInterval.days(1)) .timeZone(ZoneId.of("-04:00")); - return new CompositeAggregationBuilder("name", Collections.singletonList(histo)) - .aggregateAfter(createAfterKey("date", 1474329600000L)); + return new CompositeAggregationBuilder("name", Collections.singletonList(histo)).aggregateAfter( + createAfterKey("date", 1474329600000L) + ); - }, (result) -> { + }, + (result) -> { assertEquals(3, result.getBuckets().size()); assertEquals("{date=1508472000000}", result.afterKey().toString()); assertEquals("{date=1474344000000}", result.getBuckets().get(0).getKeyAsString()); @@ -1610,22 +1593,24 @@ public void testWithDateHistogram() throws IOException { * Tests a four hour offset with a time zone, demonstrating * why we support both things. 
*/ - testSearchCase(Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery("date"), - LongPoint.newRangeQuery( - "date", - asLong("2016-09-20T09:00:34"), - asLong("2017-10-20T06:09:24") - )), dataset, + testSearchCase( + Arrays.asList( + new MatchAllDocsQuery(), + new DocValuesFieldExistsQuery("date"), + LongPoint.newRangeQuery("date", asLong("2016-09-20T09:00:34"), asLong("2017-10-20T06:09:24")) + ), + dataset, () -> { - DateHistogramValuesSourceBuilder histo = new DateHistogramValuesSourceBuilder("date") - .field("date") + DateHistogramValuesSourceBuilder histo = new DateHistogramValuesSourceBuilder("date").field("date") .calendarInterval(DateHistogramInterval.days(1)) .offset(TimeUnit.HOURS.toMillis(4)) .timeZone(ZoneId.of("America/Los_Angeles")); - return new CompositeAggregationBuilder("name", Collections.singletonList(histo)) - .aggregateAfter(createAfterKey("date", 1474329600000L)); + return new CompositeAggregationBuilder("name", Collections.singletonList(histo)).aggregateAfter( + createAfterKey("date", 1474329600000L) + ); - }, (result) -> { + }, + (result) -> { assertEquals(3, result.getBuckets().size()); assertEquals("{date=1508410800000}", result.afterKey().toString()); assertEquals("{date=1474369200000}", result.getBuckets().get(0).getKeyAsString()); @@ -1650,15 +1635,15 @@ public void testWithDateTerms() throws IOException { createDocument("long", 4L) ) ); - testSearchCase(Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery("date"), - LongPoint.newRangeQuery( - "date", - asLong("2016-09-20T09:00:34"), - asLong("2017-10-20T06:09:24") - )), dataset, + testSearchCase( + Arrays.asList( + new MatchAllDocsQuery(), + new DocValuesFieldExistsQuery("date"), + LongPoint.newRangeQuery("date", asLong("2016-09-20T09:00:34"), asLong("2017-10-20T06:09:24")) + ), + dataset, () -> { - TermsValuesSourceBuilder histo = new TermsValuesSourceBuilder("date") - .field("date"); + TermsValuesSourceBuilder histo = new TermsValuesSourceBuilder("date").field("date"); return new CompositeAggregationBuilder("name", Collections.singletonList(histo)); }, (result) -> { @@ -1691,76 +1676,76 @@ public void testWithDateHistogramAndFormat() throws IOException { createDocument("long", 4L) ) ); - testSearchCase(Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery("date")), dataset, - () -> { - DateHistogramValuesSourceBuilder histo = new DateHistogramValuesSourceBuilder("date") - .field("date") - .fixedInterval(DateHistogramInterval.days(1)) - .format("yyyy-MM-dd"); - return new CompositeAggregationBuilder("name", Collections.singletonList(histo)); - }, - (result) -> { - assertEquals(3, result.getBuckets().size()); - assertEquals("{date=2017-10-20}", result.afterKey().toString()); - assertEquals("{date=2016-09-20}", result.getBuckets().get(0).getKeyAsString()); - assertEquals(2L, result.getBuckets().get(0).getDocCount()); - assertEquals("{date=2017-10-19}", result.getBuckets().get(1).getKeyAsString()); - assertEquals(1L, result.getBuckets().get(1).getDocCount()); - assertEquals("{date=2017-10-20}", result.getBuckets().get(2).getKeyAsString()); - assertEquals(2L, result.getBuckets().get(2).getDocCount()); - } - ); + testSearchCase(Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery("date")), dataset, () -> { + DateHistogramValuesSourceBuilder histo = new DateHistogramValuesSourceBuilder("date").field("date") + .fixedInterval(DateHistogramInterval.days(1)) + .format("yyyy-MM-dd"); + return new CompositeAggregationBuilder("name", 
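The Javadoc comments above spell out the distinction these cases exercise: offset(...) shifts every bucket boundary by a fixed amount, timeZone(...) shifts them according to a zone's rules, and the two can be combined. A side-by-side sketch of the two sources (method names mine, calls from the diff):

static DateHistogramValuesSourceBuilder offsetDaily() {
    // day buckets starting at 04:00 UTC instead of midnight
    return new DateHistogramValuesSourceBuilder("date").field("date")
        .calendarInterval(DateHistogramInterval.days(1))
        .offset(TimeUnit.HOURS.toMillis(4));
}

static DateHistogramValuesSourceBuilder zonedDaily() {
    // day buckets at local midnight in the -04:00 zone; for this data it lines up with the fixed offset above
    return new DateHistogramValuesSourceBuilder("date").field("date")
        .calendarInterval(DateHistogramInterval.days(1))
        .timeZone(ZoneId.of("-04:00"));
}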
Collections.singletonList(histo)); + }, (result) -> { + assertEquals(3, result.getBuckets().size()); + assertEquals("{date=2017-10-20}", result.afterKey().toString()); + assertEquals("{date=2016-09-20}", result.getBuckets().get(0).getKeyAsString()); + assertEquals(2L, result.getBuckets().get(0).getDocCount()); + assertEquals("{date=2017-10-19}", result.getBuckets().get(1).getKeyAsString()); + assertEquals(1L, result.getBuckets().get(1).getDocCount()); + assertEquals("{date=2017-10-20}", result.getBuckets().get(2).getKeyAsString()); + assertEquals(2L, result.getBuckets().get(2).getDocCount()); + }); - testSearchCase(Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery("date")), dataset, - () -> { - DateHistogramValuesSourceBuilder histo = new DateHistogramValuesSourceBuilder("date") - .field("date") - .fixedInterval(DateHistogramInterval.days(1)) - .format("yyyy-MM-dd"); - return new CompositeAggregationBuilder("name", Collections.singletonList(histo)) - .aggregateAfter(createAfterKey("date", "2016-09-20")); + testSearchCase(Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery("date")), dataset, () -> { + DateHistogramValuesSourceBuilder histo = new DateHistogramValuesSourceBuilder("date").field("date") + .fixedInterval(DateHistogramInterval.days(1)) + .format("yyyy-MM-dd"); + return new CompositeAggregationBuilder("name", Collections.singletonList(histo)).aggregateAfter( + createAfterKey("date", "2016-09-20") + ); - }, (result) -> { - assertEquals(2, result.getBuckets().size()); - assertEquals("{date=2017-10-20}", result.afterKey().toString()); - assertEquals("{date=2017-10-19}", result.getBuckets().get(0).getKeyAsString()); - assertEquals(1L, result.getBuckets().get(0).getDocCount()); - assertEquals("{date=2017-10-20}", result.getBuckets().get(1).getKeyAsString()); - assertEquals(2L, result.getBuckets().get(1).getDocCount()); - } - ); + }, (result) -> { + assertEquals(2, result.getBuckets().size()); + assertEquals("{date=2017-10-20}", result.afterKey().toString()); + assertEquals("{date=2017-10-19}", result.getBuckets().get(0).getKeyAsString()); + assertEquals(1L, result.getBuckets().get(0).getDocCount()); + assertEquals("{date=2017-10-20}", result.getBuckets().get(1).getKeyAsString()); + assertEquals(2L, result.getBuckets().get(1).getDocCount()); + }); } public void testThatDateHistogramFailsFormatAfter() throws IOException { - ElasticsearchParseException exc = expectThrows(ElasticsearchParseException.class, - () -> testSearchCase(Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery("date")), Collections.emptyList(), + ElasticsearchParseException exc = expectThrows( + ElasticsearchParseException.class, + () -> testSearchCase( + Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery("date")), + Collections.emptyList(), () -> { - DateHistogramValuesSourceBuilder histo = new DateHistogramValuesSourceBuilder("date") - .field("date") + DateHistogramValuesSourceBuilder histo = new DateHistogramValuesSourceBuilder("date").field("date") .fixedInterval(DateHistogramInterval.days(1)) .format("yyyy-MM-dd"); - return new CompositeAggregationBuilder("name", Collections.singletonList(histo)) - .aggregateAfter(createAfterKey("date", "now")); + return new CompositeAggregationBuilder("name", Collections.singletonList(histo)).aggregateAfter( + createAfterKey("date", "now") + ); }, - (result) -> { - } - )); + (result) -> {} + ) + ); assertThat(exc.getCause(), instanceOf(IllegalArgumentException.class)); assertThat(exc.getCause().getMessage(), 
containsString("now() is not supported in [after] key")); - exc = expectThrows(ElasticsearchParseException.class, - () -> testSearchCase(Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery("date")), Collections.emptyList(), + exc = expectThrows( + ElasticsearchParseException.class, + () -> testSearchCase( + Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery("date")), + Collections.emptyList(), () -> { - DateHistogramValuesSourceBuilder histo = new DateHistogramValuesSourceBuilder("date") - .field("date") + DateHistogramValuesSourceBuilder histo = new DateHistogramValuesSourceBuilder("date").field("date") .fixedInterval(DateHistogramInterval.days(1)) .format("yyyy-MM-dd"); - return new CompositeAggregationBuilder("name", Collections.singletonList(histo)) - .aggregateAfter(createAfterKey("date", "1474329600000")); + return new CompositeAggregationBuilder("name", Collections.singletonList(histo)).aggregateAfter( + createAfterKey("date", "1474329600000") + ); }, - (result) -> { - } - )); + (result) -> {} + ) + ); assertThat(exc.getMessage(), containsString("failed to parse date field [1474329600000]")); } @@ -1776,22 +1761,20 @@ public void testWithDateHistogramAndKeyword() throws IOException { createDocument("long", 4L) ) ); - testSearchCase(Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery("date"), - LongPoint.newRangeQuery( - "date", - asLong("2016-09-20T09:00:34"), - asLong("2017-10-20T06:09:24") - )), dataset, - () -> - new CompositeAggregationBuilder("name", - Arrays.asList( - new DateHistogramValuesSourceBuilder("date") - .field("date") - .fixedInterval(DateHistogramInterval.days(1)), - new TermsValuesSourceBuilder("keyword") - .field("keyword") - ) - ), + testSearchCase( + Arrays.asList( + new MatchAllDocsQuery(), + new DocValuesFieldExistsQuery("date"), + LongPoint.newRangeQuery("date", asLong("2016-09-20T09:00:34"), asLong("2017-10-20T06:09:24")) + ), + dataset, + () -> new CompositeAggregationBuilder( + "name", + Arrays.asList( + new DateHistogramValuesSourceBuilder("date").field("date").fixedInterval(DateHistogramInterval.days(1)), + new TermsValuesSourceBuilder("keyword").field("keyword") + ) + ), (result) -> { assertEquals(7, result.getBuckets().size()); assertEquals("{date=1508457600000, keyword=d}", result.afterKey().toString()); @@ -1812,23 +1795,21 @@ public void testWithDateHistogramAndKeyword() throws IOException { } ); - testSearchCase(Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery("date"), - LongPoint.newRangeQuery( - "date", - asLong("2016-09-20T11:34:00"), - asLong("2017-10-20T06:09:24") - )), dataset, - () -> - new CompositeAggregationBuilder("name", - Arrays.asList( - new DateHistogramValuesSourceBuilder("date") - .field("date") - .fixedInterval(DateHistogramInterval.days(1)), - new TermsValuesSourceBuilder("keyword") - .field("keyword") - ) - ).aggregateAfter(createAfterKey("date", 1508371200000L, "keyword", "g")) - , (result) -> { + testSearchCase( + Arrays.asList( + new MatchAllDocsQuery(), + new DocValuesFieldExistsQuery("date"), + LongPoint.newRangeQuery("date", asLong("2016-09-20T11:34:00"), asLong("2017-10-20T06:09:24")) + ), + dataset, + () -> new CompositeAggregationBuilder( + "name", + Arrays.asList( + new DateHistogramValuesSourceBuilder("date").field("date").fixedInterval(DateHistogramInterval.days(1)), + new TermsValuesSourceBuilder("keyword").field("keyword") + ) + ).aggregateAfter(createAfterKey("date", 1508371200000L, "keyword", "g")), + (result) -> { assertEquals(3, 
result.getBuckets().size()); assertEquals("{date=1508457600000, keyword=d}", result.afterKey().toString()); assertEquals("{date=1508457600000, keyword=a}", result.getBuckets().get(0).getKeyAsString()); @@ -1853,15 +1834,17 @@ public void testWithKeywordAndHistogram() throws IOException { createDocument("long", 4L) ) ); - testSearchCase(Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery("price")), dataset, - () -> - new CompositeAggregationBuilder("name", - Arrays.asList( - new TermsValuesSourceBuilder("keyword").field("keyword"), - new HistogramValuesSourceBuilder("price").field("price").interval(10) - ) + testSearchCase( + Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery("price")), + dataset, + () -> new CompositeAggregationBuilder( + "name", + Arrays.asList( + new TermsValuesSourceBuilder("keyword").field("keyword"), + new HistogramValuesSourceBuilder("price").field("price").interval(10) ) - , (result) -> { + ), + (result) -> { assertEquals(7, result.getBuckets().size()); assertEquals("{keyword=z, price=50.0}", result.afterKey().toString()); assertEquals("{keyword=a, price=100.0}", result.getBuckets().get(0).getKeyAsString()); @@ -1881,15 +1864,17 @@ public void testWithKeywordAndHistogram() throws IOException { } ); - testSearchCase(Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery("price")), dataset, - () -> - new CompositeAggregationBuilder("name", - Arrays.asList( - new TermsValuesSourceBuilder("keyword").field("keyword"), - new HistogramValuesSourceBuilder("price").field("price").interval(10) - ) - ).aggregateAfter(createAfterKey("keyword", "c", "price", 50.0)) - , (result) -> { + testSearchCase( + Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery("price")), + dataset, + () -> new CompositeAggregationBuilder( + "name", + Arrays.asList( + new TermsValuesSourceBuilder("keyword").field("keyword"), + new HistogramValuesSourceBuilder("price").field("price").interval(10) + ) + ).aggregateAfter(createAfterKey("keyword", "c", "price", 50.0)), + (result) -> { assertEquals(4, result.getBuckets().size()); assertEquals("{keyword=z, price=50.0}", result.afterKey().toString()); assertEquals("{keyword=c, price=100.0}", result.getBuckets().get(0).getKeyAsString()); @@ -1921,15 +1906,17 @@ public void testWithHistogramAndKeyword() throws IOException { createDocument("long", 4L) ) ); - testSearchCase(Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery("double")), dataset, - () -> - new CompositeAggregationBuilder("name", - Arrays.asList( - new HistogramValuesSourceBuilder("histo").field("double").interval(0.1), - new TermsValuesSourceBuilder("keyword").field("keyword") - ) + testSearchCase( + Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery("double")), + dataset, + () -> new CompositeAggregationBuilder( + "name", + Arrays.asList( + new HistogramValuesSourceBuilder("histo").field("double").interval(0.1), + new TermsValuesSourceBuilder("keyword").field("keyword") ) - , (result) -> { + ), + (result) -> { assertEquals(8, result.getBuckets().size()); assertEquals("{histo=0.9, keyword=d}", result.afterKey().toString()); assertEquals("{histo=0.4, keyword=a}", result.getBuckets().get(0).getKeyAsString()); @@ -1951,15 +1938,17 @@ public void testWithHistogramAndKeyword() throws IOException { } ); - testSearchCase(Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery("double")), dataset, - () -> - new CompositeAggregationBuilder("name", - Arrays.asList( - new 
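For the keyword-plus-price cases above, the numeric histogram source groups values into fixed-width intervals, and its component of the after key is the lower bound of the last consumed bucket as a double (e.g. "price", 50.0). A sketch (helper name mine, calls from the diff):

static CompositeAggregationBuilder keywordAndPriceComposite() {
    Map<String, Object> afterKey = new HashMap<>();
    afterKey.put("keyword", "c");
    afterKey.put("price", 50.0); // lower bound of the last price bucket already returned
    return new CompositeAggregationBuilder(
        "name",
        Arrays.asList(
            new TermsValuesSourceBuilder("keyword").field("keyword"),
            new HistogramValuesSourceBuilder("price").field("price").interval(10)
        )
    ).aggregateAfter(afterKey);
}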
HistogramValuesSourceBuilder("histo").field("double").interval(0.1), - new TermsValuesSourceBuilder("keyword").field("keyword") - ) - ).aggregateAfter(createAfterKey("histo", 0.8d, "keyword", "b")) - , (result) -> { + testSearchCase( + Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery("double")), + dataset, + () -> new CompositeAggregationBuilder( + "name", + Arrays.asList( + new HistogramValuesSourceBuilder("histo").field("double").interval(0.1), + new TermsValuesSourceBuilder("keyword").field("keyword") + ) + ).aggregateAfter(createAfterKey("histo", 0.8d, "keyword", "b")), + (result) -> { assertEquals(3, result.getBuckets().size()); assertEquals("{histo=0.9, keyword=d}", result.afterKey().toString()); assertEquals("{histo=0.8, keyword=z}", result.getBuckets().get(0).getKeyAsString()); @@ -1984,16 +1973,17 @@ public void testWithKeywordAndDateHistogram() throws IOException { createDocument("long", 4L) ) ); - testSearchCase(Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery("keyword")), dataset, - () -> - new CompositeAggregationBuilder("name", - Arrays.asList( - new TermsValuesSourceBuilder("keyword").field("keyword"), - new DateHistogramValuesSourceBuilder("date_histo").field("date") - .fixedInterval(DateHistogramInterval.days(1)) - ) + testSearchCase( + Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery("keyword")), + dataset, + () -> new CompositeAggregationBuilder( + "name", + Arrays.asList( + new TermsValuesSourceBuilder("keyword").field("keyword"), + new DateHistogramValuesSourceBuilder("date_histo").field("date").fixedInterval(DateHistogramInterval.days(1)) ) - , (result) -> { + ), + (result) -> { assertEquals(7, result.getBuckets().size()); assertEquals("{keyword=z, date_histo=1474329600000}", result.afterKey().toString()); assertEquals("{keyword=a, date_histo=1508457600000}", result.getBuckets().get(0).getKeyAsString()); @@ -2013,16 +2003,17 @@ public void testWithKeywordAndDateHistogram() throws IOException { } ); - testSearchCase(Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery("keyword")), dataset, - () -> - new CompositeAggregationBuilder("name", - Arrays.asList( - new TermsValuesSourceBuilder("keyword").field("keyword"), - new DateHistogramValuesSourceBuilder("date_histo").field("date") - .fixedInterval(DateHistogramInterval.days(1)) - ) - ).aggregateAfter(createAfterKey("keyword", "c", "date_histo", 1474329600000L)) - , (result) -> { + testSearchCase( + Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery("keyword")), + dataset, + () -> new CompositeAggregationBuilder( + "name", + Arrays.asList( + new TermsValuesSourceBuilder("keyword").field("keyword"), + new DateHistogramValuesSourceBuilder("date_histo").field("date").fixedInterval(DateHistogramInterval.days(1)) + ) + ).aggregateAfter(createAfterKey("keyword", "c", "date_histo", 1474329600000L)), + (result) -> { assertEquals(4, result.getBuckets().size()); assertEquals("{keyword=z, date_histo=1474329600000}", result.afterKey().toString()); assertEquals("{keyword=c, date_histo=1508457600000}", result.getBuckets().get(0).getKeyAsString()); @@ -2048,78 +2039,71 @@ public void testWithKeywordAndTopHits() throws Exception { createDocument("keyword", "c") ) ); - testSearchCase(Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery("keyword")), dataset, - () -> { - TermsValuesSourceBuilder terms = new TermsValuesSourceBuilder("keyword") - .field("keyword"); - return new CompositeAggregationBuilder("name", 
Collections.singletonList(terms)) - .subAggregation(new TopHitsAggregationBuilder("top_hits").storedField("_none_")); - }, (result) -> { - assertEquals(3, result.getBuckets().size()); - assertEquals("{keyword=a}", result.getBuckets().get(0).getKeyAsString()); - assertEquals(2L, result.getBuckets().get(0).getDocCount()); - TopHits topHits = result.getBuckets().get(0).getAggregations().get("top_hits"); - assertNotNull(topHits); - assertEquals(topHits.getHits().getHits().length, 2); - assertEquals(topHits.getHits().getTotalHits().value, 2L); - assertEquals("{keyword=c}", result.getBuckets().get(1).getKeyAsString()); - assertEquals(2L, result.getBuckets().get(1).getDocCount()); - topHits = result.getBuckets().get(1).getAggregations().get("top_hits"); - assertNotNull(topHits); - assertEquals(topHits.getHits().getHits().length, 2); - assertEquals(topHits.getHits().getTotalHits().value, 2L); - assertEquals("{keyword=d}", result.getBuckets().get(2).getKeyAsString()); - assertEquals(1L, result.getBuckets().get(2).getDocCount()); - topHits = result.getBuckets().get(2).getAggregations().get("top_hits"); - assertNotNull(topHits); - assertEquals(topHits.getHits().getHits().length, 1); - assertEquals(topHits.getHits().getTotalHits().value, 1L); - } - ); + testSearchCase(Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery("keyword")), dataset, () -> { + TermsValuesSourceBuilder terms = new TermsValuesSourceBuilder("keyword").field("keyword"); + return new CompositeAggregationBuilder("name", Collections.singletonList(terms)).subAggregation( + new TopHitsAggregationBuilder("top_hits").storedField("_none_") + ); + }, (result) -> { + assertEquals(3, result.getBuckets().size()); + assertEquals("{keyword=a}", result.getBuckets().get(0).getKeyAsString()); + assertEquals(2L, result.getBuckets().get(0).getDocCount()); + TopHits topHits = result.getBuckets().get(0).getAggregations().get("top_hits"); + assertNotNull(topHits); + assertEquals(topHits.getHits().getHits().length, 2); + assertEquals(topHits.getHits().getTotalHits().value, 2L); + assertEquals("{keyword=c}", result.getBuckets().get(1).getKeyAsString()); + assertEquals(2L, result.getBuckets().get(1).getDocCount()); + topHits = result.getBuckets().get(1).getAggregations().get("top_hits"); + assertNotNull(topHits); + assertEquals(topHits.getHits().getHits().length, 2); + assertEquals(topHits.getHits().getTotalHits().value, 2L); + assertEquals("{keyword=d}", result.getBuckets().get(2).getKeyAsString()); + assertEquals(1L, result.getBuckets().get(2).getDocCount()); + topHits = result.getBuckets().get(2).getAggregations().get("top_hits"); + assertNotNull(topHits); + assertEquals(topHits.getHits().getHits().length, 1); + assertEquals(topHits.getHits().getTotalHits().value, 1L); + }); - testSearchCase(Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery("keyword")), dataset, - () -> { - TermsValuesSourceBuilder terms = new TermsValuesSourceBuilder("keyword") - .field("keyword"); - return new CompositeAggregationBuilder("name", Collections.singletonList(terms)) - .aggregateAfter(Collections.singletonMap("keyword", "a")) - .subAggregation(new TopHitsAggregationBuilder("top_hits").storedField("_none_")); - }, (result) -> { - assertEquals(2, result.getBuckets().size()); - assertEquals("{keyword=c}", result.getBuckets().get(0).getKeyAsString()); - assertEquals(2L, result.getBuckets().get(0).getDocCount()); - TopHits topHits = result.getBuckets().get(0).getAggregations().get("top_hits"); - assertNotNull(topHits); - 
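The top_hits cases above also show how sub-aggregations hang off a composite: each returned bucket carries its own aggregation results, which the assertions read back bucket by bucket. A sketch of the builder and of reading one bucket, using the same result types named in this file (method names mine):

static CompositeAggregationBuilder compositeWithTopHits() {
    TermsValuesSourceBuilder terms = new TermsValuesSourceBuilder("keyword").field("keyword");
    return new CompositeAggregationBuilder("name", Collections.singletonList(terms))
        .subAggregation(new TopHitsAggregationBuilder("top_hits").storedField("_none_"));
}

static void readFirstBucket(InternalComposite result) {
    InternalComposite.InternalBucket bucket = result.getBuckets().get(0);
    TopHits topHits = bucket.getAggregations().get("top_hits"); // per-bucket sub-aggregation result
    long docCount = bucket.getDocCount();
    long topHitCount = topHits.getHits().getTotalHits().value;
    // in these fixtures every matching document of the bucket is also a top hit, so the two counts agree
}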
assertEquals(topHits.getHits().getHits().length, 2); - assertEquals(topHits.getHits().getTotalHits().value, 2L); - assertEquals("{keyword=d}", result.getBuckets().get(1).getKeyAsString()); - assertEquals(1L, result.getBuckets().get(1).getDocCount()); - topHits = result.getBuckets().get(1).getAggregations().get("top_hits"); - assertNotNull(topHits); - assertEquals(topHits.getHits().getHits().length, 1); - assertEquals(topHits.getHits().getTotalHits().value, 1L); - } - ); + testSearchCase(Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery("keyword")), dataset, () -> { + TermsValuesSourceBuilder terms = new TermsValuesSourceBuilder("keyword").field("keyword"); + return new CompositeAggregationBuilder("name", Collections.singletonList(terms)).aggregateAfter( + Collections.singletonMap("keyword", "a") + ).subAggregation(new TopHitsAggregationBuilder("top_hits").storedField("_none_")); + }, (result) -> { + assertEquals(2, result.getBuckets().size()); + assertEquals("{keyword=c}", result.getBuckets().get(0).getKeyAsString()); + assertEquals(2L, result.getBuckets().get(0).getDocCount()); + TopHits topHits = result.getBuckets().get(0).getAggregations().get("top_hits"); + assertNotNull(topHits); + assertEquals(topHits.getHits().getHits().length, 2); + assertEquals(topHits.getHits().getTotalHits().value, 2L); + assertEquals("{keyword=d}", result.getBuckets().get(1).getKeyAsString()); + assertEquals(1L, result.getBuckets().get(1).getDocCount()); + topHits = result.getBuckets().get(1).getAggregations().get("top_hits"); + assertNotNull(topHits); + assertEquals(topHits.getHits().getHits().length, 1); + assertEquals(topHits.getHits().getTotalHits().value, 1L); + }); } public void testWithTermsSubAggExecutionMode() throws Exception { // test with no bucket for (Aggregator.SubAggCollectionMode mode : Aggregator.SubAggCollectionMode.values()) { - testSearchCase(Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery("keyword")), + testSearchCase( + Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery("keyword")), Collections.singletonList(createDocument()), () -> { - TermsValuesSourceBuilder terms = new TermsValuesSourceBuilder("keyword") - .field("keyword"); - return new CompositeAggregationBuilder("name", Collections.singletonList(terms)) - .subAggregation( - new TermsAggregationBuilder("terms").userValueTypeHint(ValueType.STRING) - .field("terms") - .collectMode(mode) - .subAggregation(new MaxAggregationBuilder("max").field("long")) - ); - }, (result) -> { - assertEquals(0, result.getBuckets().size()); - } + TermsValuesSourceBuilder terms = new TermsValuesSourceBuilder("keyword").field("keyword"); + return new CompositeAggregationBuilder("name", Collections.singletonList(terms)).subAggregation( + new TermsAggregationBuilder("terms").userValueTypeHint(ValueType.STRING) + .field("terms") + .collectMode(mode) + .subAggregation(new MaxAggregationBuilder("max").field("long")) + ); + }, + (result) -> { assertEquals(0, result.getBuckets().size()); } ); } @@ -2134,51 +2118,47 @@ public void testWithTermsSubAggExecutionMode() throws Exception { ) ); for (Aggregator.SubAggCollectionMode mode : Aggregator.SubAggCollectionMode.values()) { - testSearchCase(Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery("keyword")), dataset, - () -> { - TermsValuesSourceBuilder terms = new TermsValuesSourceBuilder("keyword") - .field("keyword"); - return new CompositeAggregationBuilder("name", Collections.singletonList(terms)) - .subAggregation( - new 
TermsAggregationBuilder("terms").userValueTypeHint(ValueType.STRING) - .field("terms") - .collectMode(mode) - .subAggregation(new MaxAggregationBuilder("max").field("long")) - ); - }, (result) -> { - assertEquals(3, result.getBuckets().size()); + testSearchCase(Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery("keyword")), dataset, () -> { + TermsValuesSourceBuilder terms = new TermsValuesSourceBuilder("keyword").field("keyword"); + return new CompositeAggregationBuilder("name", Collections.singletonList(terms)).subAggregation( + new TermsAggregationBuilder("terms").userValueTypeHint(ValueType.STRING) + .field("terms") + .collectMode(mode) + .subAggregation(new MaxAggregationBuilder("max").field("long")) + ); + }, (result) -> { + assertEquals(3, result.getBuckets().size()); - assertEquals("{keyword=a}", result.getBuckets().get(0).getKeyAsString()); - assertEquals(2L, result.getBuckets().get(0).getDocCount()); - StringTerms subTerms = result.getBuckets().get(0).getAggregations().get("terms"); - assertEquals(2, subTerms.getBuckets().size()); - assertEquals("a", subTerms.getBuckets().get(0).getKeyAsString()); - assertEquals("w", subTerms.getBuckets().get(1).getKeyAsString()); - InternalMax max = subTerms.getBuckets().get(0).getAggregations().get("max"); - assertEquals(50L, (long) max.getValue()); - max = subTerms.getBuckets().get(1).getAggregations().get("max"); - assertEquals(78L, (long) max.getValue()); - - assertEquals("{keyword=c}", result.getBuckets().get(1).getKeyAsString()); - assertEquals(2L, result.getBuckets().get(1).getDocCount()); - subTerms = result.getBuckets().get(1).getAggregations().get("terms"); - assertEquals(2, subTerms.getBuckets().size()); - assertEquals("d", subTerms.getBuckets().get(0).getKeyAsString()); - assertEquals("y", subTerms.getBuckets().get(1).getKeyAsString()); - max = subTerms.getBuckets().get(0).getAggregations().get("max"); - assertEquals(78L, (long) max.getValue()); - max = subTerms.getBuckets().get(1).getAggregations().get("max"); - assertEquals(70L, (long) max.getValue()); - - assertEquals("{keyword=d}", result.getBuckets().get(2).getKeyAsString()); - assertEquals(1L, result.getBuckets().get(2).getDocCount()); - subTerms = result.getBuckets().get(2).getAggregations().get("terms"); - assertEquals(1, subTerms.getBuckets().size()); - assertEquals("y", subTerms.getBuckets().get(0).getKeyAsString()); - max = subTerms.getBuckets().get(0).getAggregations().get("max"); - assertEquals(76L, (long) max.getValue()); - } - ); + assertEquals("{keyword=a}", result.getBuckets().get(0).getKeyAsString()); + assertEquals(2L, result.getBuckets().get(0).getDocCount()); + StringTerms subTerms = result.getBuckets().get(0).getAggregations().get("terms"); + assertEquals(2, subTerms.getBuckets().size()); + assertEquals("a", subTerms.getBuckets().get(0).getKeyAsString()); + assertEquals("w", subTerms.getBuckets().get(1).getKeyAsString()); + InternalMax max = subTerms.getBuckets().get(0).getAggregations().get("max"); + assertEquals(50L, (long) max.getValue()); + max = subTerms.getBuckets().get(1).getAggregations().get("max"); + assertEquals(78L, (long) max.getValue()); + + assertEquals("{keyword=c}", result.getBuckets().get(1).getKeyAsString()); + assertEquals(2L, result.getBuckets().get(1).getDocCount()); + subTerms = result.getBuckets().get(1).getAggregations().get("terms"); + assertEquals(2, subTerms.getBuckets().size()); + assertEquals("d", subTerms.getBuckets().get(0).getKeyAsString()); + assertEquals("y", subTerms.getBuckets().get(1).getKeyAsString()); + 
max = subTerms.getBuckets().get(0).getAggregations().get("max"); + assertEquals(78L, (long) max.getValue()); + max = subTerms.getBuckets().get(1).getAggregations().get("max"); + assertEquals(70L, (long) max.getValue()); + + assertEquals("{keyword=d}", result.getBuckets().get(2).getKeyAsString()); + assertEquals(1L, result.getBuckets().get(2).getDocCount()); + subTerms = result.getBuckets().get(2).getAggregations().get("terms"); + assertEquals(1, subTerms.getBuckets().size()); + assertEquals("y", subTerms.getBuckets().get(0).getKeyAsString()); + max = subTerms.getBuckets().get(0).getAggregations().get("max"); + assertEquals(76L, (long) max.getValue()); + }); } } @@ -2226,9 +2206,11 @@ public void testNullSourceNonNullCollection() { assertThat(e.getMessage(), equalTo("Composite source cannot be null")); } - private , V extends Comparable> void testRandomTerms(String field, - Supplier randomSupplier, - Function transformKey) throws IOException { + private , V extends Comparable> void testRandomTerms( + String field, + Supplier randomSupplier, + Function transformKey + ) throws IOException { int numTerms = randomIntBetween(10, 500); List terms = new ArrayList<>(); for (int i = 0; i < numTerms; i++) { @@ -2245,8 +2227,7 @@ private , V extends Comparable> void testRandomTerms( for (int j = 0; j < numValues; j++) { int rand = randomIntBetween(0, terms.size() - 1); if (values.add(terms.get(rand))) { - AtomicLong count = expectedDocCounts.computeIfAbsent(terms.get(rand), - (k) -> new AtomicLong(0)); + AtomicLong count = expectedDocCounts.computeIfAbsent(terms.get(rand), (k) -> new AtomicLong(0)); count.incrementAndGet(); valuesSet.add(terms.get(rand)); } @@ -2260,27 +2241,25 @@ private , V extends Comparable> void testRandomTerms( AtomicBoolean finish = new AtomicBoolean(false); int size = randomIntBetween(1, expected.size()); while (finish.get() == false) { - testSearchCase(Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery(field)), dataset, - () -> { - Map afterKey = null; - if (seen.size() > 0) { - afterKey = Collections.singletonMap(field, seen.get(seen.size() - 1)); - } - TermsValuesSourceBuilder source = new TermsValuesSourceBuilder(field).field(field); - return new CompositeAggregationBuilder("name", Collections.singletonList(source)) - .subAggregation(new TopHitsAggregationBuilder("top_hits").storedField("_none_")) - .aggregateAfter(afterKey) - .size(size); - }, (result) -> { - if (result.getBuckets().size() == 0) { - finish.set(true); - } - for (InternalComposite.InternalBucket bucket : result.getBuckets()) { - V term = transformKey.apply(bucket.getKey().get(field)); - seen.add(term); - assertThat(bucket.getDocCount(), equalTo(expectedDocCounts.get(term).get())); - } - }); + testSearchCase(Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery(field)), dataset, () -> { + Map afterKey = null; + if (seen.size() > 0) { + afterKey = Collections.singletonMap(field, seen.get(seen.size() - 1)); + } + TermsValuesSourceBuilder source = new TermsValuesSourceBuilder(field).field(field); + return new CompositeAggregationBuilder("name", Collections.singletonList(source)).subAggregation( + new TopHitsAggregationBuilder("top_hits").storedField("_none_") + ).aggregateAfter(afterKey).size(size); + }, (result) -> { + if (result.getBuckets().size() == 0) { + finish.set(true); + } + for (InternalComposite.InternalBucket bucket : result.getBuckets()) { + V term = transformKey.apply(bucket.getKey().get(field)); + seen.add(term); + assertThat(bucket.getDocCount(), 
equalTo(expectedDocCounts.get(term).get())); + } + }); } assertEquals(expected, seen); } @@ -2296,38 +2275,33 @@ public void testWithIP() throws Exception { createDocument("ip", InetAddress.getByName("192.168.0.1")) ) ); - testSearchCase(Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery("ip")), dataset, - () -> { - TermsValuesSourceBuilder terms = new TermsValuesSourceBuilder("ip") - .field("ip"); - return new CompositeAggregationBuilder("name", Collections.singletonList(terms)); - }, (result) -> { - assertEquals(3, result.getBuckets().size()); - assertEquals("{ip=192.168.0.1}", result.afterKey().toString()); - assertEquals("{ip=::1}", result.getBuckets().get(0).getKeyAsString()); - assertEquals(2L, result.getBuckets().get(0).getDocCount()); - assertEquals("{ip=127.0.0.1}", result.getBuckets().get(1).getKeyAsString()); - assertEquals(1L, result.getBuckets().get(1).getDocCount()); - assertEquals("{ip=192.168.0.1}", result.getBuckets().get(2).getKeyAsString()); - assertEquals(2L, result.getBuckets().get(2).getDocCount()); - } - ); + testSearchCase(Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery("ip")), dataset, () -> { + TermsValuesSourceBuilder terms = new TermsValuesSourceBuilder("ip").field("ip"); + return new CompositeAggregationBuilder("name", Collections.singletonList(terms)); + }, (result) -> { + assertEquals(3, result.getBuckets().size()); + assertEquals("{ip=192.168.0.1}", result.afterKey().toString()); + assertEquals("{ip=::1}", result.getBuckets().get(0).getKeyAsString()); + assertEquals(2L, result.getBuckets().get(0).getDocCount()); + assertEquals("{ip=127.0.0.1}", result.getBuckets().get(1).getKeyAsString()); + assertEquals(1L, result.getBuckets().get(1).getDocCount()); + assertEquals("{ip=192.168.0.1}", result.getBuckets().get(2).getKeyAsString()); + assertEquals(2L, result.getBuckets().get(2).getDocCount()); + }); - testSearchCase(Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery("ip")), dataset, - () -> { - TermsValuesSourceBuilder terms = new TermsValuesSourceBuilder("ip") - .field("ip"); - return new CompositeAggregationBuilder("name", Collections.singletonList(terms)) - .aggregateAfter(Collections.singletonMap("ip", "::1")); - }, (result) -> { - assertEquals(2, result.getBuckets().size()); - assertEquals("{ip=192.168.0.1}", result.afterKey().toString()); - assertEquals("{ip=127.0.0.1}", result.getBuckets().get(0).getKeyAsString()); - assertEquals(1L, result.getBuckets().get(0).getDocCount()); - assertEquals("{ip=192.168.0.1}", result.getBuckets().get(1).getKeyAsString()); - assertEquals(2L, result.getBuckets().get(1).getDocCount()); - } - ); + testSearchCase(Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery("ip")), dataset, () -> { + TermsValuesSourceBuilder terms = new TermsValuesSourceBuilder("ip").field("ip"); + return new CompositeAggregationBuilder("name", Collections.singletonList(terms)).aggregateAfter( + Collections.singletonMap("ip", "::1") + ); + }, (result) -> { + assertEquals(2, result.getBuckets().size()); + assertEquals("{ip=192.168.0.1}", result.afterKey().toString()); + assertEquals("{ip=127.0.0.1}", result.getBuckets().get(0).getKeyAsString()); + assertEquals(1L, result.getBuckets().get(0).getDocCount()); + assertEquals("{ip=192.168.0.1}", result.getBuckets().get(1).getKeyAsString()); + assertEquals(2L, result.getBuckets().get(1).getDocCount()); + }); } public void testWithGeoPoint() throws Exception { @@ -2341,34 +2315,29 @@ public void testWithGeoPoint() throws Exception { 
createDocument("geo_point", new GeoPoint(90.0, 0.0)) ) ); - testSearchCase(Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery("geo_point")), dataset, - () -> { - GeoTileGridValuesSourceBuilder geoTile = new GeoTileGridValuesSourceBuilder("geo_point") - .field("geo_point"); - return new CompositeAggregationBuilder("name", Collections.singletonList(geoTile)); - }, (result) -> { - assertEquals(2, result.getBuckets().size()); - assertEquals("{geo_point=7/64/56}", result.afterKey().toString()); - assertEquals("{geo_point=7/32/56}", result.getBuckets().get(0).getKeyAsString()); - assertEquals(2L, result.getBuckets().get(0).getDocCount()); - assertEquals("{geo_point=7/64/56}", result.getBuckets().get(1).getKeyAsString()); - assertEquals(3L, result.getBuckets().get(1).getDocCount()); - } - ); + testSearchCase(Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery("geo_point")), dataset, () -> { + GeoTileGridValuesSourceBuilder geoTile = new GeoTileGridValuesSourceBuilder("geo_point").field("geo_point"); + return new CompositeAggregationBuilder("name", Collections.singletonList(geoTile)); + }, (result) -> { + assertEquals(2, result.getBuckets().size()); + assertEquals("{geo_point=7/64/56}", result.afterKey().toString()); + assertEquals("{geo_point=7/32/56}", result.getBuckets().get(0).getKeyAsString()); + assertEquals(2L, result.getBuckets().get(0).getDocCount()); + assertEquals("{geo_point=7/64/56}", result.getBuckets().get(1).getKeyAsString()); + assertEquals(3L, result.getBuckets().get(1).getDocCount()); + }); - testSearchCase(Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery("geo_point")), dataset, - () -> { - GeoTileGridValuesSourceBuilder geoTile = new GeoTileGridValuesSourceBuilder("geo_point") - .field("geo_point"); - return new CompositeAggregationBuilder("name", Collections.singletonList(geoTile)) - .aggregateAfter(Collections.singletonMap("geo_point", "7/32/56")); - }, (result) -> { - assertEquals(1, result.getBuckets().size()); - assertEquals("{geo_point=7/64/56}", result.afterKey().toString()); - assertEquals("{geo_point=7/64/56}", result.getBuckets().get(0).getKeyAsString()); - assertEquals(3L, result.getBuckets().get(0).getDocCount()); - } - ); + testSearchCase(Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery("geo_point")), dataset, () -> { + GeoTileGridValuesSourceBuilder geoTile = new GeoTileGridValuesSourceBuilder("geo_point").field("geo_point"); + return new CompositeAggregationBuilder("name", Collections.singletonList(geoTile)).aggregateAfter( + Collections.singletonMap("geo_point", "7/32/56") + ); + }, (result) -> { + assertEquals(1, result.getBuckets().size()); + assertEquals("{geo_point=7/64/56}", result.afterKey().toString()); + assertEquals("{geo_point=7/64/56}", result.getBuckets().get(0).getKeyAsString()); + assertEquals(3L, result.getBuckets().get(0).getDocCount()); + }); } public void testEarlyTermination() throws Exception { @@ -2386,14 +2355,15 @@ public void testEarlyTermination() throws Exception { ) ); - executeTestCase(true, true, new TermQuery(new Term("foo", "bar")), + executeTestCase( + true, + true, + new TermQuery(new Term("foo", "bar")), dataset, - () -> - new CompositeAggregationBuilder("name", - Arrays.asList( - new TermsValuesSourceBuilder("keyword").field("keyword"), - new TermsValuesSourceBuilder("long").field("long") - )).aggregateAfter(createAfterKey("keyword", "b", "long", 10L)).size(2), + () -> new CompositeAggregationBuilder( + "name", + Arrays.asList(new 
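For the geo_point cases above, the geotile source buckets points into map tiles, and both the bucket keys and the after key use the "zoom/x/y" string form (e.g. "7/32/56"). A sketch (helper name mine, calls from the diff):

static CompositeAggregationBuilder geoTileComposite() {
    GeoTileGridValuesSourceBuilder geoTile = new GeoTileGridValuesSourceBuilder("geo_point").field("geo_point");
    return new CompositeAggregationBuilder("name", Collections.singletonList(geoTile))
        .aggregateAfter(Collections.singletonMap("geo_point", "7/32/56")); // zoom/x/y of the last tile returned
}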
TermsValuesSourceBuilder("keyword").field("keyword"), new TermsValuesSourceBuilder("long").field("long")) + ).aggregateAfter(createAfterKey("keyword", "b", "long", 10L)).size(2), (result) -> { assertEquals(2, result.getBuckets().size()); assertEquals("{keyword=c, long=100}", result.afterKey().toString()); @@ -2406,16 +2376,19 @@ public void testEarlyTermination() throws Exception { ); // source field and index sorting config have different order - executeTestCase(true, true, new TermQuery(new Term("foo", "bar")), + executeTestCase( + true, + true, + new TermQuery(new Term("foo", "bar")), dataset, - () -> - new CompositeAggregationBuilder("name", - Arrays.asList( - // reverse source order - new TermsValuesSourceBuilder("keyword").field("keyword").order(SortOrder.DESC), - new TermsValuesSourceBuilder("long").field("long").order(SortOrder.DESC) - ) - ).aggregateAfter(createAfterKey("keyword", "c", "long", 10L)).size(2), + () -> new CompositeAggregationBuilder( + "name", + Arrays.asList( + // reverse source order + new TermsValuesSourceBuilder("keyword").field("keyword").order(SortOrder.DESC), + new TermsValuesSourceBuilder("long").field("long").order(SortOrder.DESC) + ) + ).aggregateAfter(createAfterKey("keyword", "c", "long", 10L)).size(2), (result) -> { assertEquals(2, result.getBuckets().size()); assertEquals("{keyword=a, long=100}", result.afterKey().toString()); @@ -2443,17 +2416,20 @@ public void testIndexSortWithDuplicate() throws Exception { ); for (SortOrder order : SortOrder.values()) { - executeTestCase(false, true, new MatchAllDocsQuery(), + executeTestCase( + false, + true, + new MatchAllDocsQuery(), dataset, - () -> - new CompositeAggregationBuilder("name", - Arrays.asList( - new DateHistogramValuesSourceBuilder("date") - .field("date") - .order(order) - .calendarInterval(DateHistogramInterval.days(1)), - new TermsValuesSourceBuilder("keyword").field("keyword") - )).size(3), + () -> new CompositeAggregationBuilder( + "name", + Arrays.asList( + new DateHistogramValuesSourceBuilder("date").field("date") + .order(order) + .calendarInterval(DateHistogramInterval.days(1)), + new TermsValuesSourceBuilder("keyword").field("keyword") + ) + ).size(3), (result) -> { assertEquals(3, result.getBuckets().size()); assertEquals("{date=1591142400000, keyword=31640}", result.afterKey().toString()); @@ -2466,17 +2442,20 @@ public void testIndexSortWithDuplicate() throws Exception { } ); - executeTestCase(false, true, new MatchAllDocsQuery(), + executeTestCase( + false, + true, + new MatchAllDocsQuery(), dataset, - () -> - new CompositeAggregationBuilder("name", - Arrays.asList( - new DateHistogramValuesSourceBuilder("date") - .field("date") - .order(order) - .calendarInterval(DateHistogramInterval.days(1)), - new TermsValuesSourceBuilder("keyword").field("keyword") - )).aggregateAfter(createAfterKey("date", 1591142400000L, "keyword", "31640")).size(3), + () -> new CompositeAggregationBuilder( + "name", + Arrays.asList( + new DateHistogramValuesSourceBuilder("date").field("date") + .order(order) + .calendarInterval(DateHistogramInterval.days(1)), + new TermsValuesSourceBuilder("keyword").field("keyword") + ) + ).aggregateAfter(createAfterKey("date", 1591142400000L, "keyword", "31640")).size(3), (result) -> { assertEquals(3, result.getBuckets().size()); assertEquals("{date=1591142400000, keyword=91640}", result.afterKey().toString()); @@ -2491,24 +2470,28 @@ public void testIndexSortWithDuplicate() throws Exception { } } - private void testSearchCase(List queries, - List>> dataset, - Supplier 
create, - Consumer verify) throws IOException { + private void testSearchCase( + List queries, + List>> dataset, + Supplier create, + Consumer verify + ) throws IOException { for (Query query : queries) { executeTestCase(false, false, query, dataset, create, verify); executeTestCase(false, true, query, dataset, create, verify); } } - private void executeTestCase(boolean forceMerge, - boolean useIndexSort, - Query query, - List>> dataset, - Supplier create, - Consumer verify) throws IOException { - Map types = - Arrays.stream(FIELD_TYPES).collect(Collectors.toMap(MappedFieldType::name, Function.identity())); + private void executeTestCase( + boolean forceMerge, + boolean useIndexSort, + Query query, + List>> dataset, + Supplier create, + Consumer verify + ) throws IOException { + Map types = Arrays.stream(FIELD_TYPES) + .collect(Collectors.toMap(MappedFieldType::name, Function.identity())); CompositeAggregationBuilder aggregationBuilder = create.get(); Sort indexSort = useIndexSort ? buildIndexSort(aggregationBuilder.sources(), types) : null; IndexSettings indexSettings = createIndexSettings(indexSort); @@ -2555,12 +2538,8 @@ private void executeTestCase(boolean forceMerge, private static IndexSettings createIndexSettings(Sort sort) { Settings.Builder builder = Settings.builder(); if (sort != null) { - String[] fields = Arrays.stream(sort.getSort()) - .map(SortField::getField) - .toArray(String[]::new); - String[] orders = Arrays.stream(sort.getSort()) - .map((o) -> o.getReverse() ? "desc" : "asc") - .toArray(String[]::new); + String[] fields = Arrays.stream(sort.getSort()).map(SortField::getField).toArray(String[]::new); + String[] orders = Arrays.stream(sort.getSort()).map((o) -> o.getReverse() ? "desc" : "asc").toArray(String[]::new); builder.putList("index.sort.field", fields); builder.putList("index.sort.order", orders); } @@ -2592,9 +2571,13 @@ private void addToDocument(Document doc, Map> keys) { doc.add(new SortedSetDocValuesField(name, new BytesRef(InetAddressPoint.encode((InetAddress) value)))); doc.add(new InetAddressPoint(name, (InetAddress) value)); } else if (value instanceof GeoPoint) { - GeoPoint point = (GeoPoint)value; - doc.add(new SortedNumericDocValuesField(name, - GeoTileUtils.longEncode(point.lon(), point.lat(), GeoTileGridAggregationBuilder.DEFAULT_PRECISION))); + GeoPoint point = (GeoPoint) value; + doc.add( + new SortedNumericDocValuesField( + name, + GeoTileUtils.longEncode(point.lon(), point.lat(), GeoTileGridAggregationBuilder.DEFAULT_PRECISION) + ) + ); doc.add(new LatLonPoint(name, point.lat(), point.lon())); } else { throw new AssertionError("invalid object: " + value.getClass().getSimpleName()); @@ -2606,9 +2589,9 @@ private void addToDocument(Document doc, Map> keys) { private static Map createAfterKey(Object... fields) { assert fields.length % 2 == 0; final Map map = new HashMap<>(); - for (int i = 0; i < fields.length; i+=2) { + for (int i = 0; i < fields.length; i += 2) { String field = (String) fields[i]; - map.put(field, fields[i+1]); + map.put(field, fields[i + 1]); } return map; } @@ -2617,12 +2600,12 @@ private static Map createAfterKey(Object... fields) { private static Map> createDocument(Object... 
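createIndexSettings above is what backs the early-termination and index-sort cases: the composite sources are translated into index.sort.field / index.sort.order settings so the aggregation can stop early once the index sort matches the source order. A minimal sketch of the settings it would produce for the ascending keyword+long sources (field names here are the test's, the putList calls are the ones shown above, the method name is mine):

static Settings sortedIndexSettings() {
    return Settings.builder()
        .putList("index.sort.field", "keyword", "long") // one entry per composite source, in source order
        .putList("index.sort.order", "asc", "asc")      // must line up with the fields above
        .build();
}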
fields) { assert fields.length % 2 == 0; final Map> map = new HashMap<>(); - for (int i = 0; i < fields.length; i+=2) { + for (int i = 0; i < fields.length; i += 2) { String field = (String) fields[i]; - if (fields[i+1] instanceof List) { - map.put(field, (List) fields[i+1]); + if (fields[i + 1] instanceof List) { + map.put(field, (List) fields[i + 1]); } else { - map.put(field, Collections.singletonList(fields[i+1])); + map.put(field, Collections.singletonList(fields[i + 1])); } } return map; @@ -2634,10 +2617,10 @@ private Document createNestedDocument(String id, String nestedPath, Object... ra doc.add(new Field(IdFieldMapper.NAME, Uid.encodeId(id), IdFieldMapper.Defaults.NESTED_FIELD_TYPE)); doc.add(new Field(NestedPathFieldMapper.NAME, nestedPath, NestedPathFieldMapper.Defaults.FIELD_TYPE)); Object[] fields = new Object[rawFields.length]; - for (int i = 0; i < fields.length; i+=2) { + for (int i = 0; i < fields.length; i += 2) { assert rawFields[i] instanceof String; - fields[i] = nestedPath + "." + rawFields[i]; - fields[i+1] = rawFields[i+1]; + fields[i] = nestedPath + "." + rawFields[i]; + fields[i + 1] = rawFields[i + 1]; } addToDocument(doc, createDocument(fields)); return doc; diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeValuesCollectorQueueTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeValuesCollectorQueueTests.java index 5a08b7554f56e..9a5cdcbfcc70a 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeValuesCollectorQueueTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeValuesCollectorQueueTests.java @@ -64,21 +64,25 @@ static class ClassAndName { } public void testRandomLong() throws IOException { - testRandomCase(new ClassAndName(createNumber("long", LONG) , Long.class)); + testRandomCase(new ClassAndName(createNumber("long", LONG), Long.class)); } public void testRandomDouble() throws IOException { - testRandomCase(new ClassAndName(createNumber("double", DOUBLE) , Double.class)); + testRandomCase(new ClassAndName(createNumber("double", DOUBLE), Double.class)); } public void testRandomDoubleAndLong() throws IOException { - testRandomCase(new ClassAndName(createNumber("double", DOUBLE), Double.class), - new ClassAndName(createNumber("long", LONG), Long.class)); + testRandomCase( + new ClassAndName(createNumber("double", DOUBLE), Double.class), + new ClassAndName(createNumber("long", LONG), Long.class) + ); } public void testRandomDoubleAndKeyword() throws IOException { - testRandomCase(new ClassAndName(createNumber("double", DOUBLE), Double.class), - new ClassAndName(createKeyword("keyword"), BytesRef.class)); + testRandomCase( + new ClassAndName(createNumber("double", DOUBLE), Double.class), + new ClassAndName(createKeyword("keyword"), BytesRef.class) + ); } public void testRandomKeyword() throws IOException { @@ -86,23 +90,31 @@ public void testRandomKeyword() throws IOException { } public void testRandomLongAndKeyword() throws IOException { - testRandomCase(new ClassAndName(createNumber("long", LONG), Long.class), - new ClassAndName(createKeyword("keyword"), BytesRef.class)); + testRandomCase( + new ClassAndName(createNumber("long", LONG), Long.class), + new ClassAndName(createKeyword("keyword"), BytesRef.class) + ); } public void testRandomLongAndDouble() throws IOException { - testRandomCase(new ClassAndName(createNumber("long", LONG), Long.class), - new 
ClassAndName(createNumber("double", DOUBLE) , Double.class)); + testRandomCase( + new ClassAndName(createNumber("long", LONG), Long.class), + new ClassAndName(createNumber("double", DOUBLE), Double.class) + ); } public void testRandomKeywordAndLong() throws IOException { - testRandomCase(new ClassAndName(createKeyword("keyword"), BytesRef.class), - new ClassAndName(createNumber("long", LONG), Long.class)); + testRandomCase( + new ClassAndName(createKeyword("keyword"), BytesRef.class), + new ClassAndName(createNumber("long", LONG), Long.class) + ); } public void testRandomKeywordAndDouble() throws IOException { - testRandomCase(new ClassAndName(createKeyword("keyword"), BytesRef.class), - new ClassAndName(createNumber("double", DOUBLE), Double.class)); + testRandomCase( + new ClassAndName(createKeyword("keyword"), BytesRef.class), + new ClassAndName(createNumber("double", DOUBLE), Double.class) + ); } public void testRandom() throws IOException { @@ -121,7 +133,7 @@ public void testRandom() throws IOException { types[i] = new ClassAndName(createKeyword(Integer.toString(i)), BytesRef.class); break; default: - assert(false); + assert (false); } } testRandomCase(types); @@ -136,10 +148,8 @@ private void testRandomCase(ClassAndName... types) throws IOException { } } - private void testRandomCase(boolean forceMerge, - boolean missingBucket, - int indexSortSourcePrefix, - ClassAndName... types) throws IOException { + private void testRandomCase(boolean forceMerge, boolean missingBucket, int indexSortSourcePrefix, ClassAndName... types) + throws IOException { final BigArrays bigArrays = BigArrays.NON_RECYCLING_INSTANCE; int numDocs = randomIntBetween(50, 100); List[]> possibleValues = new ArrayList<>(); @@ -188,7 +198,7 @@ private void testRandomCase(boolean forceMerge, List>> docValues = new ArrayList<>(); boolean hasAllField = true; for (int j = 0; j < types.length; j++) { - int numValues = indexSortSourcePrefix-1 >= j ? 1 : randomIntBetween(0, 5); + int numValues = indexSortSourcePrefix - 1 >= j ? 
1 : randomIntBetween(0, 5); List> values = new ArrayList<>(); if (numValues == 0) { hasAllField = false; @@ -203,8 +213,12 @@ private void testRandomCase(boolean forceMerge, document.add(new SortedNumericDocValuesField(types[j].fieldType.name(), value)); document.add(new LongPoint(types[j].fieldType.name(), value)); } else if (types[j].clazz == Double.class) { - document.add(new SortedNumericDocValuesField(types[j].fieldType.name(), - NumericUtils.doubleToSortableLong((Double) values.get(k)))); + document.add( + new SortedNumericDocValuesField( + types[j].fieldType.name(), + NumericUtils.doubleToSortableLong((Double) values.get(k)) + ) + ); } else if (types[j].clazz == BytesRef.class) { BytesRef value = (BytesRef) values.get(k); document.add(new SortedSetDocValuesField(types[j].fieldType.name(), (BytesRef) values.get(k))); @@ -277,25 +291,30 @@ private void testRandomCase(boolean forceMerge, ); } } else { - assert(false); + assert (false); } } CompositeKey[] expected = keys.toArray(new CompositeKey[0]); Arrays.sort(expected, (a, b) -> compareKey(a, b)); - for (boolean withProducer : new boolean[] {true, false}) { + for (boolean withProducer : new boolean[] { true, false }) { int pos = 0; CompositeKey last = null; while (pos < size) { - final CompositeValuesCollectorQueue queue = - new CompositeValuesCollectorQueue(BigArrays.NON_RECYCLING_INSTANCE, sources, size); + final CompositeValuesCollectorQueue queue = new CompositeValuesCollectorQueue( + BigArrays.NON_RECYCLING_INSTANCE, + sources, + size + ); if (last != null) { queue.setAfterKey(last); } final SortedDocsProducer docsProducer = sources[0].createSortedDocsProducerOrNull(reader, new MatchAllDocsQuery()); for (LeafReaderContext leafReaderContext : reader.leaves()) { if (docsProducer != null && withProducer) { - assertEquals(DocIdSet.EMPTY, - docsProducer.processLeaf(new MatchAllDocsQuery(), queue, leafReaderContext, false)); + assertEquals( + DocIdSet.EMPTY, + docsProducer.processLeaf(new MatchAllDocsQuery(), queue, leafReaderContext, false) + ); } else { final LeafBucketCollector leafCollector = new LeafBucketCollector() { @Override @@ -368,8 +387,13 @@ private static List createListCombinations(List return keys; } - private static void createListCombinations(Comparable[] key, List>> values, - int pos, int maxPos, List keys) { + private static void createListCombinations( + Comparable[] key, + List>> values, + int pos, + int maxPos, + List keys + ) { if (pos == maxPos) { keys.add(new CompositeKey(key.clone())); } else { diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/InternalCompositeTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/InternalCompositeTests.java index 7d03695384d68..6c60177467a7e 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/InternalCompositeTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/InternalCompositeTests.java @@ -54,8 +54,11 @@ private static DocValueFormat randomDocValueFormat(boolean isLong) { if (isLong) { // we use specific format only for date histogram on a long/date field if (randomBoolean()) { - return new DocValueFormat.DateTime(DateFormatter.forPattern("epoch_second"), ZoneOffset.ofHours(1), - DateFieldMapper.Resolution.MILLISECONDS); + return new DocValueFormat.DateTime( + DateFormatter.forPattern("epoch_second"), + ZoneOffset.ofHours(1), + DateFieldMapper.Resolution.MILLISECONDS + ); } else { return DocValueFormat.RAW; } @@ -98,14 
+101,17 @@ protected Class<ParsedComposite> implementationClass() { return ParsedComposite.class; } - protected
<P extends ParsedAggregation>
P parseAndAssert(final InternalAggregation aggregation, - final boolean shuffled, final boolean addRandomFields) throws IOException { + protected
<P extends ParsedAggregation>
P parseAndAssert( + final InternalAggregation aggregation, + final boolean shuffled, + final boolean addRandomFields + ) throws IOException { return super.parseAndAssert(aggregation, false, false); } private CompositeKey createCompositeKey() { Comparable[] keys = new Comparable[sourceNames.size()]; - for (int j = 0; j < keys.length; j++) { + for (int j = 0; j < keys.length; j++) { switch (types[j]) { case 0: keys[j] = randomLong(); @@ -141,18 +147,24 @@ protected InternalComposite createTestInstance(String name, Map int numBuckets = randomIntBetween(0, size); List buckets = new ArrayList<>(); TreeSet keys = new TreeSet<>(getKeyComparator()); - for (int i = 0; i < numBuckets; i++) { + for (int i = 0; i < numBuckets; i++) { final CompositeKey key = createCompositeKey(); if (keys.contains(key)) { continue; } keys.add(key); - InternalComposite.InternalBucket bucket = - new InternalComposite.InternalBucket(sourceNames, formats, key, reverseMuls, 1L, aggregations); + InternalComposite.InternalBucket bucket = new InternalComposite.InternalBucket( + sourceNames, + formats, + key, + reverseMuls, + 1L, + aggregations + ); buckets.add(bucket); } Collections.sort(buckets, (o1, o2) -> o1.compareKey(o2)); - CompositeKey lastBucket = buckets.size() > 0 ? buckets.get(buckets.size()-1).getRawKey() : null; + CompositeKey lastBucket = buckets.size() > 0 ? buckets.get(buckets.size() - 1).getRawKey() : null; return new InternalComposite(name, size, sourceNames, formats, buckets, lastBucket, reverseMuls, randomBoolean(), metadata); } @@ -162,7 +174,7 @@ protected InternalComposite mutateInstance(InternalComposite instance) throws IO Map metadata = instance.getMetadata(); int code = randomIntBetween(0, 2); int[] reverseMuls = instance.getReverseMuls(); - switch(code) { + switch (code) { case 0: int[] newReverseMuls = new int[reverseMuls.length]; for (int i = 0; i < reverseMuls.length; i++) { @@ -172,8 +184,15 @@ protected InternalComposite mutateInstance(InternalComposite instance) throws IO break; case 1: buckets = new ArrayList<>(buckets); - buckets.add(new InternalComposite.InternalBucket(sourceNames, formats, createCompositeKey(), reverseMuls, - randomLongBetween(1, 100), InternalAggregations.EMPTY) + buckets.add( + new InternalComposite.InternalBucket( + sourceNames, + formats, + createCompositeKey(), + reverseMuls, + randomLongBetween(1, 100), + InternalAggregations.EMPTY + ) ); break; case 2: @@ -187,9 +206,18 @@ protected InternalComposite mutateInstance(InternalComposite instance) throws IO default: throw new AssertionError("illegal branch"); } - CompositeKey lastBucket = buckets.size() > 0 ? buckets.get(buckets.size()-1).getRawKey() : null; - return new InternalComposite(instance.getName(), instance.getSize(), sourceNames, formats, buckets, lastBucket, reverseMuls, - randomBoolean(), metadata); + CompositeKey lastBucket = buckets.size() > 0 ? 
buckets.get(buckets.size() - 1).getRawKey() : null; + return new InternalComposite( + instance.getName(), + instance.getSize(), + sourceNames, + formats, + buckets, + lastBucket, + reverseMuls, + randomBoolean(), + metadata + ); } @Override @@ -213,8 +241,7 @@ protected void assertReduced(InternalComposite reduced, List } public void testReduceSame() throws IOException { - InternalComposite result = createTestInstance(randomAlphaOfLength(10), Collections.emptyMap(), - InternalAggregations.EMPTY); + InternalComposite result = createTestInstance(randomAlphaOfLength(10), Collections.emptyMap(), InternalAggregations.EMPTY); List toReduce = new ArrayList<>(); int numSame = randomIntBetween(1, 10); for (int i = 0; i < numSame; i++) { @@ -226,7 +253,7 @@ public void testReduceSame() throws IOException { for (InternalComposite.InternalBucket bucket : finalReduce.getBuckets()) { InternalComposite.InternalBucket expectedBucket = expectedIt.next(); assertThat(bucket.getRawKey(), equalTo(expectedBucket.getRawKey())); - assertThat(bucket.getDocCount(), equalTo(expectedBucket.getDocCount()*numSame)); + assertThat(bucket.getDocCount(), equalTo(expectedBucket.getDocCount() * numSame)); } } @@ -236,8 +263,17 @@ public void testReduceSame() throws IOException { public void testReduceUnmapped() throws IOException { var mapped = createTestInstance(randomAlphaOfLength(10), emptyMap(), InternalAggregations.EMPTY); var rawFormats = formats.stream().map(f -> DocValueFormat.RAW).collect(toList()); - var unmapped = new InternalComposite(mapped.getName(), mapped.getSize(), sourceNames, - rawFormats, emptyList(), null, reverseMuls, true, emptyMap()); + var unmapped = new InternalComposite( + mapped.getName(), + mapped.getSize(), + sourceNames, + rawFormats, + emptyList(), + null, + reverseMuls, + true, + emptyMap() + ); List toReduce = Arrays.asList(unmapped, mapped); Collections.shuffle(toReduce, random()); InternalComposite finalReduce = (InternalComposite) unmapped.reduce(toReduce, emptyReduceContextBuilder().forFinalReduction()); @@ -255,104 +291,56 @@ public void testReduceUnmapped() throws IOException { } public void testCompareCompositeKeyBiggerFieldName() { - InternalComposite.ArrayMap key1 = createMap( - Arrays.asList("field1", "field2"), - new Comparable[]{1, 2} - ); - InternalComposite.ArrayMap key2 = createMap( - Arrays.asList("field3", "field2"), - new Comparable[]{1, 2} - ); + InternalComposite.ArrayMap key1 = createMap(Arrays.asList("field1", "field2"), new Comparable[] { 1, 2 }); + InternalComposite.ArrayMap key2 = createMap(Arrays.asList("field3", "field2"), new Comparable[] { 1, 2 }); assertThat(key1.compareTo(key2), lessThan(0)); } public void testCompareCompositeKeySmallerFieldName() { - InternalComposite.ArrayMap key1 = createMap( - Arrays.asList("field3", "field2"), - new Comparable[]{1, 2} - ); - InternalComposite.ArrayMap key2 = createMap( - Arrays.asList("field1", "field2"), - new Comparable[]{1, 2} - ); + InternalComposite.ArrayMap key1 = createMap(Arrays.asList("field3", "field2"), new Comparable[] { 1, 2 }); + InternalComposite.ArrayMap key2 = createMap(Arrays.asList("field1", "field2"), new Comparable[] { 1, 2 }); assertThat(key1.compareTo(key2), greaterThan(0)); } public void testCompareCompositeKeyBiggerValue() { - InternalComposite.ArrayMap key1 = createMap( - Arrays.asList("field1", "field2"), - new Comparable[]{1, 2} - ); - InternalComposite.ArrayMap key2 = createMap( - Arrays.asList("field3", "field2"), - new Comparable[]{2, 3} - ); + InternalComposite.ArrayMap key1 = 
createMap(Arrays.asList("field1", "field2"), new Comparable[] { 1, 2 }); + InternalComposite.ArrayMap key2 = createMap(Arrays.asList("field3", "field2"), new Comparable[] { 2, 3 }); assertThat(key1.compareTo(key2), lessThan(0)); } public void testCompareCompositeKeySmallerValue() { - InternalComposite.ArrayMap key1 = createMap( - Arrays.asList("field3", "field2"), - new Comparable[]{1, 2} - ); - InternalComposite.ArrayMap key2 = createMap( - Arrays.asList("field1", "field2"), - new Comparable[]{2, 3} - ); + InternalComposite.ArrayMap key1 = createMap(Arrays.asList("field3", "field2"), new Comparable[] { 1, 2 }); + InternalComposite.ArrayMap key2 = createMap(Arrays.asList("field1", "field2"), new Comparable[] { 2, 3 }); assertThat(key1.compareTo(key2), greaterThan(0)); } public void testCompareCompositeKeyNullValueIsSmaller1() { - InternalComposite.ArrayMap key1 = createMap( - Arrays.asList("field1", "field2"), - new Comparable[]{null, 20} - ); - InternalComposite.ArrayMap key2 = createMap( - Arrays.asList("field1", "field2"), - new Comparable[]{1, 2} - ); + InternalComposite.ArrayMap key1 = createMap(Arrays.asList("field1", "field2"), new Comparable[] { null, 20 }); + InternalComposite.ArrayMap key2 = createMap(Arrays.asList("field1", "field2"), new Comparable[] { 1, 2 }); assertThat(key1.compareTo(key2), lessThan(0)); } public void testCompareCompositeKeyNullValueIsSmaller2() { - InternalComposite.ArrayMap key1 = createMap( - Arrays.asList("field1", "field2"), - new Comparable[]{1, 2} - ); - InternalComposite.ArrayMap key2 = createMap( - Arrays.asList("field1", "field2"), - new Comparable[]{null, 20} - ); + InternalComposite.ArrayMap key1 = createMap(Arrays.asList("field1", "field2"), new Comparable[] { 1, 2 }); + InternalComposite.ArrayMap key2 = createMap(Arrays.asList("field1", "field2"), new Comparable[] { null, 20 }); assertThat(key1.compareTo(key2), greaterThan(0)); } public void testCompareCompositeKeyMoreFieldsIsGreater() { - InternalComposite.ArrayMap key1 = createMap( - Arrays.asList("field1", "field2"), - new Comparable[]{1, 2} - ); - InternalComposite.ArrayMap key2 = createMap(Arrays.asList("field1", "field2", "field3"),new Comparable[]{1, 2, null}); + InternalComposite.ArrayMap key1 = createMap(Arrays.asList("field1", "field2"), new Comparable[] { 1, 2 }); + InternalComposite.ArrayMap key2 = createMap(Arrays.asList("field1", "field2", "field3"), new Comparable[] { 1, 2, null }); assertThat(key1.compareTo(key2), lessThan(0)); } public void testCompareCompositeKeyLessFieldsIsLesser() { - InternalComposite.ArrayMap key1 = createMap( - Arrays.asList("field1", "field2", "field3"), - new Comparable[]{1, 2, null} - ); - InternalComposite.ArrayMap key2 = createMap(Arrays.asList("field1", "field2"),new Comparable[]{1, 2}); + InternalComposite.ArrayMap key1 = createMap(Arrays.asList("field1", "field2", "field3"), new Comparable[] { 1, 2, null }); + InternalComposite.ArrayMap key2 = createMap(Arrays.asList("field1", "field2"), new Comparable[] { 1, 2 }); assertThat(key1.compareTo(key2), greaterThan(0)); } public void testCompareCompositeKeyEqual() { - InternalComposite.ArrayMap key1 = createMap( - Arrays.asList("field1", "field2", "field3"), - new Comparable[]{null, 1, 2} - ); - InternalComposite.ArrayMap key2 = createMap( - Arrays.asList("field1", "field2", "field3"), - new Comparable[]{null, 1, 2} - ); + InternalComposite.ArrayMap key1 = createMap(Arrays.asList("field1", "field2", "field3"), new Comparable[] { null, 1, 2 }); + InternalComposite.ArrayMap key2 = 
createMap(Arrays.asList("field1", "field2", "field3"), new Comparable[] { null, 1, 2 }); assertThat(key1.compareTo(key1), equalTo(0)); assertThat(key1.equals(key1), is(true)); @@ -362,24 +350,17 @@ public void testCompareCompositeKeyEqual() { } public void testCompareCompositeKeyValuesHaveDifferentTypes() { - InternalComposite.ArrayMap key1 = createMap( - Arrays.asList("field1", "field2"), - new Comparable[]{1, 2} - ); + InternalComposite.ArrayMap key1 = createMap(Arrays.asList("field1", "field2"), new Comparable[] { 1, 2 }); - InternalComposite.ArrayMap key2 = createMap( - Arrays.asList("field1", "field2"), - new Comparable[]{"1", 2} - ); + InternalComposite.ArrayMap key2 = createMap(Arrays.asList("field1", "field2"), new Comparable[] { "1", 2 }); ClassCastException exception = expectThrows(ClassCastException.class, () -> key1.compareTo(key2)); - assertThat(exception.getMessage(), - containsString("java.lang.String cannot be cast to")); + assertThat(exception.getMessage(), containsString("java.lang.String cannot be cast to")); } public void testFormatObjectChecked() { DocValueFormat weekYearMonth = new DocValueFormat.DateTime( - // YYYY is week-based year. The parser will ignore MM (month-of-year) and dd (day-of-month) + // YYYY is week-based year. The parser will ignore MM (month-of-year) and dd (day-of-month) DateFormatter.forPattern("YYYY-MM-dd"), ZoneOffset.UTC, DateFieldMapper.Resolution.MILLISECONDS @@ -390,13 +371,13 @@ public void testFormatObjectChecked() { public void testFormatDateEpochTimezone() { DocValueFormat epochSecond = new DocValueFormat.DateTime( - // YYYY is week-based year. The parser will ignore MM (month-of-year) and dd (day-of-month) + // YYYY is week-based year. The parser will ignore MM (month-of-year) and dd (day-of-month) DateFormatter.forPattern("epoch_second"), ZoneOffset.ofHours(-2), DateFieldMapper.Resolution.MILLISECONDS ); DocValueFormat epochMillis = new DocValueFormat.DateTime( - // YYYY is week-based year. The parser will ignore MM (month-of-year) and dd (day-of-month) + // YYYY is week-based year. 
The parser will ignore MM (month-of-year) and dd (day-of-month) DateFormatter.forPattern("epoch_millis"), ZoneOffset.ofHours(-2), DateFieldMapper.Resolution.MILLISECONDS @@ -410,12 +391,7 @@ public void testFormatDateEpochTimezone() { } private InternalComposite.ArrayMap createMap(List fields, Comparable[] values) { - List formats = IntStream.range(0, fields.size()) - .mapToObj(i -> DocValueFormat.RAW).collect(Collectors.toList()); - return new InternalComposite.ArrayMap( - fields, - formats, - values - ); + List formats = IntStream.range(0, fields.size()).mapToObj(i -> DocValueFormat.RAW).collect(Collectors.toList()); + return new InternalComposite.ArrayMap(fields, formats, values); } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/SingleDimensionValuesSourceTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/SingleDimensionValuesSourceTests.java index 8d9386f908900..1529236a4c2e1 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/SingleDimensionValuesSourceTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/SingleDimensionValuesSourceTests.java @@ -46,8 +46,7 @@ public void testBinarySorted() { assertNotNull(source.createSortedDocsProducerOrNull(reader, new MatchAllDocsQuery())); assertNotNull(source.createSortedDocsProducerOrNull(reader, null)); assertNull(source.createSortedDocsProducerOrNull(reader, new TermQuery(new Term("foo", "bar")))); - assertNull(source.createSortedDocsProducerOrNull(reader, - new TermQuery(new Term("keyword", "toto)")))); + assertNull(source.createSortedDocsProducerOrNull(reader, new TermQuery(new Term("keyword", "toto)")))); source = new BinaryValuesSource( BigArrays.NON_RECYCLING_INSTANCE, @@ -75,15 +74,7 @@ public void testBinarySorted() { assertNull(source.createSortedDocsProducerOrNull(reader, null)); MappedFieldType ip = new IpFieldMapper.IpFieldType("ip"); - source = new BinaryValuesSource( - BigArrays.NON_RECYCLING_INSTANCE, - (b) -> {}, - ip, - context -> null, - DocValueFormat.RAW, - false, - 1, - 1); + source = new BinaryValuesSource(BigArrays.NON_RECYCLING_INSTANCE, (b) -> {}, ip, context -> null, DocValueFormat.RAW, false, 1, 1); assertNull(source.createSortedDocsProducerOrNull(reader, null)); } @@ -92,7 +83,8 @@ public void testOrdinalsSorted() { OrdinalValuesSource source = new OrdinalValuesSource( BigArrays.NON_RECYCLING_INSTANCE, (b) -> {}, - keyword, context -> null, + keyword, + context -> null, DocValueFormat.RAW, false, 1, @@ -103,8 +95,7 @@ public void testOrdinalsSorted() { assertNotNull(source.createSortedDocsProducerOrNull(reader, new MatchAllDocsQuery())); assertNotNull(source.createSortedDocsProducerOrNull(reader, null)); assertNull(source.createSortedDocsProducerOrNull(reader, new TermQuery(new Term("foo", "bar")))); - assertNull(source.createSortedDocsProducerOrNull(reader, - new TermQuery(new Term("keyword", "toto)")))); + assertNull(source.createSortedDocsProducerOrNull(reader, new TermQuery(new Term("keyword", "toto)")))); source = new OrdinalValuesSource( BigArrays.NON_RECYCLING_INSTANCE, @@ -134,16 +125,7 @@ public void testOrdinalsSorted() { assertNull(source.createSortedDocsProducerOrNull(reader, new TermQuery(new Term("foo", "bar")))); final MappedFieldType ip = new IpFieldMapper.IpFieldType("ip"); - source = new OrdinalValuesSource( - BigArrays.NON_RECYCLING_INSTANCE, - (b) -> {}, - ip, - context -> null, - DocValueFormat.RAW, - false, - 1, - 1 - ); + source = new 
OrdinalValuesSource(BigArrays.NON_RECYCLING_INSTANCE, (b) -> {}, ip, context -> null, DocValueFormat.RAW, false, 1, 1); assertNull(source.createSortedDocsProducerOrNull(reader, null)); assertNull(source.createSortedDocsProducerOrNull(reader, new TermQuery(new Term("foo", "bar")))); } @@ -152,10 +134,10 @@ public void testNumericSorted() { for (NumberFieldMapper.NumberType numberType : NumberFieldMapper.NumberType.values()) { MappedFieldType number = new NumberFieldMapper.NumberFieldType("number", NumberFieldMapper.NumberType.LONG); final SingleDimensionValuesSource source; - if (numberType == NumberFieldMapper.NumberType.BYTE || - numberType == NumberFieldMapper.NumberType.SHORT || - numberType == NumberFieldMapper.NumberType.INTEGER || - numberType == NumberFieldMapper.NumberType.LONG) { + if (numberType == NumberFieldMapper.NumberType.BYTE + || numberType == NumberFieldMapper.NumberType.SHORT + || numberType == NumberFieldMapper.NumberType.INTEGER + || numberType == NumberFieldMapper.NumberType.LONG) { source = new LongValuesSource( BigArrays.NON_RECYCLING_INSTANCE, @@ -172,13 +154,22 @@ public void testNumericSorted() { assertNotNull(source.createSortedDocsProducerOrNull(reader, new MatchAllDocsQuery())); assertNotNull(source.createSortedDocsProducerOrNull(reader, null)); assertNotNull(source.createSortedDocsProducerOrNull(reader, LongPoint.newRangeQuery("number", 0, 1))); - assertNotNull(source.createSortedDocsProducerOrNull(reader, new IndexOrDocValuesQuery( - LongPoint.newRangeQuery("number", 0, 1), new MatchAllDocsQuery()))); + assertNotNull( + source.createSortedDocsProducerOrNull( + reader, + new IndexOrDocValuesQuery(LongPoint.newRangeQuery("number", 0, 1), new MatchAllDocsQuery()) + ) + ); assertNotNull(source.createSortedDocsProducerOrNull(reader, new DocValuesFieldExistsQuery("number"))); - assertNotNull(source.createSortedDocsProducerOrNull(reader, - new ConstantScoreQuery(new DocValuesFieldExistsQuery("number")))); - assertNotNull(source.createSortedDocsProducerOrNull(reader, new BoostQuery(new IndexOrDocValuesQuery( - LongPoint.newRangeQuery("number", 0, 1), new MatchAllDocsQuery()), 2.0f))); + assertNotNull( + source.createSortedDocsProducerOrNull(reader, new ConstantScoreQuery(new DocValuesFieldExistsQuery("number"))) + ); + assertNotNull( + source.createSortedDocsProducerOrNull( + reader, + new BoostQuery(new IndexOrDocValuesQuery(LongPoint.newRangeQuery("number", 0, 1), new MatchAllDocsQuery()), 2.0f) + ) + ); assertNull(source.createSortedDocsProducerOrNull(reader, new TermQuery(new Term("keyword", "toto)")))); LongValuesSource sourceWithMissing = new LongValuesSource( @@ -189,13 +180,18 @@ public void testNumericSorted() { DocValueFormat.RAW, true, 1, - 1); + 1 + ); assertNull(sourceWithMissing.createSortedDocsProducerOrNull(reader, new MatchAllDocsQuery())); assertNull(sourceWithMissing.createSortedDocsProducerOrNull(reader, null)); assertNull(sourceWithMissing.createSortedDocsProducerOrNull(reader, new TermQuery(new Term("keyword", "toto)")))); assertNull(sourceWithMissing.createSortedDocsProducerOrNull(reader, new DocValuesFieldExistsQuery("number"))); - assertNull(sourceWithMissing.createSortedDocsProducerOrNull(reader, - new ConstantScoreQuery(new DocValuesFieldExistsQuery("number")))); + assertNull( + sourceWithMissing.createSortedDocsProducerOrNull( + reader, + new ConstantScoreQuery(new DocValuesFieldExistsQuery("number")) + ) + ); LongValuesSource sourceRev = new LongValuesSource( BigArrays.NON_RECYCLING_INSTANCE, @@ -209,30 +205,32 @@ public void 
testNumericSorted() { ); assertNull(sourceRev.createSortedDocsProducerOrNull(reader, null)); assertNull(sourceRev.createSortedDocsProducerOrNull(reader, new DocValuesFieldExistsQuery("number"))); - assertNull(sourceRev.createSortedDocsProducerOrNull(reader, - new ConstantScoreQuery(new DocValuesFieldExistsQuery("number")))); - assertNull(sourceWithMissing.createSortedDocsProducerOrNull(reader, new TermQuery(new Term("keyword", "toto)")))); - } else if (numberType == NumberFieldMapper.NumberType.HALF_FLOAT || - numberType == NumberFieldMapper.NumberType.FLOAT || - numberType == NumberFieldMapper.NumberType.DOUBLE) { - source = new DoubleValuesSource( - BigArrays.NON_RECYCLING_INSTANCE, - number, - context -> null, - DocValueFormat.RAW, - false, - 1, - 1 + assertNull( + sourceRev.createSortedDocsProducerOrNull(reader, new ConstantScoreQuery(new DocValuesFieldExistsQuery("number"))) ); - IndexReader reader = mockIndexReader(1, 1); - assertNull(source.createSortedDocsProducerOrNull(reader, null)); - assertNull(source.createSortedDocsProducerOrNull(reader, new DocValuesFieldExistsQuery("number"))); - assertNull(source.createSortedDocsProducerOrNull(reader, new TermQuery(new Term("keyword", "toto)")))); - assertNull(source.createSortedDocsProducerOrNull(reader, - new ConstantScoreQuery(new DocValuesFieldExistsQuery("number")))); - } else{ - throw new AssertionError ("missing type:" + numberType.typeName()); - } + assertNull(sourceWithMissing.createSortedDocsProducerOrNull(reader, new TermQuery(new Term("keyword", "toto)")))); + } else if (numberType == NumberFieldMapper.NumberType.HALF_FLOAT + || numberType == NumberFieldMapper.NumberType.FLOAT + || numberType == NumberFieldMapper.NumberType.DOUBLE) { + source = new DoubleValuesSource( + BigArrays.NON_RECYCLING_INSTANCE, + number, + context -> null, + DocValueFormat.RAW, + false, + 1, + 1 + ); + IndexReader reader = mockIndexReader(1, 1); + assertNull(source.createSortedDocsProducerOrNull(reader, null)); + assertNull(source.createSortedDocsProducerOrNull(reader, new DocValuesFieldExistsQuery("number"))); + assertNull(source.createSortedDocsProducerOrNull(reader, new TermQuery(new Term("keyword", "toto)")))); + assertNull( + source.createSortedDocsProducerOrNull(reader, new ConstantScoreQuery(new DocValuesFieldExistsQuery("number"))) + ); + } else { + throw new AssertionError("missing type:" + numberType.typeName()); + } assertNull(source.createSortedDocsProducerOrNull(mockIndexReader(100, 49), null)); } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/filter/FilterAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/filter/FilterAggregatorTests.java index a9f542d14c83c..6b12b9a2c8dc6 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/filter/FilterAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/filter/FilterAggregatorTests.java @@ -51,8 +51,7 @@ public void testEmpty() throws Exception { IndexSearcher indexSearcher = newSearcher(indexReader, true, true); QueryBuilder filter = QueryBuilders.termQuery("field", randomAlphaOfLength(5)); FilterAggregationBuilder builder = new FilterAggregationBuilder("test", filter); - InternalFilter response = searchAndReduce(indexSearcher, new MatchAllDocsQuery(), builder, - fieldType); + InternalFilter response = searchAndReduce(indexSearcher, new MatchAllDocsQuery(), builder, fieldType); assertEquals(response.getDocCount(), 0); 
assertFalse(AggregationInspectionHelper.hasValue(response)); indexReader.close(); @@ -71,7 +70,7 @@ public void testRandom() throws Exception { // make sure we have more than one segment to test the merge indexWriter.commit(); } - int value = randomInt(maxTerm-1); + int value = randomInt(maxTerm - 1); expectedBucketCount[value] += 1; document.add(new Field("field", Integer.toString(value), KeywordFieldMapper.Defaults.FIELD_TYPE)); indexWriter.addDocument(document); @@ -120,10 +119,17 @@ public void testBucketComparator() throws IOException { c = agg.bucketComparator("doc_count", SortOrder.ASC); assertThat(c.compare(0, 1), greaterThan(0)); assertThat(c.compare(1, 0), lessThan(0)); - Exception e = expectThrows(IllegalArgumentException.class, () -> - agg.bucketComparator("garbage", randomFrom(SortOrder.values()))); - assertThat(e.getMessage(), equalTo("Ordering on a single-bucket aggregation can only be done on its doc_count. " - + "Either drop the key (a la \"test\") or change it to \"doc_count\" (a la \"test.doc_count\") or \"key\".")); + Exception e = expectThrows( + IllegalArgumentException.class, + () -> agg.bucketComparator("garbage", randomFrom(SortOrder.values())) + ); + assertThat( + e.getMessage(), + equalTo( + "Ordering on a single-bucket aggregation can only be done on its doc_count. " + + "Either drop the key (a la \"test\") or change it to \"doc_count\" (a la \"test.doc_count\") or \"key\"." + ) + ); } } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/filter/FiltersAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/filter/FiltersAggregatorTests.java index 075e177fab3fe..9e3b53e3ef004 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/filter/FiltersAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/filter/FiltersAggregatorTests.java @@ -134,30 +134,26 @@ public void testBuildEmpty() throws IOException { if (askForOtherBucket) { builder.otherBucket(true).otherBucketKey("other"); } - withAggregator( - builder, - new MatchAllDocsQuery(), - iw -> {}, - (searcher, aggregator) -> { - InternalFilters result = (InternalFilters) aggregator.buildEmptyAggregation(); - for (int i = 0; i < filters.length; i++) { - assertThat(result.getBucketByKey(String.valueOf(i)).getDocCount(), equalTo(0L)); - } - if (askForOtherBucket) { - assertThat(result.getBucketByKey("other").getDocCount(), equalTo(0L)); - } else { - assertThat(result.getBucketByKey("other"), nullValue()); - } + withAggregator(builder, new MatchAllDocsQuery(), iw -> {}, (searcher, aggregator) -> { + InternalFilters result = (InternalFilters) aggregator.buildEmptyAggregation(); + for (int i = 0; i < filters.length; i++) { + assertThat(result.getBucketByKey(String.valueOf(i)).getDocCount(), equalTo(0L)); } - ); + if (askForOtherBucket) { + assertThat(result.getBucketByKey("other").getDocCount(), equalTo(0L)); + } else { + assertThat(result.getBucketByKey("other"), nullValue()); + } + }); } public void testNoFilters() throws IOException { - testCase(new FiltersAggregationBuilder("test", new KeyedFilter[0]), new MatchAllDocsQuery(), iw -> { - iw.addDocument(List.of()); - }, (InternalFilters result) -> { - assertThat(result.getBuckets(), hasSize(0)); - }); + testCase( + new FiltersAggregationBuilder("test", new KeyedFilter[0]), + new MatchAllDocsQuery(), + iw -> { iw.addDocument(List.of()); }, + (InternalFilters result) -> { assertThat(result.getBuckets(), hasSize(0)); } + ); } public void 
testNoFiltersWithSubAggs() throws IOException { @@ -566,9 +562,7 @@ public void onCache(ShardId shardId, Accountable accountable) {} .entry( "filters", matchesList().item( - matchesMap().entry("query", "*:*") - .entry("specialized_for", "match_all") - .entry("results_from_metadata", 0) + matchesMap().entry("query", "*:*").entry("specialized_for", "match_all").entry("results_from_metadata", 0) ) ) ); @@ -816,11 +810,11 @@ public void testSubAggs() throws IOException { new SortedNumericDocValuesField("int", 10) ) ); - /* - * Shuffle the docs so we collect them in a random order which causes - * bad implementations of filter-by-filter aggregation to fail with - * assertion errors while executing. - */ + /* + * Shuffle the docs so we collect them in a random order which causes + * bad implementations of filter-by-filter aggregation to fail with + * assertion errors while executing. + */ Collections.shuffle(docs, random()); debugTestCase( builder, @@ -887,16 +881,15 @@ public void testSubAggsManyDocs() throws IOException { long[] times = new long[] { DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parseMillis("2010-01-02"), DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parseMillis("2020-01-02"), - DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parseMillis("2020-01-03"), - }; + DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parseMillis("2020-01-03"), }; for (int i = 0; i < 10000; i++) { docs.add(List.of(new LongPoint("test", times[i % 3]), new SortedNumericDocValuesField("int", i))); } - /* - * Shuffle the docs so we collect them in a random order which causes - * bad implementations of filter-by-filter aggregation to fail with - * assertion errors while executing. - */ + /* + * Shuffle the docs so we collect them in a random order which causes + * bad implementations of filter-by-filter aggregation to fail with + * assertion errors while executing. + */ Collections.shuffle(docs, random()); debugTestCase( builder, @@ -960,10 +953,9 @@ public void testSubAggsManyFilters() throws IOException { buckets.add(new KeyedFilter(key, new RangeQueryBuilder("test").from(start).to(end).includeUpper(false))); start = end; } - AggregationBuilder builder = new FiltersAggregationBuilder( - "test", - buckets.toArray(KeyedFilter[]::new) - ).subAggregation(new MaxAggregationBuilder("m").field("int")).subAggregation(new SumAggregationBuilder("s").field("int")); + AggregationBuilder builder = new FiltersAggregationBuilder("test", buckets.toArray(KeyedFilter[]::new)).subAggregation( + new MaxAggregationBuilder("m").field("int") + ).subAggregation(new SumAggregationBuilder("s").field("int")); List> docs = new ArrayList<>(); long[] times = new long[] { formatter.parseMillis("2010-01-02"), @@ -972,11 +964,11 @@ public void testSubAggsManyFilters() throws IOException { for (int i = 0; i < 10000; i++) { docs.add(List.of(new LongPoint("test", times[i % 3]), new SortedNumericDocValuesField("int", i))); } - /* - * Shuffle the docs so we collect them in a random order which causes - * bad implementations of filter-by-filter aggregation to fail with - * assertion errors while executing. - */ + /* + * Shuffle the docs so we collect them in a random order which causes + * bad implementations of filter-by-filter aggregation to fail with + * assertion errors while executing. 
+ */ Collections.shuffle(docs, random()); debugTestCase( builder, @@ -1064,24 +1056,29 @@ public void testDocValuesFieldExistsForNumber() throws IOException { null, false ); - docValuesFieldExistsTestCase(new ExistsQueryBuilder("f"), ft, true, i -> { - return numberType.createFields("f", i, true, true, false); - }); + docValuesFieldExistsTestCase( + new ExistsQueryBuilder("f"), + ft, + true, + i -> { return numberType.createFields("f", i, true, true, false); } + ); } public void testDocValuesFieldExistsForNumberWithoutData() throws IOException { - docValuesFieldExistsNoDataTestCase(new NumberFieldMapper.NumberFieldType( - "f", - randomFrom(NumberFieldMapper.NumberType.values()), - true, - false, - true, - true, - null, - Map.of(), - null, - false - )); + docValuesFieldExistsNoDataTestCase( + new NumberFieldMapper.NumberFieldType( + "f", + randomFrom(NumberFieldMapper.NumberType.values()), + true, + false, + true, + true, + null, + Map.of(), + null, + false + ) + ); } public void testDocValuesFieldExistsForKeyword() throws IOException { @@ -1105,35 +1102,23 @@ private void docValuesFieldExistsTestCase( AggregationBuilder builder = new FiltersAggregationBuilder("test", new KeyedFilter("q1", exists)); // Exists queries convert to MatchNone if this isn't defined FieldNamesFieldMapper.FieldNamesFieldType fnft = new FieldNamesFieldMapper.FieldNamesFieldType(true); - debugTestCase( - builder, - new MatchAllDocsQuery(), - iw -> { - for (int i = 0; i < 10; i++) { - iw.addDocument(buildDocWithField.apply(i)); - } - }, - (InternalFilters result, Class impl, Map> debug) -> { - assertThat(result.getBuckets(), hasSize(1)); - assertThat(result.getBucketByKey("q1").getDocCount(), equalTo(10L)); + debugTestCase(builder, new MatchAllDocsQuery(), iw -> { + for (int i = 0; i < 10; i++) { + iw.addDocument(buildDocWithField.apply(i)); + } + }, (InternalFilters result, Class impl, Map> debug) -> { + assertThat(result.getBuckets(), hasSize(1)); + assertThat(result.getBucketByKey("q1").getDocCount(), equalTo(10L)); - assertThat(impl, equalTo(FilterByFilterAggregator.class)); - MapMatcher expectedFilterDebug = matchesMap().extraOk() - .entry("specialized_for", "docvalues_field_exists") - .entry("results_from_metadata", canUseMetadata ? greaterThan(0) : equalTo(0)); - assertMap( - debug, - matchesMap().entry("test", matchesMap().extraOk().entry("filters", matchesList().item(expectedFilterDebug))) - ); - }, - fieldType, - fnft - ); + assertThat(impl, equalTo(FilterByFilterAggregator.class)); + MapMatcher expectedFilterDebug = matchesMap().extraOk() + .entry("specialized_for", "docvalues_field_exists") + .entry("results_from_metadata", canUseMetadata ? 
greaterThan(0) : equalTo(0)); + assertMap(debug, matchesMap().entry("test", matchesMap().extraOk().entry("filters", matchesList().item(expectedFilterDebug)))); + }, fieldType, fnft); } - private void docValuesFieldExistsNoDataTestCase( - MappedFieldType fieldType - ) throws IOException { + private void docValuesFieldExistsNoDataTestCase(MappedFieldType fieldType) throws IOException { QueryBuilder exists = new ExistsQueryBuilder(fieldType.name()); AggregationBuilder builder = new FiltersAggregationBuilder("test", new KeyedFilter("q1", exists)); CheckedConsumer buildIndex = iw -> { @@ -1147,9 +1132,10 @@ private void docValuesFieldExistsNoDataTestCase( assertThat(aggregator, instanceOf(FilterByFilterAggregator.class)); Map debug = collectAndGetFilterDebugInfo(searcher, aggregator); - assertMap(debug, matchesMap().extraOk() - .entry("specialized_for", "docvalues_field_exists") - .entry("results_from_metadata", greaterThan(0))); + assertMap( + debug, + matchesMap().extraOk().entry("specialized_for", "docvalues_field_exists").entry("results_from_metadata", greaterThan(0)) + ); }, fieldType, fnft); testCase(builder, new MatchAllDocsQuery(), buildIndex, (InternalFilters result) -> { assertThat(result.getBuckets(), hasSize(1)); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/filter/InternalFilterTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/filter/InternalFilterTests.java index 4985d254d886b..e5229d21b17a6 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/filter/InternalFilterTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/filter/InternalFilterTests.java @@ -25,8 +25,12 @@ public class InternalFilterTests extends InternalSingleBucketAggregationTestCase { @Override - protected InternalFilter createTestInstance(String name, long docCount, InternalAggregations aggregations, - Map metadata) { + protected InternalFilter createTestInstance( + String name, + long docCount, + InternalAggregations aggregations, + Map metadata + ) { return new InternalFilter(name, docCount, aggregations, metadata); } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/filter/InternalFiltersTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/filter/InternalFiltersTests.java index f59bcdae55734..64090b6b67c36 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/filter/InternalFiltersTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/filter/InternalFiltersTests.java @@ -63,14 +63,15 @@ protected void assertReduced(InternalFilters reduced, List inpu final Map expectedCounts = new TreeMap<>(); for (InternalFilters input : inputs) { for (InternalFilters.InternalBucket bucket : input.getBuckets()) { - expectedCounts.compute(bucket.getKeyAsString(), - (key, oldValue) -> (oldValue == null ? 0 : oldValue) + bucket.getDocCount()); + expectedCounts.compute( + bucket.getKeyAsString(), + (key, oldValue) -> (oldValue == null ? 0 : oldValue) + bucket.getDocCount() + ); } } final Map actualCounts = new TreeMap<>(); for (InternalFilters.InternalBucket bucket : reduced.getBuckets()) { - actualCounts.compute(bucket.getKeyAsString(), - (key, oldValue) -> (oldValue == null ? 0 : oldValue) + bucket.getDocCount()); + actualCounts.compute(bucket.getKeyAsString(), (key, oldValue) -> (oldValue == null ? 
0 : oldValue) + bucket.getDocCount()); } assertEquals(expectedCounts, actualCounts); } @@ -86,22 +87,22 @@ protected InternalFilters mutateInstance(InternalFilters instance) { List buckets = instance.getBuckets(); Map metadata = instance.getMetadata(); switch (between(0, 2)) { - case 0: - name += randomAlphaOfLength(5); - break; - case 1: - buckets = new ArrayList<>(buckets); - buckets.add(new InternalFilters.InternalBucket("test", randomIntBetween(0, 1000), InternalAggregations.EMPTY, keyed)); - break; - case 2: - default: - if (metadata == null) { - metadata = new HashMap<>(1); - } else { - metadata = new HashMap<>(instance.getMetadata()); - } - metadata.put(randomAlphaOfLength(15), randomInt()); - break; + case 0: + name += randomAlphaOfLength(5); + break; + case 1: + buckets = new ArrayList<>(buckets); + buckets.add(new InternalFilters.InternalBucket("test", randomIntBetween(0, 1000), InternalAggregations.EMPTY, keyed)); + break; + case 2: + default: + if (metadata == null) { + metadata = new HashMap<>(1); + } else { + metadata = new HashMap<>(instance.getMetadata()); + } + metadata.put(randomAlphaOfLength(15), randomInt()); + break; } return new InternalFilters(name, buckets, keyed, metadata); } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoGridAggregatorTestCase.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoGridAggregatorTestCase.java index 6756fb63a5889..5672890605456 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoGridAggregatorTestCase.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoGridAggregatorTestCase.java @@ -20,8 +20,8 @@ import org.apache.lucene.search.Query; import org.apache.lucene.store.Directory; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.common.geo.GeoBoundingBox; +import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.geometry.Point; import org.elasticsearch.geometry.Rectangle; import org.elasticsearch.index.mapper.GeoPointFieldMapper; @@ -70,6 +70,7 @@ public abstract class GeoGridAggregatorTestCase * Create a new named {@link GeoGridAggregationBuilder}-derived builder */ protected abstract GeoGridAggregationBuilder createBuilder(String name); + /** * Return a point within the bounds of the tile grid */ @@ -96,27 +97,39 @@ protected List getSupportedValuesSourceTypes() { } public void testNoDocs() throws IOException { - testCase(new MatchAllDocsQuery(), FIELD_NAME, randomPrecision(), null, geoGrid -> { - assertEquals(0, geoGrid.getBuckets().size()); - }, iw -> { - // Intentionally not writing any docs - }); + testCase( + new MatchAllDocsQuery(), + FIELD_NAME, + randomPrecision(), + null, + geoGrid -> { assertEquals(0, geoGrid.getBuckets().size()); }, + iw -> { + // Intentionally not writing any docs + } + ); } public void testUnmapped() throws IOException { - testCase(new MatchAllDocsQuery(), "wrong_field", randomPrecision(), null, geoGrid -> { - assertEquals(0, geoGrid.getBuckets().size()); - }, iw -> { - iw.addDocument(Collections.singleton(new LatLonDocValuesField(FIELD_NAME, 10D, 10D))); - }); + testCase( + new MatchAllDocsQuery(), + "wrong_field", + randomPrecision(), + null, + geoGrid -> { assertEquals(0, geoGrid.getBuckets().size()); }, + iw -> { iw.addDocument(Collections.singleton(new LatLonDocValuesField(FIELD_NAME, 10D, 10D))); } + ); } public void testUnmappedMissing() throws IOException { - 
GeoGridAggregationBuilder builder = createBuilder("_name") - .field("wrong_field") - .missing("53.69437,6.475031"); - testCase(new MatchAllDocsQuery(), randomPrecision(), null, geoGrid -> assertEquals(1, geoGrid.getBuckets().size()), - iw -> iw.addDocument(Collections.singleton(new LatLonDocValuesField(FIELD_NAME, 10D, 10D))), builder); + GeoGridAggregationBuilder builder = createBuilder("_name").field("wrong_field").missing("53.69437,6.475031"); + testCase( + new MatchAllDocsQuery(), + randomPrecision(), + null, + geoGrid -> assertEquals(1, geoGrid.getBuckets().size()), + iw -> iw.addDocument(Collections.singleton(new LatLonDocValuesField(FIELD_NAME, 10D, 10D))), + builder + ); } @@ -181,7 +194,7 @@ public void testAsSubAgg() throws IOException { .subAggregation(createBuilder("gg").field(FIELD_NAME).precision(precision)); Consumer verify = (terms) -> { Map> actual = new TreeMap<>(); - for (StringTerms.Bucket tb: terms.getBuckets()) { + for (StringTerms.Bucket tb : terms.getBuckets()) { InternalGeoGrid gg = tb.getAggregations().get("gg"); Map sub = new TreeMap<>(); for (InternalGeoGridBucket ggb : gg.getBuckets()) { @@ -205,7 +218,7 @@ private double[] randomLatLng() { lng = GeoEncodingUtils.decodeLongitude(GeoEncodingUtils.encodeLongitude(lng)); lat = GeoEncodingUtils.decodeLatitude(GeoEncodingUtils.encodeLatitude(lat)); - return new double[] {lat, lng}; + return new double[] { lat, lng }; } public void testBounds() throws IOException { @@ -243,13 +256,14 @@ public void testBounds() throws IOException { int in = 0; List docs = new ArrayList<>(); for (int i = 0; i < numDocs; i++) { - Point p = randomPoint(); + Point p = randomPoint(); double x = encodeDecodeLon.apply(p.getLon()); double y = encodeDecodeLat.apply(p.getLat()); Rectangle pointTile = getTile(x, y, precision); - boolean intersectsBounds = boundsTop > pointTile.getMinY() && boundsBottom < pointTile.getMaxY() + boolean intersectsBounds = boundsTop > pointTile.getMinY() + && boundsBottom < pointTile.getMaxY() && (boundsEastLeft < pointTile.getMaxX() && boundsEastRight > pointTile.getMinX() - || (crossesDateline && boundsWestLeft < pointTile.getMaxX() && boundsWestRight > pointTile.getMinX())); + || (crossesDateline && boundsWestLeft < pointTile.getMaxX() && boundsWestRight > pointTile.getMinX())); if (intersectsBounds) { in++; } @@ -275,16 +289,25 @@ public void testBounds() throws IOException { }); } - private void testCase(Query query, String field, int precision, GeoBoundingBox geoBoundingBox, - Consumer> verify, - CheckedConsumer buildIndex) throws IOException { + private void testCase( + Query query, + String field, + int precision, + GeoBoundingBox geoBoundingBox, + Consumer> verify, + CheckedConsumer buildIndex + ) throws IOException { testCase(query, precision, geoBoundingBox, verify, buildIndex, createBuilder("_name").field(field)); } - private void testCase(Query query, int precision, GeoBoundingBox geoBoundingBox, - Consumer> verify, - CheckedConsumer buildIndex, - GeoGridAggregationBuilder aggregationBuilder) throws IOException { + private void testCase( + Query query, + int precision, + GeoBoundingBox geoBoundingBox, + Consumer> verify, + CheckedConsumer buildIndex, + GeoGridAggregationBuilder aggregationBuilder + ) throws IOException { Directory directory = newDirectory(); RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory); buildIndex.accept(indexWriter); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoGridTestCase.java 
b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoGridTestCase.java index 2221b9c924112..454ad5a9f3ad7 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoGridTestCase.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoGridTestCase.java @@ -18,14 +18,13 @@ import static org.hamcrest.Matchers.equalTo; -public abstract class GeoGridTestCase> - extends InternalMultiBucketAggregationTestCase { +public abstract class GeoGridTestCase> extends + InternalMultiBucketAggregationTestCase { /** * Instantiate a {@link InternalGeoGrid}-derived class using the same parameters as constructor. */ - protected abstract T createInternalGeoGrid(String name, int size, List buckets, - Map metadata); + protected abstract T createInternalGeoGrid(String name, int size, List buckets, Map metadata); /** * Instantiate a {@link InternalGeoGridBucket}-derived class using the same parameters as constructor. @@ -119,33 +118,34 @@ protected T mutateInstance(T instance) { List buckets = instance.getBuckets(); Map metadata = instance.getMetadata(); switch (between(0, 3)) { - case 0: - name += randomAlphaOfLength(5); - break; - case 1: - buckets = new ArrayList<>(buckets); - buckets.add( - createInternalGeoGridBucket(randomNonNegativeLong(), randomInt(IndexWriter.MAX_DOCS), InternalAggregations.EMPTY)); - break; - case 2: - size = size + between(1, 10); - break; - case 3: - if (metadata == null) { - metadata = new HashMap<>(1); - } else { - metadata = new HashMap<>(instance.getMetadata()); - } - metadata.put(randomAlphaOfLength(15), randomInt()); - break; - default: - throw new AssertionError("Illegal randomisation branch"); + case 0: + name += randomAlphaOfLength(5); + break; + case 1: + buckets = new ArrayList<>(buckets); + buckets.add( + createInternalGeoGridBucket(randomNonNegativeLong(), randomInt(IndexWriter.MAX_DOCS), InternalAggregations.EMPTY) + ); + break; + case 2: + size = size + between(1, 10); + break; + case 3: + if (metadata == null) { + metadata = new HashMap<>(1); + } else { + metadata = new HashMap<>(instance.getMetadata()); + } + metadata.put(randomAlphaOfLength(15), randomInt()); + break; + default: + throw new AssertionError("Illegal randomisation branch"); } return createInternalGeoGrid(name, size, buckets, metadata); } public void testCreateFromBuckets() { - InternalGeoGrid original = createTestInstance(); - assertThat(original, equalTo(original.create(original.buckets))); + InternalGeoGrid original = createTestInstance(); + assertThat(original, equalTo(original.create(original.buckets))); } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoHashGridAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoHashGridAggregatorTests.java index d2e99a43751c0..daf97a2a49e23 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoHashGridAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoHashGridAggregatorTests.java @@ -32,8 +32,10 @@ protected Point randomPoint() { @Override protected GeoBoundingBox randomBBox() { Rectangle rectangle = GeometryTestUtils.randomRectangle(); - return new GeoBoundingBox(new GeoPoint(rectangle.getMaxLat(), rectangle.getMinLon()), - new GeoPoint(rectangle.getMinLat(), rectangle.getMaxLon())); + return new GeoBoundingBox( + new GeoPoint(rectangle.getMaxLat(), rectangle.getMinLon()), + new 
GeoPoint(rectangle.getMinLat(), rectangle.getMaxLon()) + ); } @Override diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoHashGridParserTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoHashGridParserTests.java index d333a1b461d3e..761f00cb6a4dd 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoHashGridParserTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoHashGridParserTests.java @@ -7,11 +7,6 @@ */ package org.elasticsearch.search.aggregations.bucket.geogrid; -import static org.hamcrest.Matchers.containsString; -import static org.hamcrest.Matchers.greaterThanOrEqualTo; -import static org.hamcrest.Matchers.instanceOf; -import static org.hamcrest.Matchers.lessThanOrEqualTo; - import org.elasticsearch.common.unit.DistanceUnit; import org.elasticsearch.common.xcontent.XContentParseException; import org.elasticsearch.common.xcontent.XContentParser; @@ -20,11 +15,18 @@ import org.elasticsearch.geometry.Rectangle; import org.elasticsearch.test.ESTestCase; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.greaterThanOrEqualTo; +import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.lessThanOrEqualTo; + public class GeoHashGridParserTests extends ESTestCase { public void testParseValidFromInts() throws Exception { int precision = randomIntBetween(1, 12); - XContentParser stParser = createParser(JsonXContent.jsonXContent, - "{\"field\":\"my_loc\", \"precision\":" + precision + ", \"size\": 500, \"shard_size\": 550}"); + XContentParser stParser = createParser( + JsonXContent.jsonXContent, + "{\"field\":\"my_loc\", \"precision\":" + precision + ", \"size\": 500, \"shard_size\": 550}" + ); XContentParser.Token token = stParser.nextToken(); assertSame(XContentParser.Token.START_OBJECT, token); // can create a factory @@ -33,8 +35,10 @@ public void testParseValidFromInts() throws Exception { public void testParseValidFromStrings() throws Exception { int precision = randomIntBetween(1, 12); - XContentParser stParser = createParser(JsonXContent.jsonXContent, - "{\"field\":\"my_loc\", \"precision\":\"" + precision + "\", \"size\": \"500\", \"shard_size\": \"550\"}"); + XContentParser stParser = createParser( + JsonXContent.jsonXContent, + "{\"field\":\"my_loc\", \"precision\":\"" + precision + "\", \"size\": \"500\", \"shard_size\": \"550\"}" + ); XContentParser.Token token = stParser.nextToken(); assertSame(XContentParser.Token.START_OBJECT, token); // can create a factory @@ -48,8 +52,10 @@ public void testParseDistanceUnitPrecision() throws Exception { distance = 5600 + randomDouble(); // 5.6cm is approx. 
smallest distance represented by precision 12 } String distanceString = distance + unit.toString(); - XContentParser stParser = createParser(JsonXContent.jsonXContent, - "{\"field\":\"my_loc\", \"precision\": \"" + distanceString + "\", \"size\": \"500\", \"shard_size\": \"550\"}"); + XContentParser stParser = createParser( + JsonXContent.jsonXContent, + "{\"field\":\"my_loc\", \"precision\": \"" + distanceString + "\", \"size\": \"500\", \"shard_size\": \"550\"}" + ); XContentParser.Token token = stParser.nextToken(); assertSame(XContentParser.Token.START_OBJECT, token); // can create a factory @@ -60,24 +66,32 @@ public void testParseDistanceUnitPrecision() throws Exception { } public void testParseInvalidUnitPrecision() throws Exception { - XContentParser stParser = createParser(JsonXContent.jsonXContent, - "{\"field\":\"my_loc\", \"precision\": \"10kg\", \"size\": \"500\", \"shard_size\": \"550\"}"); + XContentParser stParser = createParser( + JsonXContent.jsonXContent, + "{\"field\":\"my_loc\", \"precision\": \"10kg\", \"size\": \"500\", \"shard_size\": \"550\"}" + ); XContentParser.Token token = stParser.nextToken(); assertSame(XContentParser.Token.START_OBJECT, token); - XContentParseException ex = expectThrows(XContentParseException.class, - () -> GeoHashGridAggregationBuilder.PARSER.parse(stParser, "geohash_grid")); + XContentParseException ex = expectThrows( + XContentParseException.class, + () -> GeoHashGridAggregationBuilder.PARSER.parse(stParser, "geohash_grid") + ); assertThat(ex.getMessage(), containsString("[geohash_grid] failed to parse field [precision]")); assertThat(ex.getCause(), instanceOf(NumberFormatException.class)); assertEquals("For input string: \"10kg\"", ex.getCause().getMessage()); } public void testParseDistanceUnitPrecisionTooSmall() throws Exception { - XContentParser stParser = createParser(JsonXContent.jsonXContent, - "{\"field\":\"my_loc\", \"precision\": \"1cm\", \"size\": \"500\", \"shard_size\": \"550\"}"); + XContentParser stParser = createParser( + JsonXContent.jsonXContent, + "{\"field\":\"my_loc\", \"precision\": \"1cm\", \"size\": \"500\", \"shard_size\": \"550\"}" + ); XContentParser.Token token = stParser.nextToken(); assertSame(XContentParser.Token.START_OBJECT, token); - XContentParseException ex = expectThrows(XContentParseException.class, - () -> GeoHashGridAggregationBuilder.PARSER.parse(stParser, "geohash_grid")); + XContentParseException ex = expectThrows( + XContentParseException.class, + () -> GeoHashGridAggregationBuilder.PARSER.parse(stParser, "geohash_grid") + ); assertThat(ex.getMessage(), containsString("[geohash_grid] failed to parse field [precision]")); assertThat(ex.getCause(), instanceOf(IllegalArgumentException.class)); assertEquals("precision too high [1cm]", ex.getCause().getMessage()); @@ -87,8 +101,10 @@ public void testParseErrorOnBooleanPrecision() throws Exception { XContentParser stParser = createParser(JsonXContent.jsonXContent, "{\"field\":\"my_loc\", \"precision\":false}"); XContentParser.Token token = stParser.nextToken(); assertSame(XContentParser.Token.START_OBJECT, token); - XContentParseException e = expectThrows(XContentParseException.class, - () -> GeoHashGridAggregationBuilder.PARSER.parse(stParser, "geohash_grid")); + XContentParseException e = expectThrows( + XContentParseException.class, + () -> GeoHashGridAggregationBuilder.PARSER.parse(stParser, "geohash_grid") + ); assertThat(e.getMessage(), containsString("[geohash_grid] precision doesn't support values of type: VALUE_BOOLEAN")); } @@ -107,13 
+123,24 @@ public void testParseErrorOnPrecisionOutOfRange() throws Exception { public void testParseValidBounds() throws Exception { Rectangle bbox = GeometryTestUtils.randomRectangle(); - XContentParser stParser = createParser(JsonXContent.jsonXContent, - "{\"field\":\"my_loc\", \"precision\": 5, \"size\": 500, \"shard_size\": 550," + "\"bounds\": { " - + "\"top\": " + bbox.getMaxY() + "," - + "\"bottom\": " + bbox.getMinY() + "," - + "\"left\": " + bbox.getMinX() + "," - + "\"right\": " + bbox.getMaxX() + "}" - + "}"); + XContentParser stParser = createParser( + JsonXContent.jsonXContent, + "{\"field\":\"my_loc\", \"precision\": 5, \"size\": 500, \"shard_size\": 550," + + "\"bounds\": { " + + "\"top\": " + + bbox.getMaxY() + + "," + + "\"bottom\": " + + bbox.getMinY() + + "," + + "\"left\": " + + bbox.getMinX() + + "," + + "\"right\": " + + bbox.getMaxX() + + "}" + + "}" + ); XContentParser.Token token = stParser.nextToken(); assertSame(XContentParser.Token.START_OBJECT, token); // can create a factory diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoHashGridTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoHashGridTests.java index 573b0dc026b63..493bea979c2c5 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoHashGridTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoHashGridTests.java @@ -16,8 +16,12 @@ public class GeoHashGridTests extends GeoGridTestCase { @Override - protected InternalGeoHashGrid createInternalGeoGrid(String name, int size, List buckets, - Map metadata) { + protected InternalGeoHashGrid createInternalGeoGrid( + String name, + int size, + List buckets, + Map metadata + ) { return new InternalGeoHashGrid(name, size, buckets, metadata); } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoTileGridAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoTileGridAggregatorTests.java index 4ceb0d38c9447..e56ca35675f49 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoTileGridAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoTileGridAggregatorTests.java @@ -30,8 +30,10 @@ protected String hashAsString(double lng, double lat, int precision) { @Override protected Point randomPoint() { - return new Point(randomDoubleBetween(GeoUtils.MIN_LON, GeoUtils.MAX_LON, true), - randomDoubleBetween(-GeoTileUtils.LATITUDE_MASK, GeoTileUtils.LATITUDE_MASK, false)); + return new Point( + randomDoubleBetween(GeoUtils.MIN_LON, GeoUtils.MAX_LON, true), + randomDoubleBetween(-GeoTileUtils.LATITUDE_MASK, GeoTileUtils.LATITUDE_MASK, false) + ); } @Override @@ -40,9 +42,12 @@ protected GeoBoundingBox randomBBox() { (b) -> b.top() > GeoTileUtils.LATITUDE_MASK || b.bottom() < -GeoTileUtils.LATITUDE_MASK, () -> { Rectangle rectangle = GeometryTestUtils.randomRectangle(); - return new GeoBoundingBox(new GeoPoint(rectangle.getMaxLat(), rectangle.getMinLon()), - new GeoPoint(rectangle.getMinLat(), rectangle.getMaxLon())); - }); + return new GeoBoundingBox( + new GeoPoint(rectangle.getMaxLat(), rectangle.getMinLon()), + new GeoPoint(rectangle.getMinLat(), rectangle.getMaxLon()) + ); + } + ); // Avoid numerical errors for sub-atomic values double left = GeoEncodingUtils.decodeLongitude(GeoEncodingUtils.encodeLongitude(bbox.left())); double right = 
GeoEncodingUtils.decodeLongitude(GeoEncodingUtils.encodeLongitude(bbox.right())); @@ -55,13 +60,13 @@ protected GeoBoundingBox randomBBox() { @Override protected Rectangle getTile(double lng, double lat, int precision) { - long tiles = 1 << precision; + long tiles = 1 << precision; int x = GeoTileUtils.getXTile(lng, tiles); int y = GeoTileUtils.getYTile(lat, tiles); Rectangle r1 = GeoTileUtils.toBoundingBox(x, y, precision); Rectangle r2 = GeoTileUtils.toBoundingBox(GeoTileUtils.longEncode(lng, lat, precision)); if (r1.equals(r2) == false) { - int a =0; + int a = 0; } return GeoTileUtils.toBoundingBox(GeoTileUtils.longEncode(lng, lat, precision)); } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoTileGridParserTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoTileGridParserTests.java index 2432e0fdc1e6e..eec1c095e4397 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoTileGridParserTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoTileGridParserTests.java @@ -7,9 +7,6 @@ */ package org.elasticsearch.search.aggregations.bucket.geogrid; -import static org.hamcrest.Matchers.containsString; -import static org.hamcrest.Matchers.instanceOf; - import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.common.xcontent.XContentParseException; import org.elasticsearch.common.xcontent.XContentParser; @@ -18,11 +15,16 @@ import org.elasticsearch.geometry.Rectangle; import org.elasticsearch.test.ESTestCase; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.instanceOf; + public class GeoTileGridParserTests extends ESTestCase { public void testParseValidFromInts() throws Exception { int precision = randomIntBetween(0, GeoTileUtils.MAX_ZOOM); - XContentParser stParser = createParser(JsonXContent.jsonXContent, - "{\"field\":\"my_loc\", \"precision\":" + precision + ", \"size\": 500, \"shard_size\": 550}"); + XContentParser stParser = createParser( + JsonXContent.jsonXContent, + "{\"field\":\"my_loc\", \"precision\":" + precision + ", \"size\": 500, \"shard_size\": 550}" + ); XContentParser.Token token = stParser.nextToken(); assertSame(XContentParser.Token.START_OBJECT, token); // can create a factory @@ -31,8 +33,10 @@ public void testParseValidFromInts() throws Exception { public void testParseValidFromStrings() throws Exception { int precision = randomIntBetween(0, GeoTileUtils.MAX_ZOOM); - XContentParser stParser = createParser(JsonXContent.jsonXContent, - "{\"field\":\"my_loc\", \"precision\":\"" + precision + "\", \"size\": \"500\", \"shard_size\": \"550\"}"); + XContentParser stParser = createParser( + JsonXContent.jsonXContent, + "{\"field\":\"my_loc\", \"precision\":\"" + precision + "\", \"size\": \"500\", \"shard_size\": \"550\"}" + ); XContentParser.Token token = stParser.nextToken(); assertSame(XContentParser.Token.START_OBJECT, token); // can create a factory @@ -43,10 +47,14 @@ public void testParseErrorOnBooleanPrecision() throws Exception { XContentParser stParser = createParser(JsonXContent.jsonXContent, "{\"field\":\"my_loc\", \"precision\":false}"); XContentParser.Token token = stParser.nextToken(); assertSame(XContentParser.Token.START_OBJECT, token); - XContentParseException e = expectThrows(XContentParseException.class, - () -> GeoTileGridAggregationBuilder.PARSER.parse(stParser, "geotile_grid")); - assertThat(ExceptionsHelper.stackTrace(e), - containsString("[geotile_grid] 
precision doesn't support values of type: VALUE_BOOLEAN")); + XContentParseException e = expectThrows( + XContentParseException.class, + () -> GeoTileGridAggregationBuilder.PARSER.parse(stParser, "geotile_grid") + ); + assertThat( + ExceptionsHelper.stackTrace(e), + containsString("[geotile_grid] precision doesn't support values of type: VALUE_BOOLEAN") + ); } public void testParseErrorOnPrecisionOutOfRange() throws Exception { @@ -64,13 +72,24 @@ public void testParseErrorOnPrecisionOutOfRange() throws Exception { public void testParseValidBounds() throws Exception { Rectangle bbox = GeometryTestUtils.randomRectangle(); - XContentParser stParser = createParser(JsonXContent.jsonXContent, - "{\"field\":\"my_loc\", \"precision\": 5, \"size\": 500, \"shard_size\": 550," + "\"bounds\": { " - + "\"top\": " + bbox.getMaxY() + "," - + "\"bottom\": " + bbox.getMinY() + "," - + "\"left\": " + bbox.getMinX() + "," - + "\"right\": " + bbox.getMaxX() + "}" - + "}"); + XContentParser stParser = createParser( + JsonXContent.jsonXContent, + "{\"field\":\"my_loc\", \"precision\": 5, \"size\": 500, \"shard_size\": 550," + + "\"bounds\": { " + + "\"top\": " + + bbox.getMaxY() + + "," + + "\"bottom\": " + + bbox.getMinY() + + "," + + "\"left\": " + + bbox.getMinX() + + "," + + "\"right\": " + + bbox.getMaxX() + + "}" + + "}" + ); XContentParser.Token token = stParser.nextToken(); assertSame(XContentParser.Token.START_OBJECT, token); // can create a factory diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoTileGridTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoTileGridTests.java index 691e0e0c57617..f91c0f8439f81 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoTileGridTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoTileGridTests.java @@ -15,8 +15,12 @@ public class GeoTileGridTests extends GeoGridTestCase { @Override - protected InternalGeoTileGrid createInternalGeoGrid(String name, int size, List buckets, - Map metadata) { + protected InternalGeoTileGrid createInternalGeoGrid( + String name, + int size, + List buckets, + Map metadata + ) { return new InternalGeoTileGrid(name, size, buckets, metadata); } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoTileUtilsTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoTileUtilsTests.java index 27507cc302074..0ba1095937af1 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoTileUtilsTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoTileUtilsTests.java @@ -52,17 +52,17 @@ public void testLongEncode() { assertEquals(0x0C00000060000000L, longEncode(-20, 100, 3)); assertEquals(0x71127D27C8ACA67AL, longEncode(13, -15, 28)); assertEquals(0x4C0077776003A9ACL, longEncode(-12, 15, 19)); - assertEquals(0x140000024000000EL, longEncode(-328.231870,16.064082, 5)); - assertEquals(0x6436F96B60000000L, longEncode(-590.769588,89.549167, 25)); - assertEquals(0x6411BD6BA0A98359L, longEncode(999.787079,51.830093, 25)); - assertEquals(0x751BD6BBCA983596L, longEncode(999.787079,51.830093, 29)); - assertEquals(0x77CF880A20000000L, longEncode(-557.039740,-632.103969, 29)); - assertEquals(0x7624FA4FA0000000L, longEncode(13,88, 29)); - assertEquals(0x7624FA4FBFFFFFFFL, longEncode(13,-88, 29)); - assertEquals(0x0400000020000000L, longEncode(13,89, 1)); - 
assertEquals(0x0400000020000001L, longEncode(13,-89, 1)); - assertEquals(0x0400000020000000L, longEncode(13,95, 1)); - assertEquals(0x0400000020000001L, longEncode(13,-95, 1)); + assertEquals(0x140000024000000EL, longEncode(-328.231870, 16.064082, 5)); + assertEquals(0x6436F96B60000000L, longEncode(-590.769588, 89.549167, 25)); + assertEquals(0x6411BD6BA0A98359L, longEncode(999.787079, 51.830093, 25)); + assertEquals(0x751BD6BBCA983596L, longEncode(999.787079, 51.830093, 29)); + assertEquals(0x77CF880A20000000L, longEncode(-557.039740, -632.103969, 29)); + assertEquals(0x7624FA4FA0000000L, longEncode(13, 88, 29)); + assertEquals(0x7624FA4FBFFFFFFFL, longEncode(13, -88, 29)); + assertEquals(0x0400000020000000L, longEncode(13, 89, 1)); + assertEquals(0x0400000020000001L, longEncode(13, -89, 1)); + assertEquals(0x0400000020000000L, longEncode(13, 95, 1)); + assertEquals(0x0400000020000001L, longEncode(13, -95, 1)); expectThrows(IllegalArgumentException.class, () -> longEncode(0, 0, -1)); expectThrows(IllegalArgumentException.class, () -> longEncode(-1, 0, MAX_ZOOM + 1)); @@ -77,17 +77,17 @@ public void testLongEncodeFromString() { assertEquals(0x0C00000060000000L, longEncode(stringEncode(longEncode(-20, 100, 3)))); assertEquals(0x71127D27C8ACA67AL, longEncode(stringEncode(longEncode(13, -15, 28)))); assertEquals(0x4C0077776003A9ACL, longEncode(stringEncode(longEncode(-12, 15, 19)))); - assertEquals(0x140000024000000EL, longEncode(stringEncode(longEncode(-328.231870,16.064082, 5)))); - assertEquals(0x6436F96B60000000L, longEncode(stringEncode(longEncode(-590.769588,89.549167, 25)))); - assertEquals(0x6411BD6BA0A98359L, longEncode(stringEncode(longEncode(999.787079,51.830093, 25)))); - assertEquals(0x751BD6BBCA983596L, longEncode(stringEncode(longEncode(999.787079,51.830093, 29)))); - assertEquals(0x77CF880A20000000L, longEncode(stringEncode(longEncode(-557.039740,-632.103969, 29)))); - assertEquals(0x7624FA4FA0000000L, longEncode(stringEncode(longEncode(13,88, 29)))); - assertEquals(0x7624FA4FBFFFFFFFL, longEncode(stringEncode(longEncode(13,-88, 29)))); - assertEquals(0x0400000020000000L, longEncode(stringEncode(longEncode(13,89, 1)))); - assertEquals(0x0400000020000001L, longEncode(stringEncode(longEncode(13,-89, 1)))); - assertEquals(0x0400000020000000L, longEncode(stringEncode(longEncode(13,95, 1)))); - assertEquals(0x0400000020000001L, longEncode(stringEncode(longEncode(13,-95, 1)))); + assertEquals(0x140000024000000EL, longEncode(stringEncode(longEncode(-328.231870, 16.064082, 5)))); + assertEquals(0x6436F96B60000000L, longEncode(stringEncode(longEncode(-590.769588, 89.549167, 25)))); + assertEquals(0x6411BD6BA0A98359L, longEncode(stringEncode(longEncode(999.787079, 51.830093, 25)))); + assertEquals(0x751BD6BBCA983596L, longEncode(stringEncode(longEncode(999.787079, 51.830093, 29)))); + assertEquals(0x77CF880A20000000L, longEncode(stringEncode(longEncode(-557.039740, -632.103969, 29)))); + assertEquals(0x7624FA4FA0000000L, longEncode(stringEncode(longEncode(13, 88, 29)))); + assertEquals(0x7624FA4FBFFFFFFFL, longEncode(stringEncode(longEncode(13, -88, 29)))); + assertEquals(0x0400000020000000L, longEncode(stringEncode(longEncode(13, 89, 1)))); + assertEquals(0x0400000020000001L, longEncode(stringEncode(longEncode(13, -89, 1)))); + assertEquals(0x0400000020000000L, longEncode(stringEncode(longEncode(13, 95, 1)))); + assertEquals(0x0400000020000001L, longEncode(stringEncode(longEncode(13, -95, 1)))); expectThrows(IllegalArgumentException.class, () -> longEncode("12/asdf/1")); 
expectThrows(IllegalArgumentException.class, () -> longEncode("foo")); @@ -107,7 +107,7 @@ public void testHashToGeoPoint() { assertGeoPointEquals(keyToGeoPoint("29/536870000/10"), 179.99938879162073, 85.05112817241982); assertGeoPointEquals(keyToGeoPoint("29/10/536870000"), -179.99999295920134, -85.0510760525731); - //noinspection ConstantConditions + // noinspection ConstantConditions expectThrows(NullPointerException.class, () -> keyToGeoPoint(null)); expectThrows(IllegalArgumentException.class, () -> keyToGeoPoint("")); expectThrows(IllegalArgumentException.class, () -> keyToGeoPoint("a")); @@ -214,9 +214,7 @@ public void testSingularityAtPoles() { double minLat = -GeoTileUtils.LATITUDE_MASK; double maxLat = GeoTileUtils.LATITUDE_MASK; double lon = randomIntBetween(-180, 180); - double lat = randomBoolean() - ? randomDoubleBetween(-90, minLat, true) - : randomDoubleBetween(maxLat, 90, true); + double lat = randomBoolean() ? randomDoubleBetween(-90, minLat, true) : randomDoubleBetween(maxLat, 90, true); double clippedLat = Math.min(Math.max(lat, minLat), maxLat); int zoom = randomIntBetween(0, MAX_ZOOM); String tileIndex = stringEncode(longEncode(lon, lat, zoom)); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/global/InternalGlobalTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/global/InternalGlobalTests.java index 459048387e3ef..5237ebe400e87 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/global/InternalGlobalTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/global/InternalGlobalTests.java @@ -17,8 +17,12 @@ public class InternalGlobalTests extends InternalSingleBucketAggregationTestCase { @Override - protected InternalGlobal createTestInstance(String name, long docCount, InternalAggregations aggregations, - Map metadata) { + protected InternalGlobal createTestInstance( + String name, + long docCount, + InternalAggregations aggregations, + Map metadata + ) { return new InternalGlobal(name, docCount, aggregations, metadata); } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregationBuilderTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregationBuilderTests.java index 8ba030007f8d6..fcfa6cf092f9f 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregationBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregationBuilderTests.java @@ -21,8 +21,10 @@ public class AutoDateHistogramAggregationBuilderTests extends ESTestCase { public void testInvalidInterval() { AutoDateHistogramAggregationBuilder builder = new AutoDateHistogramAggregationBuilder("name"); - IllegalArgumentException wrongIntervalName = expectThrows(IllegalArgumentException.class, - () -> builder.setMinimumIntervalExpression("foobar")); + IllegalArgumentException wrongIntervalName = expectThrows( + IllegalArgumentException.class, + () -> builder.setMinimumIntervalExpression("foobar") + ); assertTrue(wrongIntervalName.getMessage().startsWith("minimum_interval must be one of")); } @@ -44,8 +46,7 @@ public void testBuildRoundingsWithMinIntervalOfAYear() { } public void testRoundingsMatchAllowedIntervals() { - AutoDateHistogramAggregationBuilder.RoundingInfo[] roundings = AutoDateHistogramAggregationBuilder.buildRoundings( - null, "second"); + 
AutoDateHistogramAggregationBuilder.RoundingInfo[] roundings = AutoDateHistogramAggregationBuilder.buildRoundings(null, "second"); Set actualDateTimeUnits = Arrays.stream(roundings) .map(AutoDateHistogramAggregationBuilder.RoundingInfo::getDateTimeUnit) .collect(Collectors.toSet()); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregatorTests.java index 3591c885f1914..71bbf8a4f7704 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregatorTests.java @@ -85,12 +85,15 @@ public class AutoDateHistogramAggregatorTests extends DateHistogramAggregatorTes ZonedDateTime.of(2015, 6, 24, 13, 47, 43, 0, ZoneOffset.UTC), ZonedDateTime.of(2015, 11, 13, 16, 14, 34, 0, ZoneOffset.UTC), ZonedDateTime.of(2016, 3, 4, 17, 9, 50, 0, ZoneOffset.UTC), - ZonedDateTime.of(2017, 12, 12, 22, 55, 46, 0, ZoneOffset.UTC)); + ZonedDateTime.of(2017, 12, 12, 22, 55, 46, 0, ZoneOffset.UTC) + ); private static final Query DEFAULT_QUERY = new MatchAllDocsQuery(); public void testMatchNoDocs() throws IOException { - testSearchCase(new MatchNoDocsQuery(), DATES_WITH_TIME, + testSearchCase( + new MatchNoDocsQuery(), + DATES_WITH_TIME, aggregation -> aggregation.setNumBuckets(10).field(DATE_FIELD), histogram -> { assertEquals(0, histogram.getBuckets().size()); @@ -109,15 +112,20 @@ public void testMatchAllDocs() throws IOException { expectedDocCount.put("2017-01-01T00:00:00.000Z", 1); expectedDocCount.put("2011-01-01T00:00:00.000Z", 0); expectedDocCount.put("2014-01-01T00:00:00.000Z", 0); - testSearchCase(DEFAULT_QUERY, DATES_WITH_TIME, + testSearchCase( + DEFAULT_QUERY, + DATES_WITH_TIME, aggregation -> aggregation.setNumBuckets(8).field(DATE_FIELD), result -> assertThat(bucketCountsAsMap(result), equalTo(expectedDocCount)) ); } public void testSubAggregations() throws IOException { - testSearchCase(DEFAULT_QUERY, DATES_WITH_TIME, - aggregation -> aggregation.setNumBuckets(8).field(DATE_FIELD) + testSearchCase( + DEFAULT_QUERY, + DATES_WITH_TIME, + aggregation -> aggregation.setNumBuckets(8) + .field(DATE_FIELD) .subAggregation(AggregationBuilders.stats("stats").field(DATE_FIELD)), histogram -> { assertTrue(AggregationInspectionHelper.hasValue(histogram)); @@ -195,13 +203,17 @@ public void testSubAggregations() throws IOException { assertEquals("2017-12-12T22:55:46.000Z", stats.getMaxAsString()); assertEquals(1L, stats.getCount()); assertTrue(AggregationInspectionHelper.hasValue(stats)); - }); + } + ); } public void testAsSubAgg() throws IOException { - AggregationBuilder builder = new TermsAggregationBuilder("k1").field("k1").subAggregation( - new AutoDateHistogramAggregationBuilder("dh").field(AGGREGABLE_DATE).setNumBuckets(3).subAggregation( - new MaxAggregationBuilder("max").field("n"))); + AggregationBuilder builder = new TermsAggregationBuilder("k1").field("k1") + .subAggregation( + new AutoDateHistogramAggregationBuilder("dh").field(AGGREGABLE_DATE) + .setNumBuckets(3) + .subAggregation(new MaxAggregationBuilder("max").field("n")) + ); asSubAggTestCase(builder, (StringTerms terms) -> { StringTerms.Bucket a = terms.getBucketByKey("a"); InternalAutoDateHistogram adh = a.getAggregations().get("dh"); @@ -271,14 +283,16 @@ public void testAsSubAggWithIncreasedRounding() throws 
IOException { BytesRef cBytes = new BytesRef("c"); int n = 0; for (long d = start; d < end; d += anHour) { - docs.add(List.of( - new SortedNumericDocValuesField(AGGREGABLE_DATE, d), - new SortedSetDocValuesField("k1", aBytes), - new SortedSetDocValuesField("k1", d < useC ? bBytes : cBytes), - new Field("k1", aBytes, KeywordFieldMapper.Defaults.FIELD_TYPE), - new Field("k1", d < useC ? bBytes : cBytes, KeywordFieldMapper.Defaults.FIELD_TYPE), - new SortedNumericDocValuesField("n", n++) - )); + docs.add( + List.of( + new SortedNumericDocValuesField(AGGREGABLE_DATE, d), + new SortedSetDocValuesField("k1", aBytes), + new SortedSetDocValuesField("k1", d < useC ? bBytes : cBytes), + new Field("k1", aBytes, KeywordFieldMapper.Defaults.FIELD_TYPE), + new Field("k1", d < useC ? bBytes : cBytes, KeywordFieldMapper.Defaults.FIELD_TYPE), + new SortedNumericDocValuesField("n", n++) + ) + ); } /* * Intentionally add all documents at once to put them on the @@ -286,9 +300,12 @@ public void testAsSubAggWithIncreasedRounding() throws IOException { */ iw.addDocuments(docs); }; - AggregationBuilder builder = new TermsAggregationBuilder("k1").field("k1").subAggregation( - new AutoDateHistogramAggregationBuilder("dh").field(AGGREGABLE_DATE).setNumBuckets(4).subAggregation( - new MaxAggregationBuilder("max").field("n"))); + AggregationBuilder builder = new TermsAggregationBuilder("k1").field("k1") + .subAggregation( + new AutoDateHistogramAggregationBuilder("dh").field(AGGREGABLE_DATE) + .setNumBuckets(4) + .subAggregation(new MaxAggregationBuilder("max").field("n")) + ); asSubAggTestCase(builder, buildIndex, (StringTerms terms) -> { StringTerms.Bucket a = terms.getBucketByKey("a"); InternalAutoDateHistogram adh = a.getAggregations().get("dh"); @@ -337,10 +354,7 @@ public void testAsSubAggInManyBuckets() throws IOException { List> docs = new ArrayList<>(); int n = 0; for (long d = start; d < end; d += anHour) { - docs.add(List.of( - new SortedNumericDocValuesField(AGGREGABLE_DATE, d), - new SortedNumericDocValuesField("n", n % 100) - )); + docs.add(List.of(new SortedNumericDocValuesField(AGGREGABLE_DATE, d), new SortedNumericDocValuesField("n", n % 100))); n++; } /* @@ -349,12 +363,16 @@ public void testAsSubAggInManyBuckets() throws IOException { */ iw.addDocuments(docs); }; - AggregationBuilder builder = new HistogramAggregationBuilder("n").field("n").interval(1).subAggregation( - new AutoDateHistogramAggregationBuilder("dh").field(AGGREGABLE_DATE).setNumBuckets(4).subAggregation( - new MaxAggregationBuilder("max").field("n"))); + AggregationBuilder builder = new HistogramAggregationBuilder("n").field("n") + .interval(1) + .subAggregation( + new AutoDateHistogramAggregationBuilder("dh").field(AGGREGABLE_DATE) + .setNumBuckets(4) + .subAggregation(new MaxAggregationBuilder("max").field("n")) + ); asSubAggTestCase(builder, buildIndex, (InternalHistogram histo) -> { assertThat(histo.getBuckets(), hasSize(100)); - for (int n = 0; n < 100; n ++) { + for (int n = 0; n < 100; n++) { InternalHistogram.Bucket b = histo.getBuckets().get(n); InternalAutoDateHistogram dh = b.getAggregations().get("dh"); assertThat(bucketCountsAsMap(dh), hasEntry(equalTo("2020-01-01T00:00:00.000Z"), either(equalTo(21)).or(equalTo(22)))); @@ -375,61 +393,48 @@ public void testNoDocs() throws IOException { final List dates = Collections.emptyList(); final Consumer aggregation = agg -> agg.setNumBuckets(10).field(DATE_FIELD); - testSearchCase(DEFAULT_QUERY, dates, aggregation, - histogram -> { - assertEquals(0, 
histogram.getBuckets().size()); - assertFalse(AggregationInspectionHelper.hasValue(histogram)); - } - ); - testSearchCase(DEFAULT_QUERY, dates, aggregation, - histogram -> { - assertEquals(0, histogram.getBuckets().size()); - assertFalse(AggregationInspectionHelper.hasValue(histogram)); - } - ); + testSearchCase(DEFAULT_QUERY, dates, aggregation, histogram -> { + assertEquals(0, histogram.getBuckets().size()); + assertFalse(AggregationInspectionHelper.hasValue(histogram)); + }); + testSearchCase(DEFAULT_QUERY, dates, aggregation, histogram -> { + assertEquals(0, histogram.getBuckets().size()); + assertFalse(AggregationInspectionHelper.hasValue(histogram)); + }); } public void testAggregateWrongField() throws IOException { - AutoDateHistogramAggregationBuilder aggregation = new AutoDateHistogramAggregationBuilder("_name"). - setNumBuckets(10).field("bogus_bogus"); + AutoDateHistogramAggregationBuilder aggregation = new AutoDateHistogramAggregationBuilder("_name").setNumBuckets(10) + .field("bogus_bogus"); final DateFieldMapper.DateFieldType fieldType = new DateFieldMapper.DateFieldType("date_field"); - testCase(aggregation, DEFAULT_QUERY, - iw -> {}, - (Consumer) histogram -> { - assertEquals(0, histogram.getBuckets().size()); - assertFalse(AggregationInspectionHelper.hasValue(histogram)); - }, fieldType); + testCase(aggregation, DEFAULT_QUERY, iw -> {}, (Consumer) histogram -> { + assertEquals(0, histogram.getBuckets().size()); + assertFalse(AggregationInspectionHelper.hasValue(histogram)); + }, fieldType); } public void testBooleanFieldDeprecated() throws IOException { final String fieldName = "bogusBoolean"; - testCase( - new AutoDateHistogramAggregationBuilder("name").field(fieldName), - new MatchAllDocsQuery(), - iw -> { - Document d = new Document(); - d.add(new SortedNumericDocValuesField(fieldName, 0)); - iw.addDocument(d); - }, - a -> {}, - new BooleanFieldMapper.BooleanFieldType(fieldName) - ); + testCase(new AutoDateHistogramAggregationBuilder("name").field(fieldName), new MatchAllDocsQuery(), iw -> { + Document d = new Document(); + d.add(new SortedNumericDocValuesField(fieldName, 0)); + iw.addDocument(d); + }, a -> {}, new BooleanFieldMapper.BooleanFieldType(fieldName)); assertWarnings("Running AutoIntervalDateHistogram aggregations on [boolean] fields is deprecated"); } public void testUnmappedMissing() throws IOException { - AutoDateHistogramAggregationBuilder aggregation = new AutoDateHistogramAggregationBuilder("_name"). 
- setNumBuckets(10).field("bogus_bogus").missing("2017-12-12"); + AutoDateHistogramAggregationBuilder aggregation = new AutoDateHistogramAggregationBuilder("_name").setNumBuckets(10) + .field("bogus_bogus") + .missing("2017-12-12"); final DateFieldMapper.DateFieldType fieldType = new DateFieldMapper.DateFieldType("date_field"); - testCase(aggregation, DEFAULT_QUERY, - iw -> {}, - (Consumer) histogram -> { - assertEquals(0, histogram.getBuckets().size()); - assertFalse(AggregationInspectionHelper.hasValue(histogram)); + testCase(aggregation, DEFAULT_QUERY, iw -> {}, (Consumer) histogram -> { + assertEquals(0, histogram.getBuckets().size()); + assertFalse(AggregationInspectionHelper.hasValue(histogram)); }, fieldType); } @@ -437,21 +442,17 @@ public void testIntervalYear() throws IOException { final long start = LocalDate.of(2015, 1, 1).atStartOfDay(ZoneOffset.UTC).toInstant().toEpochMilli(); final long end = LocalDate.of(2017, 12, 31).atStartOfDay(ZoneOffset.UTC).toInstant().toEpochMilli(); final Query rangeQuery = LongPoint.newRangeQuery(INSTANT_FIELD, start, end); - testSearchCase(rangeQuery, DATES_WITH_TIME, - aggregation -> aggregation.setNumBuckets(4).field(DATE_FIELD), - histogram -> { - final ZonedDateTime startDate = ZonedDateTime.of(2015, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC); - final Map expectedDocCount = new HashMap<>(); - expectedDocCount.put(startDate, 3); - expectedDocCount.put(startDate.plusYears(1), 1); - expectedDocCount.put(startDate.plusYears(2), 1); - final List buckets = histogram.getBuckets(); - assertEquals(expectedDocCount.size(), buckets.size()); - buckets.forEach(bucket -> - assertEquals(expectedDocCount.getOrDefault(bucket.getKey(), 0).longValue(), bucket.getDocCount())); - assertTrue(AggregationInspectionHelper.hasValue(histogram)); - } - ); + testSearchCase(rangeQuery, DATES_WITH_TIME, aggregation -> aggregation.setNumBuckets(4).field(DATE_FIELD), histogram -> { + final ZonedDateTime startDate = ZonedDateTime.of(2015, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC); + final Map expectedDocCount = new HashMap<>(); + expectedDocCount.put(startDate, 3); + expectedDocCount.put(startDate.plusYears(1), 1); + expectedDocCount.put(startDate.plusYears(2), 1); + final List buckets = histogram.getBuckets(); + assertEquals(expectedDocCount.size(), buckets.size()); + buckets.forEach(bucket -> assertEquals(expectedDocCount.getOrDefault(bucket.getKey(), 0).longValue(), bucket.getDocCount())); + assertTrue(AggregationInspectionHelper.hasValue(histogram)); + }); } public void testIntervalMonth() throws IOException { @@ -461,24 +462,31 @@ public void testIntervalMonth() throws IOException { ZonedDateTime.of(2017, 2, 3, 0, 0, 0, 0, ZoneOffset.UTC), ZonedDateTime.of(2017, 3, 4, 0, 0, 0, 0, ZoneOffset.UTC), ZonedDateTime.of(2017, 3, 5, 0, 0, 0, 0, ZoneOffset.UTC), - ZonedDateTime.of(2017, 3, 6, 0, 0, 0, 0, ZoneOffset.UTC)); + ZonedDateTime.of(2017, 3, 6, 0, 0, 0, 0, ZoneOffset.UTC) + ); Map expectedDocCount = new TreeMap<>(); expectedDocCount.put("2017-01-01T00:00:00.000Z", 1); expectedDocCount.put("2017-02-01T00:00:00.000Z", 2); expectedDocCount.put("2017-03-01T00:00:00.000Z", 3); - testSearchCase(DEFAULT_QUERY, datesForMonthInterval, + testSearchCase( + DEFAULT_QUERY, + datesForMonthInterval, aggregation -> aggregation.setNumBuckets(4).field(DATE_FIELD), result -> assertThat(bucketCountsAsMap(result), equalTo(expectedDocCount)) ); } public void testWithLargeNumberOfBuckets() { - final IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, - () -> 
testSearchCase(DEFAULT_QUERY, DATES_WITH_TIME, + final IllegalArgumentException exception = expectThrows( + IllegalArgumentException.class, + () -> testSearchCase( + DEFAULT_QUERY, + DATES_WITH_TIME, aggregation -> aggregation.setNumBuckets(MultiBucketConsumerService.DEFAULT_MAX_BUCKETS + 1).field(DATE_FIELD), // since an exception is thrown, this assertion won't be invoked. histogram -> fail() - )); + ) + ); assertThat(exception.getMessage(), Matchers.containsString("must be less than")); } @@ -490,14 +498,17 @@ public void testIntervalDay() throws IOException { ZonedDateTime.of(2017, 2, 3, 0, 0, 0, 0, ZoneOffset.UTC), ZonedDateTime.of(2017, 2, 3, 0, 0, 0, 0, ZoneOffset.UTC), ZonedDateTime.of(2017, 2, 3, 0, 0, 0, 0, ZoneOffset.UTC), - ZonedDateTime.of(2017, 2, 5, 0, 0, 0, 0, ZoneOffset.UTC)); + ZonedDateTime.of(2017, 2, 5, 0, 0, 0, 0, ZoneOffset.UTC) + ); Map expectedDocCount = new TreeMap<>(); expectedDocCount.put("2017-02-01T00:00:00.000Z", 1); expectedDocCount.put("2017-02-02T00:00:00.000Z", 2); expectedDocCount.put("2017-02-03T00:00:00.000Z", 3); expectedDocCount.put("2017-02-05T00:00:00.000Z", 1); expectedDocCount.put("2017-02-04T00:00:00.000Z", 0); - testSearchCase(DEFAULT_QUERY, datesForDayInterval, + testSearchCase( + DEFAULT_QUERY, + datesForDayInterval, aggregation -> aggregation.setNumBuckets(5).field(DATE_FIELD), result -> assertThat(bucketCountsAsMap(result), equalTo(expectedDocCount)) ); @@ -511,17 +522,20 @@ public void testIntervalDayWithTZ() throws IOException { ZonedDateTime.of(2017, 2, 3, 0, 0, 0, 0, ZoneOffset.UTC), ZonedDateTime.of(2017, 2, 3, 0, 0, 0, 0, ZoneOffset.UTC), ZonedDateTime.of(2017, 2, 3, 0, 0, 0, 0, ZoneOffset.UTC), - ZonedDateTime.of(2017, 2, 5, 0, 0, 0, 0, ZoneOffset.UTC)); + ZonedDateTime.of(2017, 2, 5, 0, 0, 0, 0, ZoneOffset.UTC) + ); Map expectedDocCount = new TreeMap<>(); expectedDocCount.put("2017-01-31T00:00:00.000-01:00", 1); expectedDocCount.put("2017-02-01T00:00:00.000-01:00", 2); expectedDocCount.put("2017-02-02T00:00:00.000-01:00", 3); expectedDocCount.put("2017-02-04T00:00:00.000-01:00", 1); expectedDocCount.put("2017-02-03T00:00:00.000-01:00", 0); - testSearchCase(DEFAULT_QUERY, datesForDayInterval, + testSearchCase( + DEFAULT_QUERY, + datesForDayInterval, aggregation -> aggregation.setNumBuckets(5).field(DATE_FIELD).timeZone(ZoneOffset.ofHours(-1)), result -> assertThat(bucketCountsAsMap(result), equalTo(expectedDocCount)) - ); + ); } public void testIntervalHour() throws IOException { @@ -535,7 +549,8 @@ public void testIntervalHour() throws IOException { ZonedDateTime.of(2017, 2, 1, 15, 59, 0, 0, ZoneOffset.UTC), ZonedDateTime.of(2017, 2, 1, 16, 6, 0, 0, ZoneOffset.UTC), ZonedDateTime.of(2017, 2, 1, 16, 48, 0, 0, ZoneOffset.UTC), - ZonedDateTime.of(2017, 2, 1, 16, 59, 0, 0, ZoneOffset.UTC)); + ZonedDateTime.of(2017, 2, 1, 16, 59, 0, 0, ZoneOffset.UTC) + ); Map expectedDocCount = new TreeMap<>(); expectedDocCount.put("2017-02-01T09:00:00.000Z", 2); expectedDocCount.put("2017-02-01T10:00:00.000Z", 1); @@ -546,7 +561,9 @@ public void testIntervalHour() throws IOException { expectedDocCount.put("2017-02-01T16:00:00.000Z", 3); expectedDocCount.put("2017-02-01T11:00:00.000Z", 0); expectedDocCount.put("2017-02-01T12:00:00.000Z", 0); - testSearchCase(DEFAULT_QUERY, datesForHourInterval, + testSearchCase( + DEFAULT_QUERY, + datesForHourInterval, aggregation -> aggregation.setNumBuckets(10).field(DATE_FIELD), result -> assertThat(bucketCountsAsMap(result), equalTo(expectedDocCount)) ); @@ -554,7 +571,9 @@ public void testIntervalHour() throws 
IOException { expectedDocCount.put("2017-02-01T09:00:00.000Z", 3); expectedDocCount.put("2017-02-01T12:00:00.000Z", 3); expectedDocCount.put("2017-02-01T15:00:00.000Z", 4); - testSearchCase(DEFAULT_QUERY, datesForHourInterval, + testSearchCase( + DEFAULT_QUERY, + datesForHourInterval, aggregation -> aggregation.setNumBuckets(6).field(DATE_FIELD), result -> assertThat(bucketCountsAsMap(result), equalTo(expectedDocCount)) ); @@ -571,7 +590,8 @@ public void testIntervalHourWithTZ() throws IOException { ZonedDateTime.of(2017, 2, 1, 15, 59, 0, 0, ZoneOffset.UTC), ZonedDateTime.of(2017, 2, 1, 16, 6, 0, 0, ZoneOffset.UTC), ZonedDateTime.of(2017, 2, 1, 16, 48, 0, 0, ZoneOffset.UTC), - ZonedDateTime.of(2017, 2, 1, 16, 59, 0, 0, ZoneOffset.UTC)); + ZonedDateTime.of(2017, 2, 1, 16, 59, 0, 0, ZoneOffset.UTC) + ); Map expectedDocCount = new TreeMap<>(); expectedDocCount.put("2017-02-01T08:00:00.000-01:00", 2); expectedDocCount.put("2017-02-01T09:00:00.000-01:00", 1); @@ -581,7 +601,9 @@ public void testIntervalHourWithTZ() throws IOException { expectedDocCount.put("2017-02-01T15:00:00.000-01:00", 3); expectedDocCount.put("2017-02-01T10:00:00.000-01:00", 0); expectedDocCount.put("2017-02-01T11:00:00.000-01:00", 0); - testSearchCase(DEFAULT_QUERY, datesForHourInterval, + testSearchCase( + DEFAULT_QUERY, + datesForHourInterval, aggregation -> aggregation.setNumBuckets(10).field(DATE_FIELD).timeZone(ZoneOffset.ofHours(-1)), result -> assertThat(bucketCountsAsMap(result), equalTo(expectedDocCount)) ); @@ -602,7 +624,9 @@ public void testRandomSecondIntervals() throws IOException { bucketsToExpectedDocCountMap.put(10, 30); bucketsToExpectedDocCountMap.put(3, 60); final Map.Entry randomEntry = randomFrom(bucketsToExpectedDocCountMap.entrySet()); - testSearchCase(DEFAULT_QUERY, dataset, + testSearchCase( + DEFAULT_QUERY, + dataset, aggregation -> aggregation.setNumBuckets(randomEntry.getKey()).field(DATE_FIELD), histogram -> { final List buckets = histogram.getBuckets(); @@ -613,7 +637,8 @@ public void testRandomSecondIntervals() throws IOException { final Histogram.Bucket bucket = buckets.get(randomIndex); assertEquals(startDate.plusSeconds(randomIndex * expectedDocCount), bucket.getKey()); assertEquals(expectedDocCount, bucket.getDocCount()); - }); + } + ); } public void testRandomMinuteIntervals() throws IOException { @@ -631,7 +656,9 @@ public void testRandomMinuteIntervals() throws IOException { bucketsToExpectedDocCountMap.put(10, 30); bucketsToExpectedDocCountMap.put(3, 60); final Map.Entry randomEntry = randomFrom(bucketsToExpectedDocCountMap.entrySet()); - testSearchCase(DEFAULT_QUERY, dataset, + testSearchCase( + DEFAULT_QUERY, + dataset, aggregation -> aggregation.setNumBuckets(randomEntry.getKey()).field(DATE_FIELD), histogram -> { final List buckets = histogram.getBuckets(); @@ -642,7 +669,8 @@ public void testRandomMinuteIntervals() throws IOException { final Histogram.Bucket bucket = buckets.get(randomIndex); assertEquals(startDate.plusMinutes(randomIndex * expectedDocCount), bucket.getKey()); assertEquals(expectedDocCount, bucket.getDocCount()); - }); + } + ); } public void testRandomHourIntervals() throws IOException { @@ -659,7 +687,9 @@ public void testRandomHourIntervals() throws IOException { bucketsToExpectedDocCountMap.put(12, 12); bucketsToExpectedDocCountMap.put(3, 24); final Map.Entry randomEntry = randomFrom(bucketsToExpectedDocCountMap.entrySet()); - testSearchCase(DEFAULT_QUERY, dataset, + testSearchCase( + DEFAULT_QUERY, + dataset, aggregation -> 
aggregation.setNumBuckets(randomEntry.getKey()).field(DATE_FIELD), histogram -> { final List buckets = histogram.getBuckets(); @@ -670,7 +700,8 @@ public void testRandomHourIntervals() throws IOException { final Histogram.Bucket bucket = buckets.get(randomIndex); assertEquals(startDate.plusHours(randomIndex * expectedDocCount), bucket.getKey()); assertEquals(expectedDocCount, bucket.getDocCount()); - }); + } + ); } public void testRandomDayIntervals() throws IOException { @@ -683,39 +714,33 @@ public void testRandomDayIntervals() throws IOException { } final int randomChoice = randomIntBetween(1, 3); if (randomChoice == 1) { - testSearchCase(DEFAULT_QUERY, dataset, - aggregation -> aggregation.setNumBuckets(length).field(DATE_FIELD), - histogram -> { - final List buckets = histogram.getBuckets(); - assertEquals(length, buckets.size()); - final int randomIndex = randomInt(length - 1); - final Histogram.Bucket bucket = buckets.get(randomIndex); - assertEquals(startDate.plusDays(randomIndex), bucket.getKey()); - assertEquals(1, bucket.getDocCount()); - }); + testSearchCase(DEFAULT_QUERY, dataset, aggregation -> aggregation.setNumBuckets(length).field(DATE_FIELD), histogram -> { + final List buckets = histogram.getBuckets(); + assertEquals(length, buckets.size()); + final int randomIndex = randomInt(length - 1); + final Histogram.Bucket bucket = buckets.get(randomIndex); + assertEquals(startDate.plusDays(randomIndex), bucket.getKey()); + assertEquals(1, bucket.getDocCount()); + }); } else if (randomChoice == 2) { - testSearchCase(DEFAULT_QUERY, dataset, - aggregation -> aggregation.setNumBuckets(60).field(DATE_FIELD), - histogram -> { - final List buckets = histogram.getBuckets(); - final int expectedDocCount = 7; - assertEquals(20, buckets.size()); - final int randomIndex = randomInt(19); - final Histogram.Bucket bucket = buckets.get(randomIndex); - assertEquals(startDate.plusDays(randomIndex * expectedDocCount), bucket.getKey()); - assertEquals(expectedDocCount, bucket.getDocCount()); - }); + testSearchCase(DEFAULT_QUERY, dataset, aggregation -> aggregation.setNumBuckets(60).field(DATE_FIELD), histogram -> { + final List buckets = histogram.getBuckets(); + final int expectedDocCount = 7; + assertEquals(20, buckets.size()); + final int randomIndex = randomInt(19); + final Histogram.Bucket bucket = buckets.get(randomIndex); + assertEquals(startDate.plusDays(randomIndex * expectedDocCount), bucket.getKey()); + assertEquals(expectedDocCount, bucket.getDocCount()); + }); } else if (randomChoice == 3) { - testSearchCase(DEFAULT_QUERY, dataset, - aggregation -> aggregation.setNumBuckets(6).field(DATE_FIELD), - histogram -> { - final List buckets = histogram.getBuckets(); - assertEquals(5, buckets.size()); - final int randomIndex = randomInt(2); - final Histogram.Bucket bucket = buckets.get(randomIndex); - assertEquals(startDate.plusMonths(randomIndex), bucket.getKey()); - assertEquals(YearMonth.from(startDate.plusMonths(randomIndex)).lengthOfMonth(), bucket.getDocCount()); - }); + testSearchCase(DEFAULT_QUERY, dataset, aggregation -> aggregation.setNumBuckets(6).field(DATE_FIELD), histogram -> { + final List buckets = histogram.getBuckets(); + assertEquals(5, buckets.size()); + final int randomIndex = randomInt(2); + final Histogram.Bucket bucket = buckets.get(randomIndex); + assertEquals(startDate.plusMonths(randomIndex), bucket.getKey()); + assertEquals(YearMonth.from(startDate.plusMonths(randomIndex)).lengthOfMonth(), bucket.getDocCount()); + }); } } @@ -732,7 +757,9 @@ public void 
testRandomMonthIntervals() throws IOException { bucketsToExpectedDocCountMap.put(30, 3); bucketsToExpectedDocCountMap.put(6, 12); final Map.Entry randomEntry = randomFrom(bucketsToExpectedDocCountMap.entrySet()); - testSearchCase(DEFAULT_QUERY, dataset, + testSearchCase( + DEFAULT_QUERY, + dataset, aggregation -> aggregation.setNumBuckets(randomEntry.getKey()).field(DATE_FIELD), histogram -> { final List buckets = histogram.getBuckets(); @@ -743,7 +770,8 @@ public void testRandomMonthIntervals() throws IOException { final Histogram.Bucket bucket = buckets.get(randomIndex); assertEquals(startDate.plusMonths(randomIndex * expectedDocCount), bucket.getKey()); assertEquals(expectedDocCount, bucket.getDocCount()); - }); + } + ); } public void testRandomYearIntervals() throws IOException { @@ -762,7 +790,9 @@ public void testRandomYearIntervals() throws IOException { bucketsToExpectedDocCountMap.put(10, 50); bucketsToExpectedDocCountMap.put(5, 100); final Map.Entry randomEntry = randomFrom(bucketsToExpectedDocCountMap.entrySet()); - testSearchCase(DEFAULT_QUERY, dataset, + testSearchCase( + DEFAULT_QUERY, + dataset, aggregation -> aggregation.setNumBuckets(randomEntry.getKey()).field(DATE_FIELD), histogram -> { final List buckets = histogram.getBuckets(); @@ -773,7 +803,8 @@ public void testRandomYearIntervals() throws IOException { final Histogram.Bucket bucket = buckets.get(randomIndex); assertEquals(startDate.plusYears(randomIndex * expectedDocCount), bucket.getKey()); assertEquals(expectedDocCount, bucket.getDocCount()); - }); + } + ); } public void testIntervalMinute() throws IOException { @@ -782,7 +813,8 @@ public void testIntervalMinute() throws IOException { ZonedDateTime.of(2017, 2, 1, 9, 2, 59, 0, ZoneOffset.UTC), ZonedDateTime.of(2017, 2, 1, 9, 15, 37, 0, ZoneOffset.UTC), ZonedDateTime.of(2017, 2, 1, 9, 16, 4, 0, ZoneOffset.UTC), - ZonedDateTime.of(2017, 2, 1, 9, 16, 42, 0, ZoneOffset.UTC)); + ZonedDateTime.of(2017, 2, 1, 9, 16, 42, 0, ZoneOffset.UTC) + ); Map skeletonDocCount = new TreeMap<>(); skeletonDocCount.put("2017-02-01T09:02:00.000Z", 2); skeletonDocCount.put("2017-02-01T09:15:00.000Z", 1); @@ -791,7 +823,9 @@ public void testIntervalMinute() throws IOException { fullDocCount.put("2017-02-01T09:02:00.000Z", 2); fullDocCount.put("2017-02-01T09:07:00.000Z", 0); fullDocCount.put("2017-02-01T09:12:00.000Z", 3); - testSearchCase(DEFAULT_QUERY, datesForMinuteInterval, + testSearchCase( + DEFAULT_QUERY, + datesForMinuteInterval, aggregation -> aggregation.setNumBuckets(4).field(DATE_FIELD), result -> assertThat(bucketCountsAsMap(result), equalTo(fullDocCount)) ); @@ -800,7 +834,9 @@ public void testIntervalMinute() throws IOException { for (int minute = 3; minute < 15; minute++) { fullDocCount.put(String.format(Locale.ROOT, "2017-02-01T09:%02d:00.000Z", minute), 0); } - testSearchCase(DEFAULT_QUERY, datesForMinuteInterval, + testSearchCase( + DEFAULT_QUERY, + datesForMinuteInterval, aggregation -> aggregation.setNumBuckets(15).field(DATE_FIELD), result -> assertThat(bucketCountsAsMap(result), equalTo(fullDocCount)) ); @@ -813,7 +849,8 @@ public void testIntervalSecond() throws IOException { ZonedDateTime.of(2017, 2, 1, 0, 0, 7, 74, ZoneOffset.UTC), ZonedDateTime.of(2017, 2, 1, 0, 0, 11, 688, ZoneOffset.UTC), ZonedDateTime.of(2017, 2, 1, 0, 0, 11, 210, ZoneOffset.UTC), - ZonedDateTime.of(2017, 2, 1, 0, 0, 11, 380, ZoneOffset.UTC)); + ZonedDateTime.of(2017, 2, 1, 0, 0, 11, 380, ZoneOffset.UTC) + ); Map expectedDocCount = new TreeMap<>(); 
expectedDocCount.put("2017-02-01T00:00:05.000Z", 1); expectedDocCount.put("2017-02-01T00:00:07.000Z", 2); @@ -822,18 +859,27 @@ public void testIntervalSecond() throws IOException { expectedDocCount.put("2017-02-01T00:00:08.000Z", 0); expectedDocCount.put("2017-02-01T00:00:09.000Z", 0); expectedDocCount.put("2017-02-01T00:00:10.000Z", 0); - testSearchCase(DEFAULT_QUERY, datesForSecondInterval, + testSearchCase( + DEFAULT_QUERY, + datesForSecondInterval, aggregation -> aggregation.setNumBuckets(7).field(DATE_FIELD), result -> assertThat(bucketCountsAsMap(result), equalTo(expectedDocCount)) ); } public void testWithPipelineReductions() throws IOException { - testSearchCase(DEFAULT_QUERY, DATES_WITH_TIME, - aggregation -> aggregation.setNumBuckets(1).field(DATE_FIELD) - .subAggregation(AggregationBuilders.histogram("histo").field(NUMERIC_FIELD).interval(1) - .subAggregation(AggregationBuilders.max("max").field(NUMERIC_FIELD)) - .subAggregation(new DerivativePipelineAggregationBuilder("deriv", "max"))), + testSearchCase( + DEFAULT_QUERY, + DATES_WITH_TIME, + aggregation -> aggregation.setNumBuckets(1) + .field(DATE_FIELD) + .subAggregation( + AggregationBuilders.histogram("histo") + .field(NUMERIC_FIELD) + .interval(1) + .subAggregation(AggregationBuilders.max("max").field(NUMERIC_FIELD)) + .subAggregation(new DerivativePipelineAggregationBuilder("deriv", "max")) + ), histogram -> { assertTrue(AggregationInspectionHelper.hasValue(histogram)); final List buckets = histogram.getBuckets(); @@ -846,23 +892,23 @@ public void testWithPipelineReductions() throws IOException { InternalHistogram histo = (InternalHistogram) bucket.getAggregations().asList().get(0); assertThat(histo.getBuckets().size(), equalTo(10)); for (int i = 0; i < 10; i++) { - assertThat(histo.getBuckets().get(i).key, equalTo((double)i)); - assertThat(((InternalMax)histo.getBuckets().get(i).aggregations.get("max")).getValue(), equalTo((double)i)); + assertThat(histo.getBuckets().get(i).key, equalTo((double) i)); + assertThat(((InternalMax) histo.getBuckets().get(i).aggregations.get("max")).getValue(), equalTo((double) i)); if (i > 0) { - assertThat(((InternalSimpleValue)histo.getBuckets().get(i).aggregations.get("deriv")).getValue(), equalTo(1.0)); + assertThat(((InternalSimpleValue) histo.getBuckets().get(i).aggregations.get("deriv")).getValue(), equalTo(1.0)); } } - - }); + } + ); } @Override protected IndexSettings createIndexSettings() { - final Settings nodeSettings = Settings.builder() - .put("search.max_buckets", 25000).build(); + final Settings nodeSettings = Settings.builder().put("search.max_buckets", 25000).build(); return new IndexSettings( - IndexMetadata.builder("_index").settings(Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT)) + IndexMetadata.builder("_index") + .settings(Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT)) .numberOfShards(1) .numberOfReplicas(0) .creationDate(System.currentTimeMillis()) @@ -871,9 +917,12 @@ protected IndexSettings createIndexSettings() { ); } - private void testSearchCase(final Query query, final List dataset, - final Consumer configure, - final Consumer verify) throws IOException { + private void testSearchCase( + final Query query, + final List dataset, + final Consumer configure, + final Consumer verify + ) throws IOException { try (Directory directory = newDirectory()) { try (RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory)) { indexSampleData(dataset, indexWriter); @@ -889,13 +938,17 @@ private 
void testSearchCase(final Query query, final List dataset final DateFieldMapper.DateFieldType fieldType = new DateFieldMapper.DateFieldType(aggregationBuilder.field()); - MappedFieldType instantFieldType - = new NumberFieldMapper.NumberFieldType(INSTANT_FIELD, NumberFieldMapper.NumberType.LONG); - MappedFieldType numericFieldType - = new NumberFieldMapper.NumberFieldType(NUMERIC_FIELD, NumberFieldMapper.NumberType.LONG); - - final InternalAutoDateHistogram histogram = - searchAndReduce(indexSearcher, query, aggregationBuilder, fieldType, instantFieldType, numericFieldType); + MappedFieldType instantFieldType = new NumberFieldMapper.NumberFieldType(INSTANT_FIELD, NumberFieldMapper.NumberType.LONG); + MappedFieldType numericFieldType = new NumberFieldMapper.NumberFieldType(NUMERIC_FIELD, NumberFieldMapper.NumberType.LONG); + + final InternalAutoDateHistogram histogram = searchAndReduce( + indexSearcher, + query, + aggregationBuilder, + fieldType, + instantFieldType, + numericFieldType + ); verify.accept(histogram); } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregatorTestCase.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregatorTestCase.java index 25d52a2baad3d..d758660020e35 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregatorTestCase.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregatorTestCase.java @@ -39,48 +39,58 @@ public abstract class DateHistogramAggregatorTestCase extends AggregatorTestCase protected static final String AGGREGABLE_DATE = "aggregable_date"; protected final void asSubAggTestCase(AggregationBuilder builder, Consumer verify) - throws IOException { + throws IOException { CheckedBiConsumer buildIndex = (iw, dft) -> { - iw.addDocument(List.of( - new SortedNumericDocValuesField(AGGREGABLE_DATE, dft.parse("2020-02-01T00:00:00Z")), - new SortedSetDocValuesField("k1", new BytesRef("a")), - new Field("k1", new BytesRef("a"), KeywordFieldMapper.Defaults.FIELD_TYPE), - new SortedSetDocValuesField("k2", new BytesRef("a")), - new Field("k2", new BytesRef("a"), KeywordFieldMapper.Defaults.FIELD_TYPE), - new SortedNumericDocValuesField("n", 1) - )); - iw.addDocument(List.of( - new SortedNumericDocValuesField(AGGREGABLE_DATE, dft.parse("2020-03-01T00:00:00Z")), - new SortedSetDocValuesField("k1", new BytesRef("a")), - new Field("k1", new BytesRef("a"), KeywordFieldMapper.Defaults.FIELD_TYPE), - new SortedSetDocValuesField("k2", new BytesRef("a")), - new Field("k2", new BytesRef("a"), KeywordFieldMapper.Defaults.FIELD_TYPE), - new SortedNumericDocValuesField("n", 2) - )); - iw.addDocument(List.of( - new SortedNumericDocValuesField(AGGREGABLE_DATE, dft.parse("2021-02-01T00:00:00Z")), - new SortedSetDocValuesField("k1", new BytesRef("a")), - new Field("k1", new BytesRef("a"), KeywordFieldMapper.Defaults.FIELD_TYPE), - new SortedSetDocValuesField("k2", new BytesRef("a")), - new Field("k2", new BytesRef("a"), KeywordFieldMapper.Defaults.FIELD_TYPE), - new SortedNumericDocValuesField("n", 3) - )); - iw.addDocument(List.of( - new SortedNumericDocValuesField(AGGREGABLE_DATE, dft.parse("2021-03-01T00:00:00Z")), - new SortedSetDocValuesField("k1", new BytesRef("a")), - new Field("k1", new BytesRef("a"), KeywordFieldMapper.Defaults.FIELD_TYPE), - new SortedSetDocValuesField("k2", new BytesRef("b")), - new Field("k2", new BytesRef("b"), 
KeywordFieldMapper.Defaults.FIELD_TYPE), - new SortedNumericDocValuesField("n", 4) - )); - iw.addDocument(List.of( - new SortedNumericDocValuesField(AGGREGABLE_DATE, dft.parse("2020-02-01T00:00:00Z")), - new SortedSetDocValuesField("k1", new BytesRef("b")), - new Field("k1", new BytesRef("b"), KeywordFieldMapper.Defaults.FIELD_TYPE), - new SortedSetDocValuesField("k2", new BytesRef("b")), - new Field("k2", new BytesRef("b"), KeywordFieldMapper.Defaults.FIELD_TYPE), - new SortedNumericDocValuesField("n", 5) - )); + iw.addDocument( + List.of( + new SortedNumericDocValuesField(AGGREGABLE_DATE, dft.parse("2020-02-01T00:00:00Z")), + new SortedSetDocValuesField("k1", new BytesRef("a")), + new Field("k1", new BytesRef("a"), KeywordFieldMapper.Defaults.FIELD_TYPE), + new SortedSetDocValuesField("k2", new BytesRef("a")), + new Field("k2", new BytesRef("a"), KeywordFieldMapper.Defaults.FIELD_TYPE), + new SortedNumericDocValuesField("n", 1) + ) + ); + iw.addDocument( + List.of( + new SortedNumericDocValuesField(AGGREGABLE_DATE, dft.parse("2020-03-01T00:00:00Z")), + new SortedSetDocValuesField("k1", new BytesRef("a")), + new Field("k1", new BytesRef("a"), KeywordFieldMapper.Defaults.FIELD_TYPE), + new SortedSetDocValuesField("k2", new BytesRef("a")), + new Field("k2", new BytesRef("a"), KeywordFieldMapper.Defaults.FIELD_TYPE), + new SortedNumericDocValuesField("n", 2) + ) + ); + iw.addDocument( + List.of( + new SortedNumericDocValuesField(AGGREGABLE_DATE, dft.parse("2021-02-01T00:00:00Z")), + new SortedSetDocValuesField("k1", new BytesRef("a")), + new Field("k1", new BytesRef("a"), KeywordFieldMapper.Defaults.FIELD_TYPE), + new SortedSetDocValuesField("k2", new BytesRef("a")), + new Field("k2", new BytesRef("a"), KeywordFieldMapper.Defaults.FIELD_TYPE), + new SortedNumericDocValuesField("n", 3) + ) + ); + iw.addDocument( + List.of( + new SortedNumericDocValuesField(AGGREGABLE_DATE, dft.parse("2021-03-01T00:00:00Z")), + new SortedSetDocValuesField("k1", new BytesRef("a")), + new Field("k1", new BytesRef("a"), KeywordFieldMapper.Defaults.FIELD_TYPE), + new SortedSetDocValuesField("k2", new BytesRef("b")), + new Field("k2", new BytesRef("b"), KeywordFieldMapper.Defaults.FIELD_TYPE), + new SortedNumericDocValuesField("n", 4) + ) + ); + iw.addDocument( + List.of( + new SortedNumericDocValuesField(AGGREGABLE_DATE, dft.parse("2020-02-01T00:00:00Z")), + new SortedSetDocValuesField("k1", new BytesRef("b")), + new Field("k1", new BytesRef("b"), KeywordFieldMapper.Defaults.FIELD_TYPE), + new SortedSetDocValuesField("k2", new BytesRef("b")), + new Field("k2", new BytesRef("b"), KeywordFieldMapper.Defaults.FIELD_TYPE), + new SortedNumericDocValuesField("n", 5) + ) + ); }; asSubAggTestCase(builder, buildIndex, verify); } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregatorTests.java index 42be5f162da80..9d96c8fbcf177 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregatorTests.java @@ -63,16 +63,17 @@ public class DateHistogramAggregatorTests extends DateHistogramAggregatorTestCas private static final String SEARCHABLE_DATE = "searchable_date"; private static final List DATASET = Arrays.asList( - "2010-03-12T01:07:45", - "2010-04-27T03:43:34", - "2012-05-18T04:11:00", - 
"2013-05-29T05:11:31", - "2013-10-31T08:24:05", - "2015-02-13T13:09:32", - "2015-06-24T13:47:43", - "2015-11-13T16:14:34", - "2016-03-04T17:09:50", - "2017-12-12T22:55:46"); + "2010-03-12T01:07:45", + "2010-04-27T03:43:34", + "2012-05-18T04:11:00", + "2013-05-29T05:11:31", + "2013-10-31T08:24:05", + "2015-02-13T13:09:32", + "2015-06-24T13:47:43", + "2015-11-13T16:14:34", + "2016-03-04T17:09:50", + "2017-12-12T22:55:46" + ); public void testBooleanFieldDeprecated() throws IOException { final String fieldName = "bogusBoolean"; @@ -91,13 +92,19 @@ public void testBooleanFieldDeprecated() throws IOException { } public void testMatchNoDocs() throws IOException { - testSearchCase(new MatchNoDocsQuery(), DATASET, + testSearchCase( + new MatchNoDocsQuery(), + DATASET, aggregation -> aggregation.calendarInterval(DateHistogramInterval.YEAR).field(AGGREGABLE_DATE), - histogram -> assertEquals(0, histogram.getBuckets().size()), false + histogram -> assertEquals(0, histogram.getBuckets().size()), + false ); - testSearchCase(new MatchNoDocsQuery(), DATASET, + testSearchCase( + new MatchNoDocsQuery(), + DATASET, aggregation -> aggregation.fixedInterval(new DateHistogramInterval("365d")).field(AGGREGABLE_DATE), - histogram -> assertEquals(0, histogram.getBuckets().size()), false + histogram -> assertEquals(0, histogram.getBuckets().size()), + false ); } @@ -106,48 +113,66 @@ public void testMatchAllDocs() throws IOException { List foo = new ArrayList<>(); for (int i = 0; i < 1000; i++) { - foo.add(DATASET.get(randomIntBetween(0, DATASET.size()-1))); + foo.add(DATASET.get(randomIntBetween(0, DATASET.size() - 1))); } - testSearchCase(query, foo, + testSearchCase( + query, + foo, aggregation -> aggregation.fixedInterval(new DateHistogramInterval("365d")) - .field(AGGREGABLE_DATE).order(BucketOrder.count(false)), - histogram -> assertEquals(8, histogram.getBuckets().size()), false + .field(AGGREGABLE_DATE) + .order(BucketOrder.count(false)), + histogram -> assertEquals(8, histogram.getBuckets().size()), + false ); - testSearchCase(query, DATASET, + testSearchCase( + query, + DATASET, aggregation -> aggregation.calendarInterval(DateHistogramInterval.YEAR).field(AGGREGABLE_DATE), - histogram -> assertEquals(8, histogram.getBuckets().size()), false + histogram -> assertEquals(8, histogram.getBuckets().size()), + false ); - testSearchCase(query, DATASET, + testSearchCase( + query, + DATASET, aggregation -> aggregation.calendarInterval(DateHistogramInterval.YEAR).field(AGGREGABLE_DATE).minDocCount(1L), - histogram -> assertEquals(6, histogram.getBuckets().size()), false + histogram -> assertEquals(6, histogram.getBuckets().size()), + false ); - testSearchCase(query, DATASET, + testSearchCase( + query, + DATASET, aggregation -> aggregation.fixedInterval(new DateHistogramInterval("365d")).field(AGGREGABLE_DATE), - histogram -> assertEquals(8, histogram.getBuckets().size()), false + histogram -> assertEquals(8, histogram.getBuckets().size()), + false ); - testSearchCase(query, DATASET, + testSearchCase( + query, + DATASET, aggregation -> aggregation.fixedInterval(new DateHistogramInterval("365d")).field(AGGREGABLE_DATE).minDocCount(1L), - histogram -> assertEquals(6, histogram.getBuckets().size()), false + histogram -> assertEquals(6, histogram.getBuckets().size()), + false ); } public void testAsSubAgg() throws IOException { - AggregationBuilder builder = new TermsAggregationBuilder("k1").field("k1").subAggregation( - new 
DateHistogramAggregationBuilder("dh").field(AGGREGABLE_DATE).calendarInterval(DateHistogramInterval.YEAR)); + AggregationBuilder builder = new TermsAggregationBuilder("k1").field("k1") + .subAggregation(new DateHistogramAggregationBuilder("dh").field(AGGREGABLE_DATE).calendarInterval(DateHistogramInterval.YEAR)); asSubAggTestCase(builder, (StringTerms terms) -> { StringTerms.Bucket a = terms.getBucketByKey("a"); InternalDateHistogram adh = a.getAggregations().get("dh"); - assertThat(adh.getBuckets().stream().map(bucket -> bucket.getKey().toString()).collect(toList()), equalTo(List.of( - "2020-01-01T00:00Z", "2021-01-01T00:00Z" - ))); + assertThat( + adh.getBuckets().stream().map(bucket -> bucket.getKey().toString()).collect(toList()), + equalTo(List.of("2020-01-01T00:00Z", "2021-01-01T00:00Z")) + ); StringTerms.Bucket b = terms.getBucketByKey("b"); InternalDateHistogram bdh = b.getAggregations().get("dh"); - assertThat(bdh.getBuckets().stream().map(bucket -> bucket.getKey().toString()).collect(toList()), equalTo(List.of( - "2020-01-01T00:00Z" - ))); + assertThat( + bdh.getBuckets().stream().map(bucket -> bucket.getKey().toString()).collect(toList()), + equalTo(List.of("2020-01-01T00:00Z")) + ); }); builder = new TermsAggregationBuilder("k2").field("k2").subAggregation(builder); asSubAggTestCase(builder, (StringTerms terms) -> { @@ -155,60 +180,62 @@ public void testAsSubAgg() throws IOException { StringTerms ak1 = a.getAggregations().get("k1"); StringTerms.Bucket ak1a = ak1.getBucketByKey("a"); InternalDateHistogram ak1adh = ak1a.getAggregations().get("dh"); - assertThat(ak1adh.getBuckets().stream().map(bucket -> bucket.getKey().toString()).collect(toList()), equalTo(List.of( - "2020-01-01T00:00Z", "2021-01-01T00:00Z" - ))); + assertThat( + ak1adh.getBuckets().stream().map(bucket -> bucket.getKey().toString()).collect(toList()), + equalTo(List.of("2020-01-01T00:00Z", "2021-01-01T00:00Z")) + ); StringTerms.Bucket b = terms.getBucketByKey("b"); StringTerms bk1 = b.getAggregations().get("k1"); StringTerms.Bucket bk1a = bk1.getBucketByKey("a"); InternalDateHistogram bk1adh = bk1a.getAggregations().get("dh"); - assertThat(bk1adh.getBuckets().stream().map(bucket -> bucket.getKey().toString()).collect(toList()), equalTo(List.of( - "2021-01-01T00:00Z" - ))); + assertThat( + bk1adh.getBuckets().stream().map(bucket -> bucket.getKey().toString()).collect(toList()), + equalTo(List.of("2021-01-01T00:00Z")) + ); StringTerms.Bucket bk1b = bk1.getBucketByKey("b"); InternalDateHistogram bk1bdh = bk1b.getAggregations().get("dh"); - assertThat(bk1bdh.getBuckets().stream().map(bucket -> bucket.getKey().toString()).collect(toList()), equalTo(List.of( - "2020-01-01T00:00Z" - ))); + assertThat( + bk1bdh.getBuckets().stream().map(bucket -> bucket.getKey().toString()).collect(toList()), + equalTo(List.of("2020-01-01T00:00Z")) + ); }); } public void testNoDocs() throws IOException { Query query = new MatchNoDocsQuery(); List dates = Collections.emptyList(); - Consumer aggregation = agg -> - agg.calendarInterval(DateHistogramInterval.YEAR).field(AGGREGABLE_DATE); - testSearchCase(query, dates, aggregation, - histogram -> assertEquals(0, histogram.getBuckets().size()), false - ); - testSearchCase(query, dates, aggregation, - histogram -> assertEquals(0, histogram.getBuckets().size()), false - ); - - aggregation = agg -> - agg.fixedInterval(new DateHistogramInterval("365d")).field(AGGREGABLE_DATE); - testSearchCase(query, dates, aggregation, - histogram -> assertEquals(0, histogram.getBuckets().size()), false - ); - 
testSearchCase(query, dates, aggregation, - histogram -> assertEquals(0, histogram.getBuckets().size()), false - ); + Consumer aggregation = agg -> agg.calendarInterval(DateHistogramInterval.YEAR) + .field(AGGREGABLE_DATE); + testSearchCase(query, dates, aggregation, histogram -> assertEquals(0, histogram.getBuckets().size()), false); + testSearchCase(query, dates, aggregation, histogram -> assertEquals(0, histogram.getBuckets().size()), false); + + aggregation = agg -> agg.fixedInterval(new DateHistogramInterval("365d")).field(AGGREGABLE_DATE); + testSearchCase(query, dates, aggregation, histogram -> assertEquals(0, histogram.getBuckets().size()), false); + testSearchCase(query, dates, aggregation, histogram -> assertEquals(0, histogram.getBuckets().size()), false); } public void testAggregateWrongField() throws IOException { - testSearchCase(new MatchAllDocsQuery(), DATASET, + testSearchCase( + new MatchAllDocsQuery(), + DATASET, aggregation -> aggregation.calendarInterval(DateHistogramInterval.YEAR).field("wrong_field"), - histogram -> assertEquals(0, histogram.getBuckets().size()), false + histogram -> assertEquals(0, histogram.getBuckets().size()), + false ); - testSearchCase(new MatchAllDocsQuery(), DATASET, + testSearchCase( + new MatchAllDocsQuery(), + DATASET, aggregation -> aggregation.fixedInterval(new DateHistogramInterval("365d")).field("wrong_field"), - histogram -> assertEquals(0, histogram.getBuckets().size()), false + histogram -> assertEquals(0, histogram.getBuckets().size()), + false ); } public void testIntervalYear() throws IOException { - testSearchCase(LongPoint.newRangeQuery(SEARCHABLE_DATE, asLong("2015-01-01"), asLong("2017-12-31")), DATASET, + testSearchCase( + LongPoint.newRangeQuery(SEARCHABLE_DATE, asLong("2015-01-01"), asLong("2017-12-31")), + DATASET, aggregation -> aggregation.calendarInterval(DateHistogramInterval.YEAR).field(AGGREGABLE_DATE), histogram -> { List buckets = histogram.getBuckets(); @@ -225,12 +252,14 @@ public void testIntervalYear() throws IOException { bucket = buckets.get(2); assertEquals("2017-01-01T00:00:00.000Z", bucket.getKeyAsString()); assertEquals(1, bucket.getDocCount()); - }, false + }, + false ); } public void testIntervalMonth() throws IOException { - testSearchCase(new MatchAllDocsQuery(), + testSearchCase( + new MatchAllDocsQuery(), Arrays.asList("2017-01-01", "2017-02-02", "2017-02-03", "2017-03-04", "2017-03-05", "2017-03-06"), aggregation -> aggregation.calendarInterval(DateHistogramInterval.MONTH).field(AGGREGABLE_DATE), histogram -> { @@ -248,21 +277,15 @@ public void testIntervalMonth() throws IOException { bucket = buckets.get(2); assertEquals("2017-03-01T00:00:00.000Z", bucket.getKeyAsString()); assertEquals(3, bucket.getDocCount()); - }, false + }, + false ); } public void testIntervalDay() throws IOException { - testSearchCase(new MatchAllDocsQuery(), - Arrays.asList( - "2017-02-01", - "2017-02-02", - "2017-02-02", - "2017-02-03", - "2017-02-03", - "2017-02-03", - "2017-02-05" - ), + testSearchCase( + new MatchAllDocsQuery(), + Arrays.asList("2017-02-01", "2017-02-02", "2017-02-02", "2017-02-03", "2017-02-03", "2017-02-03", "2017-02-05"), aggregation -> aggregation.calendarInterval(DateHistogramInterval.DAY).field(AGGREGABLE_DATE).minDocCount(1L), histogram -> { List buckets = histogram.getBuckets(); @@ -283,18 +306,12 @@ public void testIntervalDay() throws IOException { bucket = buckets.get(3); assertEquals("2017-02-05T00:00:00.000Z", bucket.getKeyAsString()); assertEquals(1, bucket.getDocCount()); - }, false + }, 
+ false ); - testSearchCase(new MatchAllDocsQuery(), - Arrays.asList( - "2017-02-01", - "2017-02-02", - "2017-02-02", - "2017-02-03", - "2017-02-03", - "2017-02-03", - "2017-02-05" - ), + testSearchCase( + new MatchAllDocsQuery(), + Arrays.asList("2017-02-01", "2017-02-02", "2017-02-02", "2017-02-03", "2017-02-03", "2017-02-03", "2017-02-05"), aggregation -> aggregation.fixedInterval(new DateHistogramInterval("24h")).field(AGGREGABLE_DATE).minDocCount(1L), histogram -> { List buckets = histogram.getBuckets(); @@ -315,12 +332,14 @@ public void testIntervalDay() throws IOException { bucket = buckets.get(3); assertEquals("2017-02-05T00:00:00.000Z", bucket.getKeyAsString()); assertEquals(1, bucket.getDocCount()); - }, false + }, + false ); } public void testIntervalHour() throws IOException { - testSearchCase(new MatchAllDocsQuery(), + testSearchCase( + new MatchAllDocsQuery(), Arrays.asList( "2017-02-01T09:02:00.000Z", "2017-02-01T09:35:00.000Z", @@ -361,9 +380,11 @@ public void testIntervalHour() throws IOException { bucket = buckets.get(5); assertEquals("2017-02-01T16:00:00.000Z", bucket.getKeyAsString()); assertEquals(3, bucket.getDocCount()); - }, false + }, + false ); - testSearchCase(new MatchAllDocsQuery(), + testSearchCase( + new MatchAllDocsQuery(), Arrays.asList( "2017-02-01T09:02:00.000Z", "2017-02-01T09:35:00.000Z", @@ -404,12 +425,14 @@ public void testIntervalHour() throws IOException { bucket = buckets.get(5); assertEquals("2017-02-01T16:00:00.000Z", bucket.getKeyAsString()); assertEquals(3, bucket.getDocCount()); - }, false + }, + false ); } public void testIntervalMinute() throws IOException { - testSearchCase(new MatchAllDocsQuery(), + testSearchCase( + new MatchAllDocsQuery(), Arrays.asList( "2017-02-01T09:02:35.000Z", "2017-02-01T09:02:59.000Z", @@ -433,9 +456,11 @@ public void testIntervalMinute() throws IOException { bucket = buckets.get(2); assertEquals("2017-02-01T09:16:00.000Z", bucket.getKeyAsString()); assertEquals(2, bucket.getDocCount()); - }, false + }, + false ); - testSearchCase(new MatchAllDocsQuery(), + testSearchCase( + new MatchAllDocsQuery(), Arrays.asList( "2017-02-01T09:02:35.000Z", "2017-02-01T09:02:59.000Z", @@ -459,12 +484,14 @@ public void testIntervalMinute() throws IOException { bucket = buckets.get(2); assertEquals("2017-02-01T09:16:00.000Z", bucket.getKeyAsString()); assertEquals(2, bucket.getDocCount()); - }, false + }, + false ); } public void testIntervalSecond() throws IOException { - testSearchCase(new MatchAllDocsQuery(), + testSearchCase( + new MatchAllDocsQuery(), Arrays.asList( "2017-02-01T00:00:05.015Z", "2017-02-01T00:00:11.299Z", @@ -489,9 +516,11 @@ public void testIntervalSecond() throws IOException { bucket = buckets.get(2); assertEquals("2017-02-01T00:00:37.000Z", bucket.getKeyAsString()); assertEquals(3, bucket.getDocCount()); - }, false + }, + false ); - testSearchCase(new MatchAllDocsQuery(), + testSearchCase( + new MatchAllDocsQuery(), Arrays.asList( "2017-02-01T00:00:05.015Z", "2017-02-01T00:00:11.299Z", @@ -516,12 +545,14 @@ public void testIntervalSecond() throws IOException { bucket = buckets.get(2); assertEquals("2017-02-01T00:00:37.000Z", bucket.getKeyAsString()); assertEquals(3, bucket.getDocCount()); - }, false + }, + false ); } public void testNanosIntervalSecond() throws IOException { - testSearchCase(new MatchAllDocsQuery(), + testSearchCase( + new MatchAllDocsQuery(), Arrays.asList( "2017-02-01T00:00:05.015298384Z", "2017-02-01T00:00:11.299954583Z", @@ -546,9 +577,11 @@ public void testNanosIntervalSecond() throws 
IOException { bucket = buckets.get(2); assertEquals("2017-02-01T00:00:37.000Z", bucket.getKeyAsString()); assertEquals(3, bucket.getDocCount()); - }, true + }, + true ); - testSearchCase(new MatchAllDocsQuery(), + testSearchCase( + new MatchAllDocsQuery(), Arrays.asList( "2017-02-01T00:00:05.015298384Z", "2017-02-01T00:00:11.299954583Z", @@ -573,7 +606,8 @@ public void testNanosIntervalSecond() throws IOException { bucket = buckets.get(2); assertEquals("2017-02-01T00:00:37.000Z", bucket.getKeyAsString()); assertEquals(3, bucket.getDocCount()); - }, true + }, + true ); } @@ -588,7 +622,9 @@ public void testMinDocCount() throws IOException { ); // 5 sec interval with minDocCount = 0 - testSearchCase(query, timestamps, + testSearchCase( + query, + timestamps, aggregation -> aggregation.fixedInterval(DateHistogramInterval.seconds(5)).field(AGGREGABLE_DATE).minDocCount(0L), histogram -> { List buckets = histogram.getBuckets(); @@ -609,11 +645,14 @@ public void testMinDocCount() throws IOException { bucket = buckets.get(3); assertEquals("2017-02-01T00:00:20.000Z", bucket.getKeyAsString()); assertEquals(1, bucket.getDocCount()); - }, false + }, + false ); // 5 sec interval with minDocCount = 3 - testSearchCase(query, timestamps, + testSearchCase( + query, + timestamps, aggregation -> aggregation.fixedInterval(DateHistogramInterval.seconds(5)).field(AGGREGABLE_DATE).minDocCount(3L), histogram -> { List buckets = histogram.getBuckets(); @@ -622,103 +661,99 @@ public void testMinDocCount() throws IOException { Histogram.Bucket bucket = buckets.get(0); assertEquals("2017-02-01T00:00:10.000Z", bucket.getKeyAsString()); assertEquals(3, bucket.getDocCount()); - }, false + }, + false ); } public void testFixedWithCalendar() throws IOException { - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> testSearchCase(new MatchAllDocsQuery(), - Arrays.asList( - "2017-02-01", - "2017-02-02", - "2017-02-02", - "2017-02-03", - "2017-02-03", - "2017-02-03", - "2017-02-05" - ), - aggregation -> aggregation.fixedInterval(DateHistogramInterval.WEEK).field(AGGREGABLE_DATE), - histogram -> {}, false - )); - assertThat(e.getMessage(), equalTo("failed to parse setting [date_histogram.fixedInterval] with value [1w] as a time value: " + - "unit is missing or unrecognized")); + IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, + () -> testSearchCase( + new MatchAllDocsQuery(), + Arrays.asList("2017-02-01", "2017-02-02", "2017-02-02", "2017-02-03", "2017-02-03", "2017-02-03", "2017-02-05"), + aggregation -> aggregation.fixedInterval(DateHistogramInterval.WEEK).field(AGGREGABLE_DATE), + histogram -> {}, + false + ) + ); + assertThat( + e.getMessage(), + equalTo( + "failed to parse setting [date_histogram.fixedInterval] with value [1w] as a time value: " + + "unit is missing or unrecognized" + ) + ); } public void testCalendarWithFixed() throws IOException { - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> testSearchCase(new MatchAllDocsQuery(), - Arrays.asList( - "2017-02-01", - "2017-02-02", - "2017-02-02", - "2017-02-03", - "2017-02-03", - "2017-02-03", - "2017-02-05" - ), - aggregation -> aggregation.calendarInterval(new DateHistogramInterval("5d")).field(AGGREGABLE_DATE), - histogram -> {}, false - )); + IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, + () -> testSearchCase( + new MatchAllDocsQuery(), + Arrays.asList("2017-02-01", "2017-02-02", "2017-02-02", "2017-02-03", "2017-02-03", "2017-02-03", 
"2017-02-05"), + aggregation -> aggregation.calendarInterval(new DateHistogramInterval("5d")).field(AGGREGABLE_DATE), + histogram -> {}, + false + ) + ); assertThat(e.getMessage(), equalTo("The supplied interval [5d] could not be parsed as a calendar interval.")); } public void testCalendarAndThenFixed() throws IOException { - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> testSearchCase(new MatchAllDocsQuery(), - Arrays.asList( - "2017-02-01", - "2017-02-02", - "2017-02-02", - "2017-02-03", - "2017-02-03", - "2017-02-03", - "2017-02-05" - ), - aggregation -> aggregation.calendarInterval(DateHistogramInterval.DAY) - .fixedInterval(new DateHistogramInterval("2d")) - .field(AGGREGABLE_DATE), - histogram -> {}, false - )); + IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, + () -> testSearchCase( + new MatchAllDocsQuery(), + Arrays.asList("2017-02-01", "2017-02-02", "2017-02-02", "2017-02-03", "2017-02-03", "2017-02-03", "2017-02-05"), + aggregation -> aggregation.calendarInterval(DateHistogramInterval.DAY) + .fixedInterval(new DateHistogramInterval("2d")) + .field(AGGREGABLE_DATE), + histogram -> {}, + false + ) + ); assertThat(e.getMessage(), equalTo("Cannot use [fixed_interval] with [calendar_interval] configuration option.")); } public void testFixedAndThenCalendar() throws IOException { - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> testSearchCase(new MatchAllDocsQuery(), - Arrays.asList( - "2017-02-01", - "2017-02-02", - "2017-02-02", - "2017-02-03", - "2017-02-03", - "2017-02-03", - "2017-02-05" - ), - aggregation -> aggregation.fixedInterval(new DateHistogramInterval("2d")) - .calendarInterval(DateHistogramInterval.DAY) - .field(AGGREGABLE_DATE), - histogram -> {}, false - )); + IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, + () -> testSearchCase( + new MatchAllDocsQuery(), + Arrays.asList("2017-02-01", "2017-02-02", "2017-02-02", "2017-02-03", "2017-02-03", "2017-02-03", "2017-02-05"), + aggregation -> aggregation.fixedInterval(new DateHistogramInterval("2d")) + .calendarInterval(DateHistogramInterval.DAY) + .field(AGGREGABLE_DATE), + histogram -> {}, + false + ) + ); assertThat(e.getMessage(), equalTo("Cannot use [calendar_interval] with [fixed_interval] configuration option.")); } public void testOverlappingBounds() { - IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, () -> testSearchCase(new MatchAllDocsQuery(), - Arrays.asList( - "2017-02-01", - "2017-02-02", - "2017-02-02", - "2017-02-03", - "2017-02-03", - "2017-02-03", - "2017-02-05" - ), - aggregation -> aggregation .calendarInterval(DateHistogramInterval.DAY) - .hardBounds(new LongBounds("2010-01-01", "2020-01-01")) - .extendedBounds(new LongBounds("2009-01-01", "2021-01-01")) - .field(AGGREGABLE_DATE), - histogram -> {}, false - )); + IllegalArgumentException ex = expectThrows( + IllegalArgumentException.class, + () -> testSearchCase( + new MatchAllDocsQuery(), + Arrays.asList("2017-02-01", "2017-02-02", "2017-02-02", "2017-02-03", "2017-02-03", "2017-02-03", "2017-02-05"), + aggregation -> aggregation.calendarInterval(DateHistogramInterval.DAY) + .hardBounds(new LongBounds("2010-01-01", "2020-01-01")) + .extendedBounds(new LongBounds("2009-01-01", "2021-01-01")) + .field(AGGREGABLE_DATE), + histogram -> {}, + false + ) + ); - assertThat(ex.getMessage(), equalTo("Extended bounds have to be inside hard bounds, " + - "hard bounds: [2010-01-01--2020-01-01], extended bounds: 
[2009-01-01--2021-01-01]")); + assertThat( + ex.getMessage(), + equalTo( + "Extended bounds have to be inside hard bounds, " + + "hard bounds: [2010-01-01--2020-01-01], extended bounds: [2009-01-01--2021-01-01]" + ) + ); } public void testFewRoundingPointsUsesFromRange() throws IOException { @@ -820,25 +855,32 @@ public void testOneBucketOptimized() throws IOException { assertThat(result.getBuckets().get(0).getDocCount(), equalTo(5000L)); assertThat(impl, equalTo(DateHistogramAggregator.FromDateRange.class)); - assertMap(debug, matchesMap() - .entry("d", matchesMap() - .entry("delegate", "RangeAggregator.FromFilters") - .entry("delegate_debug", matchesMap() - .entry("ranges", 1) - .entry("average_docs_per_range", 5010.0) - .entry("delegate", "FilterByFilterAggregator") - .entry("delegate_debug", matchesMap() - .entry("segments_with_doc_count_field", 0) - .entry("segments_with_deleted_docs", 0) - .entry("segments_counted", greaterThan(0)) - .entry("segments_collected", 0) - .entry("filters", matchesList().item(matchesMap() - .entry("query", "DocValuesFieldExistsQuery [field=f]") - .entry("specialized_for", "docvalues_field_exists") - .entry("results_from_metadata", greaterThan(0))) - ) + assertMap( + debug, + matchesMap().entry( + "d", + matchesMap().entry("delegate", "RangeAggregator.FromFilters") + .entry( + "delegate_debug", + matchesMap().entry("ranges", 1) + .entry("average_docs_per_range", 5010.0) + .entry("delegate", "FilterByFilterAggregator") + .entry( + "delegate_debug", + matchesMap().entry("segments_with_doc_count_field", 0) + .entry("segments_with_deleted_docs", 0) + .entry("segments_counted", greaterThan(0)) + .entry("segments_collected", 0) + .entry( + "filters", + matchesList().item( + matchesMap().entry("query", "DocValuesFieldExistsQuery [field=f]") + .entry("specialized_for", "docvalues_field_exists") + .entry("results_from_metadata", greaterThan(0)) + ) + ) + ) ) - ) ) ); }, @@ -944,16 +986,24 @@ public void testBuildEmpty() throws IOException { ); } - private void testSearchCase(Query query, List dataset, - Consumer configure, - Consumer verify, boolean useNanosecondResolution) throws IOException { + private void testSearchCase( + Query query, + List dataset, + Consumer configure, + Consumer verify, + boolean useNanosecondResolution + ) throws IOException { testSearchCase(query, dataset, configure, verify, 10000, useNanosecondResolution); } - private void testSearchCase(Query query, List dataset, - Consumer configure, - Consumer verify, - int maxBucket, boolean useNanosecondResolution) throws IOException { + private void testSearchCase( + Query query, + List dataset, + Consumer configure, + Consumer verify, + int maxBucket, + boolean useNanosecondResolution + ) throws IOException { boolean aggregableDateIsSearchable = randomBoolean(); DateFieldMapper.DateFieldType fieldType = aggregableDateFieldType(useNanosecondResolution, aggregableDateIsSearchable); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramTests.java index ba800e22c3aad..263cd50e55600 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramTests.java @@ -24,26 +24,35 @@ protected DateHistogramAggregationBuilder createTestAggregatorBuilder() { factory.fixedInterval(new 
DateHistogramInterval(randomIntBetween(1, 100000) + "ms")); } else { if (randomBoolean()) { - factory.calendarInterval(randomFrom(DateHistogramInterval.YEAR, DateHistogramInterval.QUARTER, - DateHistogramInterval.MONTH, DateHistogramInterval.WEEK, DateHistogramInterval.DAY, DateHistogramInterval.HOUR, - DateHistogramInterval.MINUTE, DateHistogramInterval.SECOND)); + factory.calendarInterval( + randomFrom( + DateHistogramInterval.YEAR, + DateHistogramInterval.QUARTER, + DateHistogramInterval.MONTH, + DateHistogramInterval.WEEK, + DateHistogramInterval.DAY, + DateHistogramInterval.HOUR, + DateHistogramInterval.MINUTE, + DateHistogramInterval.SECOND + ) + ); } else { int branch = randomInt(3); switch (branch) { - case 0: - factory.fixedInterval(DateHistogramInterval.seconds(randomIntBetween(1, 1000))); - break; - case 1: - factory.fixedInterval(DateHistogramInterval.minutes(randomIntBetween(1, 1000))); - break; - case 2: - factory.fixedInterval(DateHistogramInterval.hours(randomIntBetween(1, 1000))); - break; - case 3: - factory.fixedInterval(DateHistogramInterval.days(randomIntBetween(1, 1000))); - break; - default: - throw new IllegalStateException("invalid branch: " + branch); + case 0: + factory.fixedInterval(DateHistogramInterval.seconds(randomIntBetween(1, 1000))); + break; + case 1: + factory.fixedInterval(DateHistogramInterval.minutes(randomIntBetween(1, 1000))); + break; + case 2: + factory.fixedInterval(DateHistogramInterval.hours(randomIntBetween(1, 1000))); + break; + case 3: + factory.fixedInterval(DateHistogramInterval.days(randomIntBetween(1, 1000))); + break; + default: + throw new IllegalStateException("invalid branch: " + branch); } } } @@ -67,7 +76,7 @@ protected DateHistogramAggregationBuilder createTestAggregatorBuilder() { } if (randomBoolean()) { List order = randomOrder(); - if(order.size() == 1 && randomBoolean()) { + if (order.size() == 1 && randomBoolean()) { factory.order(order.get(0)); } else { factory.order(order); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/DateIntervalWrapperTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/DateIntervalWrapperTests.java index 5863de486b4b7..3f9986dd1fe80 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/DateIntervalWrapperTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/DateIntervalWrapperTests.java @@ -67,36 +67,37 @@ public void testReadFrom() throws Exception { try (BytesStreamOutput out = new BytesStreamOutput()) { out.writeVInt(0); try (StreamInput in = out.bytes().streamInput()) { - assertThat(DateIntervalWrapper.IntervalTypeEnum.fromStream(in), - equalTo(DateIntervalWrapper.IntervalTypeEnum.NONE)); + assertThat(DateIntervalWrapper.IntervalTypeEnum.fromStream(in), equalTo(DateIntervalWrapper.IntervalTypeEnum.NONE)); } } try (BytesStreamOutput out = new BytesStreamOutput()) { out.writeVInt(1); try (StreamInput in = out.bytes().streamInput()) { - assertThat(DateIntervalWrapper.IntervalTypeEnum.fromStream(in), - equalTo(DateIntervalWrapper.IntervalTypeEnum.FIXED)); + assertThat(DateIntervalWrapper.IntervalTypeEnum.fromStream(in), equalTo(DateIntervalWrapper.IntervalTypeEnum.FIXED)); } } try (BytesStreamOutput out = new BytesStreamOutput()) { out.writeVInt(2); try (StreamInput in = out.bytes().streamInput()) { - assertThat(DateIntervalWrapper.IntervalTypeEnum.fromStream(in), - equalTo(DateIntervalWrapper.IntervalTypeEnum.CALENDAR)); + 
assertThat(DateIntervalWrapper.IntervalTypeEnum.fromStream(in), equalTo(DateIntervalWrapper.IntervalTypeEnum.CALENDAR)); } } try (BytesStreamOutput out = new BytesStreamOutput()) { out.writeVInt(3); try (StreamInput in = out.bytes().streamInput()) { - assertThat(DateIntervalWrapper.IntervalTypeEnum.fromStream(in), - equalTo(DateIntervalWrapper.IntervalTypeEnum.LEGACY_INTERVAL)); + assertThat( + DateIntervalWrapper.IntervalTypeEnum.fromStream(in), + equalTo(DateIntervalWrapper.IntervalTypeEnum.LEGACY_INTERVAL) + ); } } try (BytesStreamOutput out = new BytesStreamOutput()) { out.writeVInt(4); try (StreamInput in = out.bytes().streamInput()) { - assertThat(DateIntervalWrapper.IntervalTypeEnum.fromStream(in), - equalTo(DateIntervalWrapper.IntervalTypeEnum.LEGACY_DATE_HISTO)); + assertThat( + DateIntervalWrapper.IntervalTypeEnum.fromStream(in), + equalTo(DateIntervalWrapper.IntervalTypeEnum.LEGACY_DATE_HISTO) + ); } } } @@ -107,7 +108,7 @@ public void testInvalidReadFrom() throws Exception { try (StreamInput in = out.bytes().streamInput()) { DateIntervalWrapper.IntervalTypeEnum.fromStream(in); fail("Expected IOException"); - } catch(IOException e) { + } catch (IOException e) { assertThat(e.getMessage(), containsString("Unknown IntervalTypeEnum ordinal [")); } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/DateRangeHistogramAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/DateRangeHistogramAggregatorTests.java index d9ffdec08ccba..0b0e68e9f5fb2 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/DateRangeHistogramAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/DateRangeHistogramAggregatorTests.java @@ -19,9 +19,9 @@ import org.apache.lucene.search.Query; import org.apache.lucene.store.Directory; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.common.lucene.search.Queries; import org.elasticsearch.common.time.DateFormatters; +import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.index.mapper.DateFieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.RangeFieldMapper; @@ -43,8 +43,13 @@ public class DateRangeHistogramAggregatorTests extends AggregatorTestCase { public static final String FIELD_NAME = "fieldName"; public void testBasics() throws Exception { - RangeFieldMapper.Range range = new RangeFieldMapper.Range(RangeType.DATE, asLong("2019-08-01T12:14:36"), - asLong("2019-08-01T15:07:22"), true, true); + RangeFieldMapper.Range range = new RangeFieldMapper.Range( + RangeType.DATE, + asLong("2019-08-01T12:14:36"), + asLong("2019-08-01T15:07:22"), + true, + true + ); testCase( new MatchAllDocsQuery(), builder -> builder.calendarInterval(DateHistogramInterval.DAY), @@ -57,8 +62,13 @@ public void testBasics() throws Exception { } public void testFormat() throws Exception { - RangeFieldMapper.Range range = new RangeFieldMapper.Range(RangeType.DATE, asLong("2019-08-01T12:14:36"), - asLong("2019-08-01T15:07:22"), true, true); + RangeFieldMapper.Range range = new RangeFieldMapper.Range( + RangeType.DATE, + asLong("2019-08-01T12:14:36"), + asLong("2019-08-01T15:07:22"), + true, + true + ); testCase( new MatchAllDocsQuery(), builder -> builder.calendarInterval(DateHistogramInterval.DAY).format("yyyy-MM-dd"), @@ -76,16 +86,13 @@ public void testUnsupportedRangeType() throws Exception { 
RangeType rangeType = RangeType.LONG; final String fieldName = "field"; - try (Directory dir = newDirectory(); - RandomIndexWriter w = new RandomIndexWriter(random(), dir)) { + try (Directory dir = newDirectory(); RandomIndexWriter w = new RandomIndexWriter(random(), dir)) { Document doc = new Document(); - BytesRef encodedRange = - rangeType.encodeRanges(singleton(new RangeFieldMapper.Range(rangeType, 12234, 89765, true, true))); + BytesRef encodedRange = rangeType.encodeRanges(singleton(new RangeFieldMapper.Range(rangeType, 12234, 89765, true, true))); doc.add(new BinaryDocValuesField(fieldName, encodedRange)); w.addDocument(doc); - DateHistogramAggregationBuilder aggBuilder = new DateHistogramAggregationBuilder("my_agg") - .field(fieldName) + DateHistogramAggregationBuilder aggBuilder = new DateHistogramAggregationBuilder("my_agg").field(fieldName) .calendarInterval(DateHistogramInterval.MONTH); MappedFieldType fieldType = new RangeFieldMapper.RangeFieldType(fieldName, rangeType); @@ -101,297 +108,367 @@ public void testUnsupportedRangeType() throws Exception { * Test calendar interval behaves correctly on months over 30 days */ public void testLongMonthsCalendarInterval() throws Exception { - RangeFieldMapper.Range julyRange = new RangeFieldMapper.Range(RangeType.DATE, asLong("2019-07-01T00:00:00"), - asLong("2019-07-31T23:59:59"), true, true); - RangeFieldMapper.Range augustRange = new RangeFieldMapper.Range(RangeType.DATE, asLong("2019-08-01T00:00:00"), - asLong("2019-08-31T23:59:59"), true, true); - RangeFieldMapper.Range septemberRange = new RangeFieldMapper.Range(RangeType.DATE, asLong("2019-09-01T00:00:00"), - asLong("2019-09-30T23:59:59"), true, true); + RangeFieldMapper.Range julyRange = new RangeFieldMapper.Range( + RangeType.DATE, + asLong("2019-07-01T00:00:00"), + asLong("2019-07-31T23:59:59"), + true, + true + ); + RangeFieldMapper.Range augustRange = new RangeFieldMapper.Range( + RangeType.DATE, + asLong("2019-08-01T00:00:00"), + asLong("2019-08-31T23:59:59"), + true, + true + ); + RangeFieldMapper.Range septemberRange = new RangeFieldMapper.Range( + RangeType.DATE, + asLong("2019-09-01T00:00:00"), + asLong("2019-09-30T23:59:59"), + true, + true + ); // Calendar interval case - three months, three buckets - testCase( - new MatchAllDocsQuery(), - builder -> builder.calendarInterval(DateHistogramInterval.MONTH), - writer -> { - writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(julyRange))))); - writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(augustRange))))); - writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(septemberRange))))); - }, - histo -> { - assertEquals(3, histo.getBuckets().size()); + testCase(new MatchAllDocsQuery(), builder -> builder.calendarInterval(DateHistogramInterval.MONTH), writer -> { + writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(julyRange))))); + writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(augustRange))))); + writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(septemberRange))))); + }, histo -> { + assertEquals(3, histo.getBuckets().size()); - assertEquals(asZDT("2019-07-01T00:00:00"), histo.getBuckets().get(0).getKey()); - assertEquals(1, histo.getBuckets().get(0).getDocCount()); +
assertEquals(asZDT("2019-07-01T00:00:00"), histo.getBuckets().get(0).getKey()); + assertEquals(1, histo.getBuckets().get(0).getDocCount()); - assertEquals(asZDT("2019-08-01T00:00:00"), histo.getBuckets().get(1).getKey()); - assertEquals(1, histo.getBuckets().get(1).getDocCount()); + assertEquals(asZDT("2019-08-01T00:00:00"), histo.getBuckets().get(1).getKey()); + assertEquals(1, histo.getBuckets().get(1).getDocCount()); - assertEquals(asZDT("2019-09-01T00:00:00"), histo.getBuckets().get(2).getKey()); - assertEquals(1, histo.getBuckets().get(2).getDocCount()); + assertEquals(asZDT("2019-09-01T00:00:00"), histo.getBuckets().get(2).getKey()); + assertEquals(1, histo.getBuckets().get(2).getDocCount()); - assertTrue(AggregationInspectionHelper.hasValue(histo)); - } - ); + assertTrue(AggregationInspectionHelper.hasValue(histo)); + }); } /* * Test fixed interval 30d behaves correctly with months over 30 days */ public void testLongMonthsFixedInterval() throws Exception { - RangeFieldMapper.Range julyRange = new RangeFieldMapper.Range(RangeType.DATE, asLong("2019-07-01T00:00:00"), - asLong("2019-07-31T23:59:59"), true, true); - RangeFieldMapper.Range augustRange = new RangeFieldMapper.Range(RangeType.DATE, asLong("2019-08-01T00:00:00"), - asLong("2019-08-31T23:59:59"), true, true); - RangeFieldMapper.Range septemberRange = new RangeFieldMapper.Range(RangeType.DATE, asLong("2019-09-01T00:00:00"), - asLong("2019-09-30T23:59:59"), true, true); + RangeFieldMapper.Range julyRange = new RangeFieldMapper.Range( + RangeType.DATE, + asLong("2019-07-01T00:00:00"), + asLong("2019-07-31T23:59:59"), + true, + true + ); + RangeFieldMapper.Range augustRange = new RangeFieldMapper.Range( + RangeType.DATE, + asLong("2019-08-01T00:00:00"), + asLong("2019-08-31T23:59:59"), + true, + true + ); + RangeFieldMapper.Range septemberRange = new RangeFieldMapper.Range( + RangeType.DATE, + asLong("2019-09-01T00:00:00"), + asLong("2019-09-30T23:59:59"), + true, + true + ); // Fixed interval case - 4 periods of 30 days - testCase( - new MatchAllDocsQuery(), - builder -> builder.fixedInterval(new DateHistogramInterval("30d")), - writer -> { - writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(julyRange))))); - writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(augustRange))))); - writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(septemberRange))))); - }, - histo -> { - assertEquals(4, histo.getBuckets().size()); + testCase(new MatchAllDocsQuery(), builder -> builder.fixedInterval(new DateHistogramInterval("30d")), writer -> { + writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(julyRange))))); + writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(augustRange))))); + writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(septemberRange))))); + }, histo -> { + assertEquals(4, histo.getBuckets().size()); - assertEquals(asZDT("2019-06-13T00:00:00"), histo.getBuckets().get(0).getKey()); - assertEquals(1, histo.getBuckets().get(0).getDocCount()); + assertEquals(asZDT("2019-06-13T00:00:00"), histo.getBuckets().get(0).getKey()); + assertEquals(1, histo.getBuckets().get(0).getDocCount()); - assertEquals(asZDT("2019-07-13T00:00:00"), histo.getBuckets().get(1).getKey()); - assertEquals(2, histo.getBuckets().get(1).getDocCount()); 
+ assertEquals(asZDT("2019-07-13T00:00:00"), histo.getBuckets().get(1).getKey()); + assertEquals(2, histo.getBuckets().get(1).getDocCount()); - assertEquals(asZDT("2019-08-12T00:00:00"), histo.getBuckets().get(2).getKey()); - assertEquals(2, histo.getBuckets().get(2).getDocCount()); + assertEquals(asZDT("2019-08-12T00:00:00"), histo.getBuckets().get(2).getKey()); + assertEquals(2, histo.getBuckets().get(2).getDocCount()); - assertEquals(asZDT("2019-09-11T00:00:00"), histo.getBuckets().get(3).getKey()); - assertEquals(1, histo.getBuckets().get(3).getDocCount()); + assertEquals(asZDT("2019-09-11T00:00:00"), histo.getBuckets().get(3).getKey()); + assertEquals(1, histo.getBuckets().get(3).getDocCount()); - assertTrue(AggregationInspectionHelper.hasValue(histo)); - } - ); + assertTrue(AggregationInspectionHelper.hasValue(histo)); + }); } public void testOffsetCalendarInterval() throws Exception { - RangeFieldMapper.Range range1 = new RangeFieldMapper.Range(RangeType.DATE, asLong("2019-07-01T03:15:00"), - asLong("2019-07-01T03:20:00"), true, true); - RangeFieldMapper.Range range2 = new RangeFieldMapper.Range(RangeType.DATE, asLong("2019-07-01T03:45:00"), - asLong("2019-07-01T03:50:00"), true, true); - RangeFieldMapper.Range range3 = new RangeFieldMapper.Range(RangeType.DATE, asLong("2019-07-01T03:55:00"), - asLong("2019-07-01T04:05:00"), true, true); - RangeFieldMapper.Range range4 = new RangeFieldMapper.Range(RangeType.DATE, asLong("2019-07-01T04:17:00"), - asLong("2019-07-01T04:19:00"), true, true); - RangeFieldMapper.Range range5 = new RangeFieldMapper.Range(RangeType.DATE, asLong("2019-07-01T04:55:00"), - asLong("2019-07-01T05:05:00"), true, true); + RangeFieldMapper.Range range1 = new RangeFieldMapper.Range( + RangeType.DATE, + asLong("2019-07-01T03:15:00"), + asLong("2019-07-01T03:20:00"), + true, + true + ); + RangeFieldMapper.Range range2 = new RangeFieldMapper.Range( + RangeType.DATE, + asLong("2019-07-01T03:45:00"), + asLong("2019-07-01T03:50:00"), + true, + true + ); + RangeFieldMapper.Range range3 = new RangeFieldMapper.Range( + RangeType.DATE, + asLong("2019-07-01T03:55:00"), + asLong("2019-07-01T04:05:00"), + true, + true + ); + RangeFieldMapper.Range range4 = new RangeFieldMapper.Range( + RangeType.DATE, + asLong("2019-07-01T04:17:00"), + asLong("2019-07-01T04:19:00"), + true, + true + ); + RangeFieldMapper.Range range5 = new RangeFieldMapper.Range( + RangeType.DATE, + asLong("2019-07-01T04:55:00"), + asLong("2019-07-01T05:05:00"), + true, + true + ); // No offset, just to make sure the ranges line up as expected - testCase( - new MatchAllDocsQuery(), - builder -> builder.calendarInterval(DateHistogramInterval.HOUR), - writer -> { - writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(range1))))); - writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(range2))))); - writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(range3))))); - writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(range4))))); - writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(range5))))); - }, - histo -> { - assertEquals(3, histo.getBuckets().size()); + testCase(new MatchAllDocsQuery(), builder -> builder.calendarInterval(DateHistogramInterval.HOUR), writer -> { + writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, 
RangeType.DATE.encodeRanges(singleton(range1))))); + writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(range2))))); + writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(range3))))); + writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(range4))))); + writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(range5))))); + }, histo -> { + assertEquals(3, histo.getBuckets().size()); - assertEquals(asZDT("2019-07-01T03:00:00"), histo.getBuckets().get(0).getKey()); - assertEquals(3, histo.getBuckets().get(0).getDocCount()); + assertEquals(asZDT("2019-07-01T03:00:00"), histo.getBuckets().get(0).getKey()); + assertEquals(3, histo.getBuckets().get(0).getDocCount()); - assertEquals(asZDT("2019-07-01T04:00:00"), histo.getBuckets().get(1).getKey()); - assertEquals(3, histo.getBuckets().get(1).getDocCount()); + assertEquals(asZDT("2019-07-01T04:00:00"), histo.getBuckets().get(1).getKey()); + assertEquals(3, histo.getBuckets().get(1).getDocCount()); - assertEquals(asZDT("2019-07-01T05:00:00"), histo.getBuckets().get(2).getKey()); - assertEquals(1, histo.getBuckets().get(2).getDocCount()); + assertEquals(asZDT("2019-07-01T05:00:00"), histo.getBuckets().get(2).getKey()); + assertEquals(1, histo.getBuckets().get(2).getDocCount()); - assertTrue(AggregationInspectionHelper.hasValue(histo)); - } - ); + assertTrue(AggregationInspectionHelper.hasValue(histo)); + }); // 10 minute offset should shift all data into one bucket - testCase( - new MatchAllDocsQuery(), - builder -> builder.calendarInterval(DateHistogramInterval.HOUR).offset("10m"), - writer -> { - writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(range1))))); - writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(range2))))); - writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(range3))))); - writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(range4))))); - writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(range5))))); - }, - histo -> { - assertEquals(2, histo.getBuckets().size()); - - assertEquals(asZDT("2019-07-01T03:10:00"), histo.getBuckets().get(0).getKey()); - assertEquals(3, histo.getBuckets().get(0).getDocCount()); - - assertEquals(asZDT("2019-07-01T04:10:00"), histo.getBuckets().get(1).getKey()); - assertEquals(2, histo.getBuckets().get(1).getDocCount()); - - assertTrue(AggregationInspectionHelper.hasValue(histo)); - } - ); + testCase(new MatchAllDocsQuery(), builder -> builder.calendarInterval(DateHistogramInterval.HOUR).offset("10m"), writer -> { + writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(range1))))); + writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(range2))))); + writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(range3))))); + writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(range4))))); + writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(range5))))); + }, histo -> { + assertEquals(2, 
histo.getBuckets().size()); + + assertEquals(asZDT("2019-07-01T03:10:00"), histo.getBuckets().get(0).getKey()); + assertEquals(3, histo.getBuckets().get(0).getDocCount()); + + assertEquals(asZDT("2019-07-01T04:10:00"), histo.getBuckets().get(1).getKey()); + assertEquals(2, histo.getBuckets().get(1).getDocCount()); + + assertTrue(AggregationInspectionHelper.hasValue(histo)); + }); } public void testOffsetFixedInterval() throws Exception { - RangeFieldMapper.Range range1 = new RangeFieldMapper.Range(RangeType.DATE, asLong("2019-07-01T03:15:00"), - asLong("2019-07-01T03:20:00"), true, true); - RangeFieldMapper.Range range2 = new RangeFieldMapper.Range(RangeType.DATE, asLong("2019-07-01T03:45:00"), - asLong("2019-07-01T03:50:00"), true, true); - RangeFieldMapper.Range range3 = new RangeFieldMapper.Range(RangeType.DATE, asLong("2019-07-01T03:55:00"), - asLong("2019-07-01T04:05:00"), true, true); - RangeFieldMapper.Range range4 = new RangeFieldMapper.Range(RangeType.DATE, asLong("2019-07-01T04:17:00"), - asLong("2019-07-01T04:19:00"), true, true); - RangeFieldMapper.Range range5 = new RangeFieldMapper.Range(RangeType.DATE, asLong("2019-07-01T04:55:00"), - asLong("2019-07-01T05:05:00"), true, true); + RangeFieldMapper.Range range1 = new RangeFieldMapper.Range( + RangeType.DATE, + asLong("2019-07-01T03:15:00"), + asLong("2019-07-01T03:20:00"), + true, + true + ); + RangeFieldMapper.Range range2 = new RangeFieldMapper.Range( + RangeType.DATE, + asLong("2019-07-01T03:45:00"), + asLong("2019-07-01T03:50:00"), + true, + true + ); + RangeFieldMapper.Range range3 = new RangeFieldMapper.Range( + RangeType.DATE, + asLong("2019-07-01T03:55:00"), + asLong("2019-07-01T04:05:00"), + true, + true + ); + RangeFieldMapper.Range range4 = new RangeFieldMapper.Range( + RangeType.DATE, + asLong("2019-07-01T04:17:00"), + asLong("2019-07-01T04:19:00"), + true, + true + ); + RangeFieldMapper.Range range5 = new RangeFieldMapper.Range( + RangeType.DATE, + asLong("2019-07-01T04:55:00"), + asLong("2019-07-01T05:05:00"), + true, + true + ); // No offset, just to make sure the ranges line up as expected - testCase( - new MatchAllDocsQuery(), - builder -> builder.fixedInterval(new DateHistogramInterval("1h")), - writer -> { - writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(range1))))); - writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(range2))))); - writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(range3))))); - writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(range4))))); - writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(range5))))); - }, - histo -> { - assertEquals(3, histo.getBuckets().size()); + testCase(new MatchAllDocsQuery(), builder -> builder.fixedInterval(new DateHistogramInterval("1h")), writer -> { + writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(range1))))); + writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(range2))))); + writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(range3))))); + writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(range4))))); + writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, 
RangeType.DATE.encodeRanges(singleton(range5))))); + }, histo -> { + assertEquals(3, histo.getBuckets().size()); - assertEquals(asZDT("2019-07-01T03:00:00"), histo.getBuckets().get(0).getKey()); - assertEquals(3, histo.getBuckets().get(0).getDocCount()); + assertEquals(asZDT("2019-07-01T03:00:00"), histo.getBuckets().get(0).getKey()); + assertEquals(3, histo.getBuckets().get(0).getDocCount()); - assertEquals(asZDT("2019-07-01T04:00:00"), histo.getBuckets().get(1).getKey()); - assertEquals(3, histo.getBuckets().get(1).getDocCount()); + assertEquals(asZDT("2019-07-01T04:00:00"), histo.getBuckets().get(1).getKey()); + assertEquals(3, histo.getBuckets().get(1).getDocCount()); - assertEquals(asZDT("2019-07-01T05:00:00"), histo.getBuckets().get(2).getKey()); - assertEquals(1, histo.getBuckets().get(2).getDocCount()); + assertEquals(asZDT("2019-07-01T05:00:00"), histo.getBuckets().get(2).getKey()); + assertEquals(1, histo.getBuckets().get(2).getDocCount()); - assertTrue(AggregationInspectionHelper.hasValue(histo)); - } - ); + assertTrue(AggregationInspectionHelper.hasValue(histo)); + }); // 10 minute offset should shift all data into one bucket - testCase( - new MatchAllDocsQuery(), - builder -> builder.fixedInterval(new DateHistogramInterval("1h")).offset("10m"), - writer -> { - writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(range1))))); - writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(range2))))); - writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(range3))))); - writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(range4))))); - writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(range5))))); - }, - histo -> { - assertEquals(2, histo.getBuckets().size()); - - assertEquals(asZDT("2019-07-01T03:10:00"), histo.getBuckets().get(0).getKey()); - assertEquals(3, histo.getBuckets().get(0).getDocCount()); - - assertEquals(asZDT("2019-07-01T04:10:00"), histo.getBuckets().get(1).getKey()); - assertEquals(2, histo.getBuckets().get(1).getDocCount()); - - assertTrue(AggregationInspectionHelper.hasValue(histo)); - } - ); + testCase(new MatchAllDocsQuery(), builder -> builder.fixedInterval(new DateHistogramInterval("1h")).offset("10m"), writer -> { + writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(range1))))); + writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(range2))))); + writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(range3))))); + writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(range4))))); + writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(range5))))); + }, histo -> { + assertEquals(2, histo.getBuckets().size()); + + assertEquals(asZDT("2019-07-01T03:10:00"), histo.getBuckets().get(0).getKey()); + assertEquals(3, histo.getBuckets().get(0).getDocCount()); + + assertEquals(asZDT("2019-07-01T04:10:00"), histo.getBuckets().get(1).getKey()); + assertEquals(2, histo.getBuckets().get(1).getDocCount()); + + assertTrue(AggregationInspectionHelper.hasValue(histo)); + }); } /* * Test that when incrementing the rounded bucket key, offsets are correctly taken into account at 
the <1hour scale */ public void testNextRoundingValueOffsetHours() throws Exception { - RangeFieldMapper.Range range1 = new RangeFieldMapper.Range(RangeType.DATE, asLong("2019-07-01T03:15:00"), - asLong("2019-07-01T03:20:00"), true, true); - RangeFieldMapper.Range range2 = new RangeFieldMapper.Range(RangeType.DATE, asLong("2019-07-01T04:15:00"), - asLong("2019-07-01T04:20:00"), true, true); - RangeFieldMapper.Range range3 = new RangeFieldMapper.Range(RangeType.DATE, asLong("2019-07-01T05:15:00"), - asLong("2019-07-01T05:20:00"), true, true); - RangeFieldMapper.Range range4 = new RangeFieldMapper.Range(RangeType.DATE, asLong("2019-07-01T06:15:00"), - asLong("2019-07-01T06:20:00"), true, true); - RangeFieldMapper.Range range5 = new RangeFieldMapper.Range(RangeType.DATE, asLong("2019-07-01T07:15:00"), - asLong("2019-07-01T07:20:00"), true, true); - RangeFieldMapper.Range range6 = new RangeFieldMapper.Range(RangeType.DATE, asLong("2019-07-01T08:15:00"), - asLong("2019-07-01T08:20:00"), true, true); + RangeFieldMapper.Range range1 = new RangeFieldMapper.Range( + RangeType.DATE, + asLong("2019-07-01T03:15:00"), + asLong("2019-07-01T03:20:00"), + true, + true + ); + RangeFieldMapper.Range range2 = new RangeFieldMapper.Range( + RangeType.DATE, + asLong("2019-07-01T04:15:00"), + asLong("2019-07-01T04:20:00"), + true, + true + ); + RangeFieldMapper.Range range3 = new RangeFieldMapper.Range( + RangeType.DATE, + asLong("2019-07-01T05:15:00"), + asLong("2019-07-01T05:20:00"), + true, + true + ); + RangeFieldMapper.Range range4 = new RangeFieldMapper.Range( + RangeType.DATE, + asLong("2019-07-01T06:15:00"), + asLong("2019-07-01T06:20:00"), + true, + true + ); + RangeFieldMapper.Range range5 = new RangeFieldMapper.Range( + RangeType.DATE, + asLong("2019-07-01T07:15:00"), + asLong("2019-07-01T07:20:00"), + true, + true + ); + RangeFieldMapper.Range range6 = new RangeFieldMapper.Range( + RangeType.DATE, + asLong("2019-07-01T08:15:00"), + asLong("2019-07-01T08:20:00"), + true, + true + ); - testCase( - new MatchAllDocsQuery(), - builder -> builder.fixedInterval(new DateHistogramInterval("1h")).offset("13m"), - writer -> { - writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(range1))))); - writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(range2))))); - writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(range3))))); - writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(range4))))); - writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(range5))))); - writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(range6))))); - }, - histo -> { - assertEquals(6, histo.getBuckets().size()); + testCase(new MatchAllDocsQuery(), builder -> builder.fixedInterval(new DateHistogramInterval("1h")).offset("13m"), writer -> { + writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(range1))))); + writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(range2))))); + writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(range3))))); + writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(range4))))); + 
writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(range5))))); + writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(range6))))); + }, histo -> { + assertEquals(6, histo.getBuckets().size()); - assertEquals(asZDT("2019-07-01T03:13:00"), histo.getBuckets().get(0).getKey()); - assertEquals(1, histo.getBuckets().get(0).getDocCount()); + assertEquals(asZDT("2019-07-01T03:13:00"), histo.getBuckets().get(0).getKey()); + assertEquals(1, histo.getBuckets().get(0).getDocCount()); - assertEquals(asZDT("2019-07-01T04:13:00"), histo.getBuckets().get(1).getKey()); - assertEquals(1, histo.getBuckets().get(1).getDocCount()); + assertEquals(asZDT("2019-07-01T04:13:00"), histo.getBuckets().get(1).getKey()); + assertEquals(1, histo.getBuckets().get(1).getDocCount()); - assertEquals(asZDT("2019-07-01T05:13:00"), histo.getBuckets().get(2).getKey()); - assertEquals(1, histo.getBuckets().get(2).getDocCount()); + assertEquals(asZDT("2019-07-01T05:13:00"), histo.getBuckets().get(2).getKey()); + assertEquals(1, histo.getBuckets().get(2).getDocCount()); - assertEquals(asZDT("2019-07-01T06:13:00"), histo.getBuckets().get(3).getKey()); - assertEquals(1, histo.getBuckets().get(3).getDocCount()); + assertEquals(asZDT("2019-07-01T06:13:00"), histo.getBuckets().get(3).getKey()); + assertEquals(1, histo.getBuckets().get(3).getDocCount()); - assertEquals(asZDT("2019-07-01T07:13:00"), histo.getBuckets().get(4).getKey()); - assertEquals(1, histo.getBuckets().get(4).getDocCount()); + assertEquals(asZDT("2019-07-01T07:13:00"), histo.getBuckets().get(4).getKey()); + assertEquals(1, histo.getBuckets().get(4).getDocCount()); - assertEquals(asZDT("2019-07-01T08:13:00"), histo.getBuckets().get(5).getKey()); - assertEquals(1, histo.getBuckets().get(5).getDocCount()); + assertEquals(asZDT("2019-07-01T08:13:00"), histo.getBuckets().get(5).getKey()); + assertEquals(1, histo.getBuckets().get(5).getDocCount()); - assertTrue(AggregationInspectionHelper.hasValue(histo)); - } - ); + assertTrue(AggregationInspectionHelper.hasValue(histo)); + }); - testCase( - new MatchAllDocsQuery(), - builder -> builder.calendarInterval(DateHistogramInterval.HOUR).offset("13m"), - writer -> { - writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(range1))))); - writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(range2))))); - writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(range3))))); - writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(range4))))); - writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(range5))))); - writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(range6))))); - }, - histo -> { - assertEquals(6, histo.getBuckets().size()); + testCase(new MatchAllDocsQuery(), builder -> builder.calendarInterval(DateHistogramInterval.HOUR).offset("13m"), writer -> { + writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(range1))))); + writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(range2))))); + writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(range3))))); + 
writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(range4))))); + writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(range5))))); + writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(range6))))); + }, histo -> { + assertEquals(6, histo.getBuckets().size()); - assertEquals(asZDT("2019-07-01T03:13:00"), histo.getBuckets().get(0).getKey()); - assertEquals(1, histo.getBuckets().get(0).getDocCount()); + assertEquals(asZDT("2019-07-01T03:13:00"), histo.getBuckets().get(0).getKey()); + assertEquals(1, histo.getBuckets().get(0).getDocCount()); - assertEquals(asZDT("2019-07-01T04:13:00"), histo.getBuckets().get(1).getKey()); - assertEquals(1, histo.getBuckets().get(1).getDocCount()); + assertEquals(asZDT("2019-07-01T04:13:00"), histo.getBuckets().get(1).getKey()); + assertEquals(1, histo.getBuckets().get(1).getDocCount()); - assertEquals(asZDT("2019-07-01T05:13:00"), histo.getBuckets().get(2).getKey()); - assertEquals(1, histo.getBuckets().get(2).getDocCount()); + assertEquals(asZDT("2019-07-01T05:13:00"), histo.getBuckets().get(2).getKey()); + assertEquals(1, histo.getBuckets().get(2).getDocCount()); - assertEquals(asZDT("2019-07-01T06:13:00"), histo.getBuckets().get(3).getKey()); - assertEquals(1, histo.getBuckets().get(3).getDocCount()); + assertEquals(asZDT("2019-07-01T06:13:00"), histo.getBuckets().get(3).getKey()); + assertEquals(1, histo.getBuckets().get(3).getDocCount()); - assertEquals(asZDT("2019-07-01T07:13:00"), histo.getBuckets().get(4).getKey()); - assertEquals(1, histo.getBuckets().get(4).getDocCount()); + assertEquals(asZDT("2019-07-01T07:13:00"), histo.getBuckets().get(4).getKey()); + assertEquals(1, histo.getBuckets().get(4).getDocCount()); - assertEquals(asZDT("2019-07-01T08:13:00"), histo.getBuckets().get(5).getKey()); - assertEquals(1, histo.getBuckets().get(5).getDocCount()); + assertEquals(asZDT("2019-07-01T08:13:00"), histo.getBuckets().get(5).getKey()); + assertEquals(1, histo.getBuckets().get(5).getDocCount()); - assertTrue(AggregationInspectionHelper.hasValue(histo)); - } - ); + assertTrue(AggregationInspectionHelper.hasValue(histo)); + }); } /* @@ -399,266 +476,386 @@ public void testNextRoundingValueOffsetHours() throws Exception { * offset is on time scale */ public void testNextRoundingValueOffsetDays() throws Exception { - RangeFieldMapper.Range range1 = new RangeFieldMapper.Range(RangeType.DATE, asLong("2019-07-01T03:15:00"), - asLong("2019-07-01T03:20:00"), true, true); - RangeFieldMapper.Range range2 = new RangeFieldMapper.Range(RangeType.DATE, asLong("2019-07-02T04:15:00"), - asLong("2019-07-02T04:20:00"), true, true); - RangeFieldMapper.Range range3 = new RangeFieldMapper.Range(RangeType.DATE, asLong("2019-07-03T05:15:00"), - asLong("2019-07-03T05:20:00"), true, true); - RangeFieldMapper.Range range4 = new RangeFieldMapper.Range(RangeType.DATE, asLong("2019-07-04T06:15:00"), - asLong("2019-07-04T06:20:00"), true, true); - RangeFieldMapper.Range range5 = new RangeFieldMapper.Range(RangeType.DATE, asLong("2019-07-05T07:15:00"), - asLong("2019-07-05T07:20:00"), true, true); - RangeFieldMapper.Range range6 = new RangeFieldMapper.Range(RangeType.DATE, asLong("2019-07-06T08:15:00"), - asLong("2019-07-06T08:20:00"), true, true); + RangeFieldMapper.Range range1 = new RangeFieldMapper.Range( + RangeType.DATE, + asLong("2019-07-01T03:15:00"), + asLong("2019-07-01T03:20:00"), + true, + true + ); + 
RangeFieldMapper.Range range2 = new RangeFieldMapper.Range( + RangeType.DATE, + asLong("2019-07-02T04:15:00"), + asLong("2019-07-02T04:20:00"), + true, + true + ); + RangeFieldMapper.Range range3 = new RangeFieldMapper.Range( + RangeType.DATE, + asLong("2019-07-03T05:15:00"), + asLong("2019-07-03T05:20:00"), + true, + true + ); + RangeFieldMapper.Range range4 = new RangeFieldMapper.Range( + RangeType.DATE, + asLong("2019-07-04T06:15:00"), + asLong("2019-07-04T06:20:00"), + true, + true + ); + RangeFieldMapper.Range range5 = new RangeFieldMapper.Range( + RangeType.DATE, + asLong("2019-07-05T07:15:00"), + asLong("2019-07-05T07:20:00"), + true, + true + ); + RangeFieldMapper.Range range6 = new RangeFieldMapper.Range( + RangeType.DATE, + asLong("2019-07-06T08:15:00"), + asLong("2019-07-06T08:20:00"), + true, + true + ); - testCase( - new MatchAllDocsQuery(), - builder -> builder.fixedInterval(new DateHistogramInterval("1d")).offset("36h"), - writer -> { - writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(range1))))); - writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(range2))))); - writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(range3))))); - writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(range4))))); - writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(range5))))); - writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(range6))))); - }, - histo -> { - assertEquals(6, histo.getBuckets().size()); + testCase(new MatchAllDocsQuery(), builder -> builder.fixedInterval(new DateHistogramInterval("1d")).offset("36h"), writer -> { + writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(range1))))); + writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(range2))))); + writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(range3))))); + writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(range4))))); + writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(range5))))); + writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(range6))))); + }, histo -> { + assertEquals(6, histo.getBuckets().size()); - assertEquals(asZDT("2019-06-30T12:00:00"), histo.getBuckets().get(0).getKey()); - assertEquals(1, histo.getBuckets().get(0).getDocCount()); + assertEquals(asZDT("2019-06-30T12:00:00"), histo.getBuckets().get(0).getKey()); + assertEquals(1, histo.getBuckets().get(0).getDocCount()); - assertEquals(asZDT("2019-07-01T12:00:00"), histo.getBuckets().get(1).getKey()); - assertEquals(1, histo.getBuckets().get(1).getDocCount()); + assertEquals(asZDT("2019-07-01T12:00:00"), histo.getBuckets().get(1).getKey()); + assertEquals(1, histo.getBuckets().get(1).getDocCount()); - assertEquals(asZDT("2019-07-02T12:00:00"), histo.getBuckets().get(2).getKey()); - assertEquals(1, histo.getBuckets().get(2).getDocCount()); + assertEquals(asZDT("2019-07-02T12:00:00"), histo.getBuckets().get(2).getKey()); + assertEquals(1, histo.getBuckets().get(2).getDocCount()); - assertEquals(asZDT("2019-07-03T12:00:00"), 
histo.getBuckets().get(3).getKey());
-                assertEquals(1, histo.getBuckets().get(3).getDocCount());
+            assertEquals(asZDT("2019-07-03T12:00:00"), histo.getBuckets().get(3).getKey());
+            assertEquals(1, histo.getBuckets().get(3).getDocCount());
 
-                assertEquals(asZDT("2019-07-04T12:00:00"), histo.getBuckets().get(4).getKey());
-                assertEquals(1, histo.getBuckets().get(4).getDocCount());
+            assertEquals(asZDT("2019-07-04T12:00:00"), histo.getBuckets().get(4).getKey());
+            assertEquals(1, histo.getBuckets().get(4).getDocCount());
 
-                assertEquals(asZDT("2019-07-05T12:00:00"), histo.getBuckets().get(5).getKey());
-                assertEquals(1, histo.getBuckets().get(5).getDocCount());
+            assertEquals(asZDT("2019-07-05T12:00:00"), histo.getBuckets().get(5).getKey());
+            assertEquals(1, histo.getBuckets().get(5).getDocCount());
 
-                assertTrue(AggregationInspectionHelper.hasValue(histo));
-            }
-        );
+            assertTrue(AggregationInspectionHelper.hasValue(histo));
+        });
 
-        testCase(
-            new MatchAllDocsQuery(),
-            builder -> builder.calendarInterval(DateHistogramInterval.DAY).offset("12h"),
-            writer -> {
-                writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(range1)))));
-                writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(range2)))));
-                writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(range3)))));
-                writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(range4)))));
-                writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(range5)))));
-                writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(range6)))));
-            },
-            histo -> {
-                assertEquals(6, histo.getBuckets().size());
+        testCase(new MatchAllDocsQuery(), builder -> builder.calendarInterval(DateHistogramInterval.DAY).offset("12h"), writer -> {
+            writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(range1)))));
+            writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(range2)))));
+            writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(range3)))));
+            writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(range4)))));
+            writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(range5)))));
+            writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(range6)))));
+        }, histo -> {
+            assertEquals(6, histo.getBuckets().size());
 
-                assertEquals(asZDT("2019-06-30T12:00:00"), histo.getBuckets().get(0).getKey());
-                assertEquals(1, histo.getBuckets().get(0).getDocCount());
+            assertEquals(asZDT("2019-06-30T12:00:00"), histo.getBuckets().get(0).getKey());
+            assertEquals(1, histo.getBuckets().get(0).getDocCount());
 
-                assertEquals(asZDT("2019-07-01T12:00:00"), histo.getBuckets().get(1).getKey());
-                assertEquals(1, histo.getBuckets().get(1).getDocCount());
+            assertEquals(asZDT("2019-07-01T12:00:00"), histo.getBuckets().get(1).getKey());
+            assertEquals(1, histo.getBuckets().get(1).getDocCount());
 
-                assertEquals(asZDT("2019-07-02T12:00:00"), histo.getBuckets().get(2).getKey());
-                assertEquals(1, histo.getBuckets().get(2).getDocCount());
+            assertEquals(asZDT("2019-07-02T12:00:00"), histo.getBuckets().get(2).getKey());
+            assertEquals(1, histo.getBuckets().get(2).getDocCount());
 
-                assertEquals(asZDT("2019-07-03T12:00:00"), histo.getBuckets().get(3).getKey());
-                assertEquals(1, histo.getBuckets().get(3).getDocCount());
+            assertEquals(asZDT("2019-07-03T12:00:00"), histo.getBuckets().get(3).getKey());
+            assertEquals(1, histo.getBuckets().get(3).getDocCount());
 
-                assertEquals(asZDT("2019-07-04T12:00:00"), histo.getBuckets().get(4).getKey());
-                assertEquals(1, histo.getBuckets().get(4).getDocCount());
+            assertEquals(asZDT("2019-07-04T12:00:00"), histo.getBuckets().get(4).getKey());
+            assertEquals(1, histo.getBuckets().get(4).getDocCount());
 
-                assertEquals(asZDT("2019-07-05T12:00:00"), histo.getBuckets().get(5).getKey());
-                assertEquals(1, histo.getBuckets().get(5).getDocCount());
+            assertEquals(asZDT("2019-07-05T12:00:00"), histo.getBuckets().get(5).getKey());
+            assertEquals(1, histo.getBuckets().get(5).getDocCount());
 
-                assertTrue(AggregationInspectionHelper.hasValue(histo));
-            }
-        );
+            assertTrue(AggregationInspectionHelper.hasValue(histo));
+        });
     }
 
     public void testMinDocCount() throws Exception {
-        RangeFieldMapper.Range range1 = new RangeFieldMapper.Range(RangeType.DATE, asLong("2019-08-01T12:14:36"),
-            asLong("2019-08-01T15:07:22"), true, true);
-        RangeFieldMapper.Range range2 = new RangeFieldMapper.Range(RangeType.DATE, asLong("2019-08-02T12:14:36"),
-            asLong("2019-08-02T15:07:22"), true, true);
-        RangeFieldMapper.Range range3 = new RangeFieldMapper.Range(RangeType.DATE, asLong("2019-08-02T12:14:36"),
-            asLong("2019-08-02T15:07:22"), true, true);
-        RangeFieldMapper.Range range4 = new RangeFieldMapper.Range(RangeType.DATE, asLong("2019-08-02T12:14:36"),
-            asLong("2019-08-03T15:07:22"), true, true);
-
-        // Guard case, make sure the agg buckets as expected without min doc count
-        testCase(
-            new MatchAllDocsQuery(),
-            builder -> builder.calendarInterval(DateHistogramInterval.DAY),
-            writer -> {
-                writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(range1)))));
-                writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(range2)))));
-                writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(range3)))));
-                writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(range4)))));
-            },
-            histo -> {
-                assertEquals(3, histo.getBuckets().size());
-
-                assertEquals(asZDT("2019-08-01T00:00:00"), histo.getBuckets().get(0).getKey());
-                assertEquals(1, histo.getBuckets().get(0).getDocCount());
-
-                assertEquals(asZDT("2019-08-02T00:00:00"), histo.getBuckets().get(1).getKey());
-                assertEquals(3, histo.getBuckets().get(1).getDocCount());
-
-                assertEquals(asZDT("2019-08-03T00:00:00"), histo.getBuckets().get(2).getKey());
-                assertEquals(1, histo.getBuckets().get(2).getDocCount());
-
-                assertTrue(AggregationInspectionHelper.hasValue(histo));
-            }
+        RangeFieldMapper.Range range1 = new RangeFieldMapper.Range(
+            RangeType.DATE,
+            asLong("2019-08-01T12:14:36"),
+            asLong("2019-08-01T15:07:22"),
+            true,
+            true
         );
-
-        testCase(
-            new MatchAllDocsQuery(),
-            builder -> builder.calendarInterval(DateHistogramInterval.DAY).minDocCount(2),
-            writer -> {
-                writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(range1)))));
-                writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(range2)))));
-                writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, 
RangeType.DATE.encodeRanges(singleton(range3))))); - writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(range4))))); - }, - histo -> { - assertEquals(1, histo.getBuckets().size()); - - assertEquals(asZDT("2019-08-02T00:00:00"), histo.getBuckets().get(0).getKey()); - assertEquals(3, histo.getBuckets().get(0).getDocCount()); - - assertTrue(AggregationInspectionHelper.hasValue(histo)); - } + RangeFieldMapper.Range range2 = new RangeFieldMapper.Range( + RangeType.DATE, + asLong("2019-08-02T12:14:36"), + asLong("2019-08-02T15:07:22"), + true, + true + ); + RangeFieldMapper.Range range3 = new RangeFieldMapper.Range( + RangeType.DATE, + asLong("2019-08-02T12:14:36"), + asLong("2019-08-02T15:07:22"), + true, + true ); + RangeFieldMapper.Range range4 = new RangeFieldMapper.Range( + RangeType.DATE, + asLong("2019-08-02T12:14:36"), + asLong("2019-08-03T15:07:22"), + true, + true + ); + + // Guard case, make sure the agg buckets as expected without min doc count + testCase(new MatchAllDocsQuery(), builder -> builder.calendarInterval(DateHistogramInterval.DAY), writer -> { + writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(range1))))); + writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(range2))))); + writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(range3))))); + writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(range4))))); + }, histo -> { + assertEquals(3, histo.getBuckets().size()); + + assertEquals(asZDT("2019-08-01T00:00:00"), histo.getBuckets().get(0).getKey()); + assertEquals(1, histo.getBuckets().get(0).getDocCount()); + + assertEquals(asZDT("2019-08-02T00:00:00"), histo.getBuckets().get(1).getKey()); + assertEquals(3, histo.getBuckets().get(1).getDocCount()); + + assertEquals(asZDT("2019-08-03T00:00:00"), histo.getBuckets().get(2).getKey()); + assertEquals(1, histo.getBuckets().get(2).getDocCount()); + + assertTrue(AggregationInspectionHelper.hasValue(histo)); + }); + + testCase(new MatchAllDocsQuery(), builder -> builder.calendarInterval(DateHistogramInterval.DAY).minDocCount(2), writer -> { + writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(range1))))); + writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(range2))))); + writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(range3))))); + writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(range4))))); + }, histo -> { + assertEquals(1, histo.getBuckets().size()); + + assertEquals(asZDT("2019-08-02T00:00:00"), histo.getBuckets().get(0).getKey()); + assertEquals(3, histo.getBuckets().get(0).getDocCount()); + + assertTrue(AggregationInspectionHelper.hasValue(histo)); + }); } public void testIntersectQuery() throws Exception { - RangeFieldMapper.Range range1 = new RangeFieldMapper.Range(RangeType.DATE, asLong("2019-08-02T02:15:00"), - asLong("2019-08-02T02:45:00"), true, true); - RangeFieldMapper.Range range2 = new RangeFieldMapper.Range(RangeType.DATE, asLong("2019-08-02T05:15:00"), - asLong("2019-08-02T05:45:00"), true, true); - - RangeFieldMapper.Range range3 = new RangeFieldMapper.Range(RangeType.DATE, asLong("2019-08-02T03:15:00"), - asLong("2019-08-02T03:45:00"), 
true, true); - RangeFieldMapper.Range range4 = new RangeFieldMapper.Range(RangeType.DATE, asLong("2019-08-02T04:15:00"), - asLong("2019-08-02T04:45:00"), true, true); - RangeFieldMapper.Range range5 = new RangeFieldMapper.Range(RangeType.DATE, asLong("2019-08-02T03:30:00"), - asLong("2019-08-02T04:30:00"), true, true); + RangeFieldMapper.Range range1 = new RangeFieldMapper.Range( + RangeType.DATE, + asLong("2019-08-02T02:15:00"), + asLong("2019-08-02T02:45:00"), + true, + true + ); + RangeFieldMapper.Range range2 = new RangeFieldMapper.Range( + RangeType.DATE, + asLong("2019-08-02T05:15:00"), + asLong("2019-08-02T05:45:00"), + true, + true + ); - RangeFieldMapper.Range range6 = new RangeFieldMapper.Range(RangeType.DATE, asLong("2019-08-02T02:15:00"), - asLong("2019-08-02T03:45:00"), true, true); - RangeFieldMapper.Range range7 = new RangeFieldMapper.Range(RangeType.DATE, asLong("2019-08-02T04:15:00"), - asLong("2019-08-02T05:45:00"), true, true); - RangeFieldMapper.Range range8 = new RangeFieldMapper.Range(RangeType.DATE, asLong("2019-08-02T02:30:00"), - asLong("2019-08-02T05:30:00"), true, true); + RangeFieldMapper.Range range3 = new RangeFieldMapper.Range( + RangeType.DATE, + asLong("2019-08-02T03:15:00"), + asLong("2019-08-02T03:45:00"), + true, + true + ); + RangeFieldMapper.Range range4 = new RangeFieldMapper.Range( + RangeType.DATE, + asLong("2019-08-02T04:15:00"), + asLong("2019-08-02T04:45:00"), + true, + true + ); + RangeFieldMapper.Range range5 = new RangeFieldMapper.Range( + RangeType.DATE, + asLong("2019-08-02T03:30:00"), + asLong("2019-08-02T04:30:00"), + true, + true + ); - Query query = RangeType.DATE.dvRangeQuery(FIELD_NAME, BinaryDocValuesRangeQuery.QueryType.INTERSECTS, asLong("2019-08-02T03:00:00"), - asLong("2019-08-02T05:00:00"), true, true); + RangeFieldMapper.Range range6 = new RangeFieldMapper.Range( + RangeType.DATE, + asLong("2019-08-02T02:15:00"), + asLong("2019-08-02T03:45:00"), + true, + true + ); + RangeFieldMapper.Range range7 = new RangeFieldMapper.Range( + RangeType.DATE, + asLong("2019-08-02T04:15:00"), + asLong("2019-08-02T05:45:00"), + true, + true + ); + RangeFieldMapper.Range range8 = new RangeFieldMapper.Range( + RangeType.DATE, + asLong("2019-08-02T02:30:00"), + asLong("2019-08-02T05:30:00"), + true, + true + ); + Query query = RangeType.DATE.dvRangeQuery( + FIELD_NAME, + BinaryDocValuesRangeQuery.QueryType.INTERSECTS, + asLong("2019-08-02T03:00:00"), + asLong("2019-08-02T05:00:00"), + true, + true + ); - testCase( - query, - builder -> builder.calendarInterval(DateHistogramInterval.HOUR).minDocCount(2), - writer -> { - writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(range1))))); - writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(range2))))); - writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(range3))))); - writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(range4))))); - writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(range5))))); - writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(range6))))); - writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(range7))))); - writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, 
RangeType.DATE.encodeRanges(singleton(range8))))); - }, - histo -> { - assertEquals(4, histo.getBuckets().size()); + testCase(query, builder -> builder.calendarInterval(DateHistogramInterval.HOUR).minDocCount(2), writer -> { + writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(range1))))); + writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(range2))))); + writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(range3))))); + writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(range4))))); + writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(range5))))); + writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(range6))))); + writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(range7))))); + writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(range8))))); + }, histo -> { + assertEquals(4, histo.getBuckets().size()); - assertEquals(asZDT("2019-08-02T02:00:00"), histo.getBuckets().get(0).getKey()); - assertEquals(2, histo.getBuckets().get(0).getDocCount()); + assertEquals(asZDT("2019-08-02T02:00:00"), histo.getBuckets().get(0).getKey()); + assertEquals(2, histo.getBuckets().get(0).getDocCount()); - assertEquals(asZDT("2019-08-02T03:00:00"), histo.getBuckets().get(1).getKey()); - assertEquals(4, histo.getBuckets().get(1).getDocCount()); + assertEquals(asZDT("2019-08-02T03:00:00"), histo.getBuckets().get(1).getKey()); + assertEquals(4, histo.getBuckets().get(1).getDocCount()); - assertEquals(asZDT("2019-08-02T04:00:00"), histo.getBuckets().get(2).getKey()); - assertEquals(4, histo.getBuckets().get(2).getDocCount()); + assertEquals(asZDT("2019-08-02T04:00:00"), histo.getBuckets().get(2).getKey()); + assertEquals(4, histo.getBuckets().get(2).getDocCount()); - assertEquals(asZDT("2019-08-02T05:00:00"), histo.getBuckets().get(3).getKey()); - assertEquals(2, histo.getBuckets().get(3).getDocCount()); + assertEquals(asZDT("2019-08-02T05:00:00"), histo.getBuckets().get(3).getKey()); + assertEquals(2, histo.getBuckets().get(3).getDocCount()); - assertTrue(AggregationInspectionHelper.hasValue(histo)); - } - ); + assertTrue(AggregationInspectionHelper.hasValue(histo)); + }); } public void testWithinQuery() throws Exception { - RangeFieldMapper.Range range1 = new RangeFieldMapper.Range(RangeType.DATE, asLong("2019-08-02T02:15:00"), - asLong("2019-08-02T02:45:00"), true, true); - RangeFieldMapper.Range range2 = new RangeFieldMapper.Range(RangeType.DATE, asLong("2019-08-02T05:15:00"), - asLong("2019-08-02T05:45:00"), true, true); - - RangeFieldMapper.Range range3 = new RangeFieldMapper.Range(RangeType.DATE, asLong("2019-08-02T03:15:00"), - asLong("2019-08-02T03:45:00"), true, true); - RangeFieldMapper.Range range4 = new RangeFieldMapper.Range(RangeType.DATE, asLong("2019-08-02T04:15:00"), - asLong("2019-08-02T04:45:00"), true, true); - RangeFieldMapper.Range range5 = new RangeFieldMapper.Range(RangeType.DATE, asLong("2019-08-02T03:30:00"), - asLong("2019-08-02T04:30:00"), true, true); - - RangeFieldMapper.Range range6 = new RangeFieldMapper.Range(RangeType.DATE, asLong("2019-08-02T02:15:00"), - asLong("2019-08-02T03:45:00"), true, true); - RangeFieldMapper.Range range7 = new 
RangeFieldMapper.Range(RangeType.DATE, asLong("2019-08-02T04:15:00"), - asLong("2019-08-02T05:45:00"), true, true); - RangeFieldMapper.Range range8 = new RangeFieldMapper.Range(RangeType.DATE, asLong("2019-08-02T02:30:00"), - asLong("2019-08-02T05:30:00"), true, true); - - Query query = RangeType.DATE.dvRangeQuery(FIELD_NAME, BinaryDocValuesRangeQuery.QueryType.WITHIN, asLong("2019-08-02T03:00:00"), - asLong("2019-08-02T05:00:00"), true, true); - - - testCase( - query, - builder -> builder.calendarInterval(DateHistogramInterval.HOUR).minDocCount(2), - writer -> { - writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(range1))))); - writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(range2))))); - writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(range3))))); - writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(range4))))); - writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(range5))))); - writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(range6))))); - writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(range7))))); - writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(range8))))); - }, - histo -> { - assertEquals(2, histo.getBuckets().size()); + RangeFieldMapper.Range range1 = new RangeFieldMapper.Range( + RangeType.DATE, + asLong("2019-08-02T02:15:00"), + asLong("2019-08-02T02:45:00"), + true, + true + ); + RangeFieldMapper.Range range2 = new RangeFieldMapper.Range( + RangeType.DATE, + asLong("2019-08-02T05:15:00"), + asLong("2019-08-02T05:45:00"), + true, + true + ); - assertEquals(asZDT("2019-08-02T03:00:00"), histo.getBuckets().get(0).getKey()); - assertEquals(2, histo.getBuckets().get(0).getDocCount()); + RangeFieldMapper.Range range3 = new RangeFieldMapper.Range( + RangeType.DATE, + asLong("2019-08-02T03:15:00"), + asLong("2019-08-02T03:45:00"), + true, + true + ); + RangeFieldMapper.Range range4 = new RangeFieldMapper.Range( + RangeType.DATE, + asLong("2019-08-02T04:15:00"), + asLong("2019-08-02T04:45:00"), + true, + true + ); + RangeFieldMapper.Range range5 = new RangeFieldMapper.Range( + RangeType.DATE, + asLong("2019-08-02T03:30:00"), + asLong("2019-08-02T04:30:00"), + true, + true + ); - assertEquals(asZDT("2019-08-02T04:00:00"), histo.getBuckets().get(1).getKey()); - assertEquals(2, histo.getBuckets().get(1).getDocCount()); + RangeFieldMapper.Range range6 = new RangeFieldMapper.Range( + RangeType.DATE, + asLong("2019-08-02T02:15:00"), + asLong("2019-08-02T03:45:00"), + true, + true + ); + RangeFieldMapper.Range range7 = new RangeFieldMapper.Range( + RangeType.DATE, + asLong("2019-08-02T04:15:00"), + asLong("2019-08-02T05:45:00"), + true, + true + ); + RangeFieldMapper.Range range8 = new RangeFieldMapper.Range( + RangeType.DATE, + asLong("2019-08-02T02:30:00"), + asLong("2019-08-02T05:30:00"), + true, + true + ); - assertTrue(AggregationInspectionHelper.hasValue(histo)); - } + Query query = RangeType.DATE.dvRangeQuery( + FIELD_NAME, + BinaryDocValuesRangeQuery.QueryType.WITHIN, + asLong("2019-08-02T03:00:00"), + asLong("2019-08-02T05:00:00"), + true, + true ); + + testCase(query, builder -> builder.calendarInterval(DateHistogramInterval.HOUR).minDocCount(2), 
writer -> { + writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(range1))))); + writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(range2))))); + writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(range3))))); + writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(range4))))); + writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(range5))))); + writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(range6))))); + writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(range7))))); + writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(range8))))); + }, histo -> { + assertEquals(2, histo.getBuckets().size()); + + assertEquals(asZDT("2019-08-02T03:00:00"), histo.getBuckets().get(0).getKey()); + assertEquals(2, histo.getBuckets().get(0).getDocCount()); + + assertEquals(asZDT("2019-08-02T04:00:00"), histo.getBuckets().get(1).getKey()); + assertEquals(2, histo.getBuckets().get(1).getDocCount()); + + assertTrue(AggregationInspectionHelper.hasValue(histo)); + }); } public void testHardBounds() throws Exception { - RangeFieldMapper.Range range1 = new RangeFieldMapper.Range(RangeType.DATE, asLong("2019-08-02T02:15:00"), - asLong("2019-08-02T05:45:00"), true, true); - RangeFieldMapper.Range range2 = new RangeFieldMapper.Range(RangeType.DATE, asLong("2019-08-02T05:15:00"), - asLong("2019-08-02T17:45:00"), true, true); + RangeFieldMapper.Range range1 = new RangeFieldMapper.Range( + RangeType.DATE, + asLong("2019-08-02T02:15:00"), + asLong("2019-08-02T05:45:00"), + true, + true + ); + RangeFieldMapper.Range range2 = new RangeFieldMapper.Range( + RangeType.DATE, + asLong("2019-08-02T05:15:00"), + asLong("2019-08-02T17:45:00"), + true, + true + ); testCase( Queries.newMatchAllQuery(), - builder -> builder.calendarInterval(DateHistogramInterval.HOUR).hardBounds( - new LongBounds("2019-08-02T03:00:00", "2019-08-02T10:00:00")), + builder -> builder.calendarInterval(DateHistogramInterval.HOUR) + .hardBounds(new LongBounds("2019-08-02T03:00:00", "2019-08-02T10:00:00")), writer -> { writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(range1))))); writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(range2))))); @@ -679,16 +876,27 @@ public void testHardBounds() throws Exception { } ); } + public void testHardBoundsWithOpenRanges() throws Exception { - RangeFieldMapper.Range range1 = new RangeFieldMapper.Range(RangeType.DATE, Long.MIN_VALUE, - asLong("2019-08-02T05:45:00"), true, true); - RangeFieldMapper.Range range2 = new RangeFieldMapper.Range(RangeType.DATE, asLong("2019-08-02T05:15:00"), - Long.MAX_VALUE, true, true); + RangeFieldMapper.Range range1 = new RangeFieldMapper.Range( + RangeType.DATE, + Long.MIN_VALUE, + asLong("2019-08-02T05:45:00"), + true, + true + ); + RangeFieldMapper.Range range2 = new RangeFieldMapper.Range( + RangeType.DATE, + asLong("2019-08-02T05:15:00"), + Long.MAX_VALUE, + true, + true + ); testCase( Queries.newMatchAllQuery(), - builder -> builder.calendarInterval(DateHistogramInterval.HOUR).hardBounds( - new LongBounds("2019-08-02T03:00:00", "2019-08-02T10:00:00")), + builder 
-> builder.calendarInterval(DateHistogramInterval.HOUR) + .hardBounds(new LongBounds("2019-08-02T03:00:00", "2019-08-02T10:00:00")), writer -> { writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(range1))))); writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(range2))))); @@ -711,10 +919,20 @@ public void testHardBoundsWithOpenRanges() throws Exception { } public void testBothBounds() throws Exception { - RangeFieldMapper.Range range1 = new RangeFieldMapper.Range(RangeType.DATE, asLong("2019-08-02T02:15:00"), - asLong("2019-08-02T05:45:00"), true, true); - RangeFieldMapper.Range range2 = new RangeFieldMapper.Range(RangeType.DATE, asLong("2019-08-02T05:15:00"), - asLong("2019-08-02T17:45:00"), true, true); + RangeFieldMapper.Range range1 = new RangeFieldMapper.Range( + RangeType.DATE, + asLong("2019-08-02T02:15:00"), + asLong("2019-08-02T05:45:00"), + true, + true + ); + RangeFieldMapper.Range range2 = new RangeFieldMapper.Range( + RangeType.DATE, + asLong("2019-08-02T05:15:00"), + asLong("2019-08-02T17:45:00"), + true, + true + ); testCase( Queries.newMatchAllQuery(), @@ -744,28 +962,44 @@ public void testBothBounds() throws Exception { public void testOverlappingBounds() throws Exception { - IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, () -> testCase( - Queries.newMatchAllQuery(), - builder -> builder.calendarInterval(DateHistogramInterval.HOUR) - .hardBounds(new LongBounds("2019-08-02T01:00:00", "2019-08-02T08:00:00")) - .extendedBounds(new LongBounds("2019-08-02T00:00:00", "2019-08-02T10:00:00")), - writer -> { - - }, - histo -> { - fail("Shouldn't be here"); - } - )); + IllegalArgumentException ex = expectThrows( + IllegalArgumentException.class, + () -> testCase( + Queries.newMatchAllQuery(), + builder -> builder.calendarInterval(DateHistogramInterval.HOUR) + .hardBounds(new LongBounds("2019-08-02T01:00:00", "2019-08-02T08:00:00")) + .extendedBounds(new LongBounds("2019-08-02T00:00:00", "2019-08-02T10:00:00")), + writer -> { + + }, + histo -> { fail("Shouldn't be here"); } + ) + ); - assertThat(ex.getMessage(), equalTo("Extended bounds have to be inside hard bounds, " + - "hard bounds: [2019-08-02T01:00:00--2019-08-02T08:00:00], extended bounds: [2019-08-02T00:00:00--2019-08-02T10:00:00]")); + assertThat( + ex.getMessage(), + equalTo( + "Extended bounds have to be inside hard bounds, " + + "hard bounds: [2019-08-02T01:00:00--2019-08-02T08:00:00], extended bounds: [2019-08-02T00:00:00--2019-08-02T10:00:00]" + ) + ); } public void testEqualBounds() throws Exception { - RangeFieldMapper.Range range1 = new RangeFieldMapper.Range(RangeType.DATE, asLong("2019-08-02T02:15:00"), - asLong("2019-08-02T05:45:00"), true, true); - RangeFieldMapper.Range range2 = new RangeFieldMapper.Range(RangeType.DATE, asLong("2019-08-02T05:15:00"), - asLong("2019-08-02T17:45:00"), true, true); + RangeFieldMapper.Range range1 = new RangeFieldMapper.Range( + RangeType.DATE, + asLong("2019-08-02T02:15:00"), + asLong("2019-08-02T05:45:00"), + true, + true + ); + RangeFieldMapper.Range range2 = new RangeFieldMapper.Range( + RangeType.DATE, + asLong("2019-08-02T05:15:00"), + asLong("2019-08-02T17:45:00"), + true, + true + ); testCase( Queries.newMatchAllQuery(), @@ -793,10 +1027,12 @@ public void testEqualBounds() throws Exception { ); } - private void testCase(Query query, - Consumer configure, - CheckedConsumer buildIndex, - Consumer verify) throws IOException { + private void 
testCase( + Query query, + Consumer configure, + CheckedConsumer buildIndex, + Consumer verify + ) throws IOException { MappedFieldType fieldType = new RangeFieldMapper.RangeFieldType(FIELD_NAME, RangeFieldMapper.Defaults.DATE_FORMATTER); final DateHistogramAggregationBuilder aggregationBuilder = new DateHistogramAggregationBuilder("_name").field(FIELD_NAME); if (configure != null) { @@ -805,11 +1041,14 @@ private void testCase(Query query, testCase(aggregationBuilder, query, buildIndex, verify, fieldType); } - private void testCase(DateHistogramAggregationBuilder aggregationBuilder, Query query, - CheckedConsumer buildIndex, Consumer verify, - MappedFieldType fieldType) throws IOException { - try(Directory directory = newDirectory(); - RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory)) { + private void testCase( + DateHistogramAggregationBuilder aggregationBuilder, + Query query, + CheckedConsumer buildIndex, + Consumer verify, + MappedFieldType fieldType + ) throws IOException { + try (Directory directory = newDirectory(); RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory)) { buildIndex.accept(indexWriter); indexWriter.close(); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalAutoDateHistogramTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalAutoDateHistogramTests.java index f211a6cc009c8..390427f2113a4 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalAutoDateHistogramTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalAutoDateHistogramTests.java @@ -64,9 +64,7 @@ protected InternalAutoDateHistogram createTestInstance( } @Override - protected InternalAutoDateHistogram createTestInstance(String name, - Map metadata, - InternalAggregations aggregations) { + protected InternalAutoDateHistogram createTestInstance(String name, Map metadata, InternalAggregations aggregations) { RoundingInfo[] roundingInfos = AutoDateHistogramAggregationBuilder.buildRoundings(null, null); int roundingIndex = between(0, roundingInfos.length - 1); return createTestInstance( @@ -90,19 +88,21 @@ public void testGetAppropriateRoundingUsesCorrectIntervals() { // Since we pass 0 as the starting index to getAppropriateRounding, we'll also use // an innerInterval that is quite large, such that targetBuckets * roundings[i].getMaximumInnerInterval() // will be larger than the estimate. - roundings[0] = new RoundingInfo(Rounding.DateTimeUnit.SECOND_OF_MINUTE, timeZone, - 1000L, "s", 1000); - roundings[1] = new RoundingInfo(Rounding.DateTimeUnit.MINUTES_OF_HOUR, timeZone, - 60 * 1000L, "m", 1, 5, 10, 30); - roundings[2] = new RoundingInfo(Rounding.DateTimeUnit.HOUR_OF_DAY, timeZone, - 60 * 60 * 1000L, "h", 1, 3, 12); + roundings[0] = new RoundingInfo(Rounding.DateTimeUnit.SECOND_OF_MINUTE, timeZone, 1000L, "s", 1000); + roundings[1] = new RoundingInfo(Rounding.DateTimeUnit.MINUTES_OF_HOUR, timeZone, 60 * 1000L, "m", 1, 5, 10, 30); + roundings[2] = new RoundingInfo(Rounding.DateTimeUnit.HOUR_OF_DAY, timeZone, 60 * 60 * 1000L, "h", 1, 3, 12); OffsetDateTime timestamp = Instant.parse("2018-01-01T00:00:01.000Z").atOffset(ZoneOffset.UTC); // We want to pass a roundingIdx of zero, because in order to reproduce this bug, we need the function // to increment the rounding (because the bug was that the function would not use the innerIntervals // from the new rounding. 
- int result = InternalAutoDateHistogram.getAppropriateRounding(timestamp.toEpochSecond()*1000, - timestamp.plusDays(1).toEpochSecond()*1000, 0, roundings, 25); + int result = InternalAutoDateHistogram.getAppropriateRounding( + timestamp.toEpochSecond() * 1000, + timestamp.plusDays(1).toEpochSecond() * 1000, + 0, + roundings, + 25 + ); assertThat(result, equalTo(2)); } @@ -115,7 +115,7 @@ protected List randomResultsToReduce(String name, int List result = new ArrayList<>(size); for (int i = 0; i < size; i++) { long thisResultStart = startingDate; - thisResultStart += usually() ? 0 :randomFrom(TimeUnit.MINUTES, TimeUnit.HOURS, TimeUnit.DAYS).toMillis(between(1, 10000)); + thisResultStart += usually() ? 0 : randomFrom(TimeUnit.MINUTES, TimeUnit.HOURS, TimeUnit.DAYS).toMillis(between(1, 10000)); result.add(createTestInstance(name, null, InternalAggregations.EMPTY, thisResultStart, roundingInfos, roundingIndex, format)); } return result; @@ -153,7 +153,7 @@ protected void assertReduced(InternalAutoDateHistogram reduced, List= 0; j--) { + for (int j = roundingInfo.innerIntervals.length - 1; j >= 0; j--) { int interval = roundingInfo.innerIntervals[j]; if (normalizedDuration / interval < reduced.getBuckets().size()) { innerIntervalIndex = j; @@ -203,8 +203,7 @@ protected void assertReduced(InternalAutoDateHistogram reduced, List= keyForBucket && roundedBucketKey < nextKey) { - expectedCounts.compute(key, - (k, oldValue) -> (oldValue == null ? 0 : oldValue) + docCount); + expectedCounts.compute(key, (k, oldValue) -> (oldValue == null ? 0 : oldValue) + docCount); } } } @@ -221,8 +220,10 @@ protected void assertReduced(InternalAutoDateHistogram reduced, List actualCounts = new TreeMap<>(); for (Histogram.Bucket bucket : reduced.getBuckets()) { - actualCounts.compute(((ZonedDateTime) bucket.getKey()).toInstant(), - (key, oldValue) -> (oldValue == null ? 0 : oldValue) + bucket.getDocCount()); + actualCounts.compute( + ((ZonedDateTime) bucket.getKey()).toInstant(), + (key, oldValue) -> (oldValue == null ? 0 : oldValue) + bucket.getDocCount() + ); } assertEquals(expectedCounts, actualCounts); @@ -230,7 +231,7 @@ protected void assertReduced(InternalAutoDateHistogram reduced, List metadata = instance.getMetadata(); switch (between(0, 3)) { - case 0: - name += randomAlphaOfLength(5); - break; - case 1: - buckets = new ArrayList<>(buckets); - buckets.add(new InternalAutoDateHistogram.Bucket(randomNonNegativeLong(), randomIntBetween(1, 100), instance.getFormatter(), - InternalAggregations.EMPTY)); - break; - case 2: - int roundingIdx = bucketInfo.roundingIdx == bucketInfo.roundingInfos.length - 1 ? 0 : bucketInfo.roundingIdx + 1; - bucketInfo = new BucketInfo(bucketInfo.roundingInfos, roundingIdx, bucketInfo.emptySubAggregations); - break; - case 3: - if (metadata == null) { - metadata = new HashMap<>(1); - } else { - metadata = new HashMap<>(instance.getMetadata()); - } - metadata.put(randomAlphaOfLength(15), randomInt()); - break; - default: - throw new AssertionError("Illegal randomisation branch"); + case 0: + name += randomAlphaOfLength(5); + break; + case 1: + buckets = new ArrayList<>(buckets); + buckets.add( + new InternalAutoDateHistogram.Bucket( + randomNonNegativeLong(), + randomIntBetween(1, 100), + instance.getFormatter(), + InternalAggregations.EMPTY + ) + ); + break; + case 2: + int roundingIdx = bucketInfo.roundingIdx == bucketInfo.roundingInfos.length - 1 ? 
0 : bucketInfo.roundingIdx + 1; + bucketInfo = new BucketInfo(bucketInfo.roundingInfos, roundingIdx, bucketInfo.emptySubAggregations); + break; + case 3: + if (metadata == null) { + metadata = new HashMap<>(1); + } else { + metadata = new HashMap<>(instance.getMetadata()); + } + metadata.put(randomAlphaOfLength(15), randomInt()); + break; + default: + throw new AssertionError("Illegal randomisation branch"); } return new InternalAutoDateHistogram(name, buckets, targetBuckets, bucketInfo, instance.getFormatter(), metadata, 1); } public void testReduceSecond() { - InternalAutoDateHistogram h = new ReduceTestBuilder(10) - .bucket("1970-01-01T00:00:01", 1).bucket("1970-01-01T00:00:02", 1).bucket("1970-01-01T00:00:03", 1) + InternalAutoDateHistogram h = new ReduceTestBuilder(10).bucket("1970-01-01T00:00:01", 1) + .bucket("1970-01-01T00:00:02", 1) + .bucket("1970-01-01T00:00:03", 1) .finishShardResult("s", 1) - .bucket("1970-01-01T00:00:03", 1).bucket("1970-01-01T00:00:04", 1) + .bucket("1970-01-01T00:00:03", 1) + .bucket("1970-01-01T00:00:04", 1) .finishShardResult("s", 1) .reduce(); - assertThat(keys(h), equalTo(Arrays.asList( - "1970-01-01T00:00:01Z", "1970-01-01T00:00:02Z", "1970-01-01T00:00:03Z", "1970-01-01T00:00:04Z"))); + assertThat( + keys(h), + equalTo(Arrays.asList("1970-01-01T00:00:01Z", "1970-01-01T00:00:02Z", "1970-01-01T00:00:03Z", "1970-01-01T00:00:04Z")) + ); assertThat(docCounts(h), equalTo(Arrays.asList(1, 1, 2, 1))); } public void testReduceThirtySeconds() { - InternalAutoDateHistogram h = new ReduceTestBuilder(10) - .bucket("1970-01-01T00:00:00", 1).bucket("1970-01-01T00:00:30", 1).bucket("1970-01-01T00:02:00", 1) + InternalAutoDateHistogram h = new ReduceTestBuilder(10).bucket("1970-01-01T00:00:00", 1) + .bucket("1970-01-01T00:00:30", 1) + .bucket("1970-01-01T00:02:00", 1) .finishShardResult("s", 1) - .bucket("1970-01-01T00:00:30", 1).bucket("1970-01-01T00:01:00", 1) + .bucket("1970-01-01T00:00:30", 1) + .bucket("1970-01-01T00:01:00", 1) .finishShardResult("s", 1) .reduce(); - assertThat(keys(h), equalTo(Arrays.asList( - "1970-01-01T00:00:00Z", "1970-01-01T00:00:30Z", "1970-01-01T00:01:00Z", "1970-01-01T00:01:30Z", "1970-01-01T00:02:00Z"))); + assertThat( + keys(h), + equalTo( + Arrays.asList( + "1970-01-01T00:00:00Z", + "1970-01-01T00:00:30Z", + "1970-01-01T00:01:00Z", + "1970-01-01T00:01:30Z", + "1970-01-01T00:02:00Z" + ) + ) + ); assertThat(docCounts(h), equalTo(Arrays.asList(1, 2, 1, 0, 1))); } public void testReduceBumpsInnerRange() { - InternalAutoDateHistogram h = new ReduceTestBuilder(2) - .bucket("1970-01-01T00:00:01", 1).bucket("1970-01-01T00:00:02", 1) + InternalAutoDateHistogram h = new ReduceTestBuilder(2).bucket("1970-01-01T00:00:01", 1) + .bucket("1970-01-01T00:00:02", 1) .finishShardResult("s", 1) - .bucket("1970-01-01T00:00:00", 1).bucket("1970-01-01T00:00:05", 1) + .bucket("1970-01-01T00:00:00", 1) + .bucket("1970-01-01T00:00:05", 1) .finishShardResult("s", 5) .reduce(); assertThat(keys(h), equalTo(Arrays.asList("1970-01-01T00:00:00Z", "1970-01-01T00:00:05Z"))); @@ -322,10 +346,11 @@ public void testReduceBumpsInnerRange() { } public void testReduceBumpsRounding() { - InternalAutoDateHistogram h = new ReduceTestBuilder(2) - .bucket("1970-01-01T00:00:01", 1).bucket("1970-01-01T00:00:02", 1) + InternalAutoDateHistogram h = new ReduceTestBuilder(2).bucket("1970-01-01T00:00:01", 1) + .bucket("1970-01-01T00:00:02", 1) .finishShardResult("s", 1) - .bucket("1970-01-01T00:00:00", 1).bucket("1970-01-01T00:01:00", 1) + .bucket("1970-01-01T00:00:00", 1) + 
.bucket("1970-01-01T00:01:00", 1) .finishShardResult("m", 1) .reduce(); assertThat(keys(h), equalTo(Arrays.asList("1970-01-01T00:00:00Z", "1970-01-01T00:01:00Z"))); @@ -334,7 +359,10 @@ public void testReduceBumpsRounding() { private static class ReduceTestBuilder { private static final DocValueFormat FORMAT = new DocValueFormat.DateTime( - DateFormatter.forPattern("date_time_no_millis"), ZoneOffset.UTC, DateFieldMapper.Resolution.MILLISECONDS); + DateFormatter.forPattern("date_time_no_millis"), + ZoneOffset.UTC, + DateFieldMapper.Resolution.MILLISECONDS + ); private final List results = new ArrayList<>(); private final List buckets = new ArrayList<>(); private final int targetBuckets; @@ -344,8 +372,7 @@ private static class ReduceTestBuilder { } ReduceTestBuilder bucket(String key, long docCount) { - buckets.add(new InternalAutoDateHistogram.Bucket( - utcMillis(key), docCount, FORMAT, InternalAggregations.EMPTY)); + buckets.add(new InternalAutoDateHistogram.Bucket(utcMillis(key), docCount, FORMAT, InternalAggregations.EMPTY)); return this; } @@ -359,11 +386,22 @@ ReduceTestBuilder finishShardResult(String whichRounding, int innerInterval) { } } assertThat("rounding [" + whichRounding + "] should be in " + Arrays.toString(roundings), roundingIdx, greaterThan(-1)); - assertTrue(Arrays.toString(roundings[roundingIdx].innerIntervals) + " must contain " + innerInterval, - Arrays.binarySearch(roundings[roundingIdx].innerIntervals, innerInterval) >= 0); + assertTrue( + Arrays.toString(roundings[roundingIdx].innerIntervals) + " must contain " + innerInterval, + Arrays.binarySearch(roundings[roundingIdx].innerIntervals, innerInterval) >= 0 + ); BucketInfo bucketInfo = new BucketInfo(roundings, roundingIdx, InternalAggregations.EMPTY); - results.add(new InternalAutoDateHistogram("test", new ArrayList<>(buckets), targetBuckets, bucketInfo, - FORMAT, emptyMap(), innerInterval)); + results.add( + new InternalAutoDateHistogram( + "test", + new ArrayList<>(buckets), + targetBuckets, + bucketInfo, + FORMAT, + emptyMap(), + innerInterval + ) + ); buckets.clear(); return this; } @@ -389,8 +427,14 @@ private List docCounts(InternalAutoDateHistogram h) { public void testCreateWithReplacementBuckets() { InternalAutoDateHistogram noInterval = createTestInstance(); InternalAutoDateHistogram orig = new InternalAutoDateHistogram( - noInterval.getName(), noInterval.getBuckets(), noInterval.getTargetBuckets(), noInterval.getBucketInfo(), - noInterval.getFormatter(), noInterval.getMetadata(), randomLong()); + noInterval.getName(), + noInterval.getBuckets(), + noInterval.getTargetBuckets(), + noInterval.getBucketInfo(), + noInterval.getFormatter(), + noInterval.getMetadata(), + randomLong() + ); InternalAutoDateHistogram copy = orig.create(List.of()); assertThat(copy.getName(), equalTo(orig.getName())); assertThat(copy.getBuckets(), hasSize(0)); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogramTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogramTests.java index bdee93908534b..9d0f37cc97c5f 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogramTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogramTests.java @@ -42,9 +42,9 @@ public void setUp() throws Exception { super.setUp(); keyed = randomBoolean(); format = randomNumericDocValueFormat(); - //in order for reduction to work properly 
(and be realistic) we need to use the same interval, minDocCount, emptyBucketInfo - //and base in all randomly created aggs as part of the same test run. This is particularly important when minDocCount is - //set to 0 as empty buckets need to be added to fill the holes. + // in order for reduction to work properly (and be realistic) we need to use the same interval, minDocCount, emptyBucketInfo + // and base in all randomly created aggs as part of the same test run. This is particularly important when minDocCount is + // set to 0 as empty buckets need to be added to fill the holes. long interval = randomIntBetween(1, 3); intervalMillis = randomFrom(timeValueSeconds(interval), timeValueMinutes(interval), timeValueHours(interval)).getMillis(); Rounding rounding = Rounding.builder(TimeValue.timeValueMillis(intervalMillis)).build(); @@ -57,8 +57,8 @@ public void setUp() throws Exception { minDocCount = 0; LongBounds extendedBounds = null; if (randomBoolean()) { - //it's ok if min and max are outside the range of the generated buckets, that will just mean that - //empty buckets won't be added before the first bucket and/or after the last one + // it's ok if min and max are outside the range of the generated buckets, that will just mean that + // empty buckets won't be added before the first bucket and/or after the last one long min = baseMillis - intervalMillis * randomNumberOfBuckets(); long max = baseMillis + randomNumberOfBuckets() * intervalMillis; extendedBounds = new LongBounds(min, max); @@ -68,15 +68,13 @@ public void setUp() throws Exception { } @Override - protected InternalDateHistogram createTestInstance(String name, - Map metadata, - InternalAggregations aggregations) { + protected InternalDateHistogram createTestInstance(String name, Map metadata, InternalAggregations aggregations) { int nbBuckets = randomNumberOfBuckets(); List buckets = new ArrayList<>(nbBuckets); - //avoid having different random instance start from exactly the same base + // avoid having different random instance start from exactly the same base long startingDate = baseMillis - intervalMillis * randomNumberOfBuckets(); for (int i = 0; i < nbBuckets; i++) { - //rarely leave some holes to be filled up with empty buckets in case minDocCount is set to 0 + // rarely leave some holes to be filled up with empty buckets in case minDocCount is set to 0 if (frequently()) { long key = startingDate + intervalMillis * i; buckets.add(new InternalDateHistogram.Bucket(key, randomIntBetween(1, 100), keyed, format, aggregations)); @@ -91,8 +89,10 @@ protected void assertReduced(InternalDateHistogram reduced, List expectedCounts = new TreeMap<>(); for (Histogram histogram : inputs) { for (Histogram.Bucket bucket : histogram.getBuckets()) { - expectedCounts.compute(((ZonedDateTime) bucket.getKey()).toInstant().toEpochMilli(), - (key, oldValue) -> (oldValue == null ? 0 : oldValue) + bucket.getDocCount()); + expectedCounts.compute( + ((ZonedDateTime) bucket.getKey()).toInstant().toEpochMilli(), + (key, oldValue) -> (oldValue == null ? 0 : oldValue) + bucket.getDocCount() + ); } } if (minDocCount == 0) { @@ -130,8 +130,10 @@ protected void assertReduced(InternalDateHistogram reduced, List actualCounts = new TreeMap<>(); for (Histogram.Bucket bucket : reduced.getBuckets()) { - actualCounts.compute(((ZonedDateTime) bucket.getKey()).toInstant().toEpochMilli(), - (key, oldValue) -> (oldValue == null ? 
0 : oldValue) + bucket.getDocCount()); + actualCounts.compute( + ((ZonedDateTime) bucket.getKey()).toInstant().toEpochMilli(), + (key, oldValue) -> (oldValue == null ? 0 : oldValue) + bucket.getDocCount() + ); } assertEquals(expectedCounts, actualCounts); } @@ -151,34 +153,41 @@ protected InternalDateHistogram mutateInstance(InternalDateHistogram instance) { InternalDateHistogram.EmptyBucketInfo emptyBucketInfo = instance.emptyBucketInfo; Map metadata = instance.getMetadata(); switch (between(0, 5)) { - case 0: - name += randomAlphaOfLength(5); - break; - case 1: - buckets = new ArrayList<>(buckets); - buckets.add(new InternalDateHistogram.Bucket(randomNonNegativeLong(), randomIntBetween(1, 100), keyed, format, - InternalAggregations.EMPTY)); - break; - case 2: - order = BucketOrder.count(randomBoolean()); - break; - case 3: - minDocCount += between(1, 10); - emptyBucketInfo = null; - break; - case 4: - offset += between(1, 20); - break; - case 5: - if (metadata == null) { - metadata = new HashMap<>(1); - } else { - metadata = new HashMap<>(instance.getMetadata()); - } - metadata.put(randomAlphaOfLength(15), randomInt()); - break; - default: - throw new AssertionError("Illegal randomisation branch"); + case 0: + name += randomAlphaOfLength(5); + break; + case 1: + buckets = new ArrayList<>(buckets); + buckets.add( + new InternalDateHistogram.Bucket( + randomNonNegativeLong(), + randomIntBetween(1, 100), + keyed, + format, + InternalAggregations.EMPTY + ) + ); + break; + case 2: + order = BucketOrder.count(randomBoolean()); + break; + case 3: + minDocCount += between(1, 10); + emptyBucketInfo = null; + break; + case 4: + offset += between(1, 20); + break; + case 5: + if (metadata == null) { + metadata = new HashMap<>(1); + } else { + metadata = new HashMap<>(instance.getMetadata()); + } + metadata.put(randomAlphaOfLength(15), randomInt()); + break; + default: + throw new AssertionError("Illegal randomisation branch"); } return new InternalDateHistogram(name, buckets, order, minDocCount, offset, emptyBucketInfo, format, keyed, metadata); } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalHistogramTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalHistogramTests.java index f8bfdffc8df78..76777561e998c 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalHistogramTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalHistogramTests.java @@ -36,9 +36,9 @@ public void setUp() throws Exception { super.setUp(); keyed = randomBoolean(); format = randomNumericDocValueFormat(); - //in order for reduction to work properly (and be realistic) we need to use the same interval, minDocCount, emptyBucketInfo - //and offset in all randomly created aggs as part of the same test run. This is particularly important when minDocCount is - //set to 0 as empty buckets need to be added to fill the holes. + // in order for reduction to work properly (and be realistic) we need to use the same interval, minDocCount, emptyBucketInfo + // and offset in all randomly created aggs as part of the same test run. This is particularly important when minDocCount is + // set to 0 as empty buckets need to be added to fill the holes. 
interval = randomIntBetween(1, 3); offset = randomIntBetween(0, 3); if (randomBoolean()) { @@ -46,10 +46,10 @@ public void setUp() throws Exception { emptyBucketInfo = null; } else { minDocCount = 0; - //it's ok if minBound and maxBound are outside the range of the generated buckets, that will just mean that - //empty buckets won't be added before the first bucket and/or after the last one + // it's ok if minBound and maxBound are outside the range of the generated buckets, that will just mean that + // empty buckets won't be added before the first bucket and/or after the last one int minBound = randomInt(50) - 30; - int maxBound = randomNumberOfBuckets() * interval + randomIntBetween(0, 10); + int maxBound = randomNumberOfBuckets() * interval + randomIntBetween(0, 10); emptyBucketInfo = new InternalHistogram.EmptyBucketInfo(interval, offset, minBound, maxBound, InternalAggregations.EMPTY); } } @@ -64,7 +64,7 @@ protected InternalHistogram createTestInstance(String name, Map final int numBuckets = randomNumberOfBuckets(); List buckets = new ArrayList<>(); for (int i = 0; i < numBuckets; ++i) { - //rarely leave some holes to be filled up with empty buckets in case minDocCount is set to 0 + // rarely leave some holes to be filled up with empty buckets in case minDocCount is set to 0 if (frequently()) { final int docCount = TestUtil.nextInt(random(), 1, 50); buckets.add(new InternalHistogram.Bucket(base + i * interval, docCount, keyed, format, aggregations)); @@ -92,21 +92,26 @@ public void testHandlesNaN() { newBuckets.add(new InternalHistogram.Bucket(Double.NaN, b.docCount, keyed, b.format, b.aggregations)); InternalHistogram newHistogram = histogram.create(newBuckets); - newHistogram.reduce(Arrays.asList(newHistogram, histogram2), - InternalAggregationTestCase.emptyReduceContextBuilder().forPartialReduction()); + newHistogram.reduce( + Arrays.asList(newHistogram, histogram2), + InternalAggregationTestCase.emptyReduceContextBuilder().forPartialReduction() + ); } public void testLargeReduce() { - expectReduceUsesTooManyBuckets(new InternalHistogram( - "h", - List.of(), - BucketOrder.key(true), - 0, - new InternalHistogram.EmptyBucketInfo(5e-10, 0, 0, 100, InternalAggregations.EMPTY), - DocValueFormat.RAW, - false, - null - ), 100000); + expectReduceUsesTooManyBuckets( + new InternalHistogram( + "h", + List.of(), + BucketOrder.key(true), + 0, + new InternalHistogram.EmptyBucketInfo(5e-10, 0, 0, 100, InternalAggregations.EMPTY), + DocValueFormat.RAW, + false, + null + ), + 100000 + ); } @Override @@ -114,8 +119,10 @@ protected void assertReduced(InternalHistogram reduced, List TreeMap expectedCounts = new TreeMap<>(); for (Histogram histogram : inputs) { for (Histogram.Bucket bucket : histogram.getBuckets()) { - expectedCounts.compute((Double) bucket.getKey(), - (key, oldValue) -> (oldValue == null ? 0 : oldValue) + bucket.getDocCount()); + expectedCounts.compute( + (Double) bucket.getKey(), + (key, oldValue) -> (oldValue == null ? 0 : oldValue) + bucket.getDocCount() + ); } } if (minDocCount == 0) { @@ -143,8 +150,7 @@ protected void assertReduced(InternalHistogram reduced, List Map actualCounts = new TreeMap<>(); for (Histogram.Bucket bucket : reduced.getBuckets()) { - actualCounts.compute((Double) bucket.getKey(), - (key, oldValue) -> (oldValue == null ? 0 : oldValue) + bucket.getDocCount()); + actualCounts.compute((Double) bucket.getKey(), (key, oldValue) -> (oldValue == null ? 
0 : oldValue) + bucket.getDocCount()); } assertEquals(expectedCounts, actualCounts); } @@ -163,31 +169,38 @@ protected InternalHistogram mutateInstance(InternalHistogram instance) { Map metadata = instance.getMetadata(); InternalHistogram.EmptyBucketInfo emptyBucketInfo = instance.emptyBucketInfo; switch (between(0, 4)) { - case 0: - name += randomAlphaOfLength(5); - break; - case 1: - buckets = new ArrayList<>(buckets); - buckets.add(new InternalHistogram.Bucket(randomNonNegativeLong(), randomIntBetween(1, 100), keyed, format, - InternalAggregations.EMPTY)); - break; - case 2: - order = BucketOrder.count(randomBoolean()); - break; - case 3: - minDocCount += between(1, 10); - emptyBucketInfo = null; - break; - case 4: - if (metadata == null) { - metadata = new HashMap<>(1); - } else { - metadata = new HashMap<>(instance.getMetadata()); - } - metadata.put(randomAlphaOfLength(15), randomInt()); - break; - default: - throw new AssertionError("Illegal randomisation branch"); + case 0: + name += randomAlphaOfLength(5); + break; + case 1: + buckets = new ArrayList<>(buckets); + buckets.add( + new InternalHistogram.Bucket( + randomNonNegativeLong(), + randomIntBetween(1, 100), + keyed, + format, + InternalAggregations.EMPTY + ) + ); + break; + case 2: + order = BucketOrder.count(randomBoolean()); + break; + case 3: + minDocCount += between(1, 10); + emptyBucketInfo = null; + break; + case 4: + if (metadata == null) { + metadata = new HashMap<>(1); + } else { + metadata = new HashMap<>(instance.getMetadata()); + } + metadata.put(randomAlphaOfLength(15), randomInt()); + break; + default: + throw new AssertionError("Illegal randomisation branch"); } return new InternalHistogram(name, buckets, order, minDocCount, emptyBucketInfo, format, keyed, metadata); } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalVariableWidthHistogramTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalVariableWidthHistogramTests.java index 0662511c3e7b7..35eb7eead10a0 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalVariableWidthHistogramTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalVariableWidthHistogramTests.java @@ -27,12 +27,11 @@ import java.util.List; import java.util.Map; -public class InternalVariableWidthHistogramTests extends - InternalMultiBucketAggregationTestCase{ +public class InternalVariableWidthHistogramTests extends InternalMultiBucketAggregationTestCase { private DocValueFormat format; private InternalVariableWidthHistogram.EmptyBucketInfo emptyBucktInfo; - private int numBuckets; + private int numBuckets; @Override public void setUp() throws Exception { @@ -42,7 +41,7 @@ public void setUp() throws Exception { this.numBuckets = 3; } - private InternalVariableWidthHistogram createEmptyTestInstance(){ + private InternalVariableWidthHistogram createEmptyTestInstance() { String name = randomAlphaOfLength(5); Map metadata = null; if (randomBoolean()) { @@ -57,9 +56,11 @@ private InternalVariableWidthHistogram createEmptyTestInstance(){ } @Override - protected InternalVariableWidthHistogram createTestInstance(String name, - Map metaData, - InternalAggregations aggregations) { + protected InternalVariableWidthHistogram createTestInstance( + String name, + Map metaData, + InternalAggregations aggregations + ) { final double base = randomIntBetween(-50, 50); final int numBuckets = randomIntBetween(1, 3); List buckets = 
new ArrayList<>(); @@ -68,13 +69,15 @@ protected InternalVariableWidthHistogram createTestInstance(String name, final int docCount = TestUtil.nextInt(random(), 1, 50); double add = randomDoubleBetween(1, 10, true); curKey += add; - buckets.add(new InternalVariableWidthHistogram.Bucket( - curKey, - new InternalVariableWidthHistogram.Bucket.BucketBounds(curKey - (add / 3), curKey + (add / 3)), - docCount, - format, - InternalAggregations.EMPTY - )); + buckets.add( + new InternalVariableWidthHistogram.Bucket( + curKey, + new InternalVariableWidthHistogram.Bucket.BucketBounds(curKey - (add / 3), curKey + (add / 3)), + docCount, + format, + InternalAggregations.EMPTY + ) + ); } return new InternalVariableWidthHistogram(name, buckets, emptyBucktInfo, numBuckets, format, metaData); } @@ -83,6 +86,7 @@ protected InternalVariableWidthHistogram createTestInstance(String name, protected Class implementationClass() { return ParsedVariableWidthHistogram.class; } + @Override protected InternalVariableWidthHistogram mutateInstance(InternalVariableWidthHistogram instance) { String name = instance.getName(); @@ -98,13 +102,15 @@ protected InternalVariableWidthHistogram mutateInstance(InternalVariableWidthHis buckets = new ArrayList<>(buckets); double boundMin = randomDouble(); double boundMax = Math.abs(boundMin) * 2; - buckets.add(new InternalVariableWidthHistogram.Bucket( - randomDouble(), - new InternalVariableWidthHistogram.Bucket.BucketBounds(boundMin, boundMax), - randomIntBetween(1, 100), - format, - InternalAggregations.EMPTY - )); + buckets.add( + new InternalVariableWidthHistogram.Bucket( + randomDouble(), + new InternalVariableWidthHistogram.Bucket.BucketBounds(boundMin, boundMax), + randomIntBetween(1, 100), + format, + InternalAggregations.EMPTY + ) + ); break; case 2: emptyBucketInfo = null; @@ -124,11 +130,17 @@ protected InternalVariableWidthHistogram mutateInstance(InternalVariableWidthHis public void testSingleShardReduceLong() { InternalVariableWidthHistogram dummy_histogram = createEmptyTestInstance(); List buckets = new ArrayList<>(); - for (long value : new long[]{1, 2, 5, 10, 12, 200}) { - InternalVariableWidthHistogram.Bucket.BucketBounds bounds = - new InternalVariableWidthHistogram.Bucket.BucketBounds(value, value + 1); + for (long value : new long[] { 1, 2, 5, 10, 12, 200 }) { + InternalVariableWidthHistogram.Bucket.BucketBounds bounds = new InternalVariableWidthHistogram.Bucket.BucketBounds( + value, + value + 1 + ); InternalVariableWidthHistogram.Bucket bucket = new InternalVariableWidthHistogram.Bucket( - value, bounds, 3, format, InternalAggregations.EMPTY + value, + bounds, + 3, + format, + InternalAggregations.EMPTY ); buckets.add(bucket); } @@ -137,23 +149,28 @@ public void testSingleShardReduceLong() { MockBigArrays bigArrays = new MockBigArrays(new MockPageCacheRecycler(Settings.EMPTY), new NoneCircuitBreakerService()); ScriptService mockScriptService = mockScriptService(); - MultiBucketConsumerService.MultiBucketConsumer bucketConsumer = - new MultiBucketConsumerService.MultiBucketConsumer(DEFAULT_MAX_BUCKETS, - new NoneCircuitBreakerService().getBreaker(CircuitBreaker.REQUEST)); + MultiBucketConsumerService.MultiBucketConsumer bucketConsumer = new MultiBucketConsumerService.MultiBucketConsumer( + DEFAULT_MAX_BUCKETS, + new NoneCircuitBreakerService().getBreaker(CircuitBreaker.REQUEST) + ); InternalAggregation.ReduceContext context = InternalAggregation.ReduceContext.forFinalReduction( - bigArrays, mockScriptService, bucketConsumer, 
PipelineAggregator.PipelineTree.EMPTY); + bigArrays, + mockScriptService, + bucketConsumer, + PipelineAggregator.PipelineTree.EMPTY + ); ArrayList aggs = new ArrayList<>(); aggs.add(histogram); - List reduced_buckets = - ((InternalVariableWidthHistogram) histogram.reduce(aggs, context)).getBuckets(); + List reduced_buckets = ((InternalVariableWidthHistogram) histogram.reduce(aggs, context)) + .getBuckets(); // Final clusters should be [ (1,2,5), (10,12), 200) ] // Final centroids should be [ 3, 11, 200 ] // Final keys should be [ 1, 5, 200 ] double double_error = 1d / 10000d; assertEquals(1d, reduced_buckets.get(0).min(), double_error); - assertEquals((8d/3d), (double) reduced_buckets.get(0).getKey(), double_error); + assertEquals((8d / 3d), (double) reduced_buckets.get(0).getKey(), double_error); assertEquals(9, reduced_buckets.get(0).getDocCount()); assertEquals(10d, reduced_buckets.get(1).min(), double_error); assertEquals(11d, (double) reduced_buckets.get(1).getKey(), double_error); @@ -166,43 +183,53 @@ public void testSingleShardReduceLong() { public void testSingleShardReduceDouble() { InternalVariableWidthHistogram dummy_histogram = createEmptyTestInstance(); List buckets = new ArrayList<>(); - for (double value : new double[]{-1.3, -1.3, 12.0, 13.0, 20.0, 21.5, 23.0, 24.5}) { - InternalVariableWidthHistogram.Bucket.BucketBounds bounds = - new InternalVariableWidthHistogram.Bucket.BucketBounds(value - 0.7, value + 1); + for (double value : new double[] { -1.3, -1.3, 12.0, 13.0, 20.0, 21.5, 23.0, 24.5 }) { + InternalVariableWidthHistogram.Bucket.BucketBounds bounds = new InternalVariableWidthHistogram.Bucket.BucketBounds( + value - 0.7, + value + 1 + ); InternalVariableWidthHistogram.Bucket bucket = new InternalVariableWidthHistogram.Bucket( - value, bounds, 1, format, InternalAggregations.EMPTY + value, + bounds, + 1, + format, + InternalAggregations.EMPTY ); buckets.add(bucket); } InternalVariableWidthHistogram histogram = dummy_histogram.create(buckets); - MockBigArrays bigArrays = - new MockBigArrays(new MockPageCacheRecycler(Settings.EMPTY), new NoneCircuitBreakerService()); + MockBigArrays bigArrays = new MockBigArrays(new MockPageCacheRecycler(Settings.EMPTY), new NoneCircuitBreakerService()); ScriptService mockScriptService = mockScriptService(); - MultiBucketConsumerService.MultiBucketConsumer bucketConsumer = - new MultiBucketConsumerService.MultiBucketConsumer(DEFAULT_MAX_BUCKETS, - new NoneCircuitBreakerService().getBreaker(CircuitBreaker.REQUEST)); + MultiBucketConsumerService.MultiBucketConsumer bucketConsumer = new MultiBucketConsumerService.MultiBucketConsumer( + DEFAULT_MAX_BUCKETS, + new NoneCircuitBreakerService().getBreaker(CircuitBreaker.REQUEST) + ); InternalAggregation.ReduceContext context = InternalAggregation.ReduceContext.forFinalReduction( - bigArrays, mockScriptService, bucketConsumer, PipelineAggregator.PipelineTree.EMPTY); + bigArrays, + mockScriptService, + bucketConsumer, + PipelineAggregator.PipelineTree.EMPTY + ); ArrayList aggs = new ArrayList<>(); aggs.add(histogram); - List reduced_buckets = - ((InternalVariableWidthHistogram) histogram.reduce(aggs, context)).getBuckets(); + List reduced_buckets = ((InternalVariableWidthHistogram) histogram.reduce(aggs, context)) + .getBuckets(); // Final clusters should be [ (-1.3,-1.3), (12.0,13.0), (20.0, 21.5, 23.0, 24.5) ] // Final centroids should be [ -1.3, 12.5, 22.25 ] // Final keys should be [ -1.3, 11.7, 19.7 ] double double_error = 1d / 10000d; assertEquals(-2.0, reduced_buckets.get(0).min(), 
double_error); - assertEquals(-1.3, (double)reduced_buckets.get(0).getKey(), double_error); + assertEquals(-1.3, (double) reduced_buckets.get(0).getKey(), double_error); assertEquals(2, reduced_buckets.get(0).getDocCount()); assertEquals(11.3, reduced_buckets.get(1).min(), double_error); - assertEquals(12.5, (double)reduced_buckets.get(1).getKey(), double_error); + assertEquals(12.5, (double) reduced_buckets.get(1).getKey(), double_error); assertEquals(2, reduced_buckets.get(1).getDocCount()); assertEquals(19.3, reduced_buckets.get(2).min(), double_error); - assertEquals(22.25, (double)reduced_buckets.get(2).getKey(), double_error); + assertEquals(22.25, (double) reduced_buckets.get(2).getKey(), double_error); assertEquals(4, reduced_buckets.get(2).getDocCount()); } @@ -210,31 +237,49 @@ public void testMultipleShardsReduce() { InternalVariableWidthHistogram dummy_histogram = createEmptyTestInstance(); List buckets1 = new ArrayList<>(); - for (long value : new long[]{1, 5, 6, 10}) { - InternalVariableWidthHistogram.Bucket.BucketBounds bounds = - new InternalVariableWidthHistogram.Bucket.BucketBounds(value, value + 1); + for (long value : new long[] { 1, 5, 6, 10 }) { + InternalVariableWidthHistogram.Bucket.BucketBounds bounds = new InternalVariableWidthHistogram.Bucket.BucketBounds( + value, + value + 1 + ); InternalVariableWidthHistogram.Bucket bucket = new InternalVariableWidthHistogram.Bucket( - value, bounds, 1, format, InternalAggregations.EMPTY + value, + bounds, + 1, + format, + InternalAggregations.EMPTY ); buckets1.add(bucket); } List buckets2 = new ArrayList<>(); - for (long value : new long[]{2, 3, 6, 7}) { - InternalVariableWidthHistogram.Bucket.BucketBounds bounds = - new InternalVariableWidthHistogram.Bucket.BucketBounds(value, value + 1); + for (long value : new long[] { 2, 3, 6, 7 }) { + InternalVariableWidthHistogram.Bucket.BucketBounds bounds = new InternalVariableWidthHistogram.Bucket.BucketBounds( + value, + value + 1 + ); InternalVariableWidthHistogram.Bucket bucket = new InternalVariableWidthHistogram.Bucket( - value, bounds, 1, format, InternalAggregations.EMPTY + value, + bounds, + 1, + format, + InternalAggregations.EMPTY ); buckets2.add(bucket); } List buckets3 = new ArrayList<>(); - for (long value : new long[]{0, 2, 12}) { - InternalVariableWidthHistogram.Bucket.BucketBounds bounds = - new InternalVariableWidthHistogram.Bucket.BucketBounds(value, value + 1); + for (long value : new long[] { 0, 2, 12 }) { + InternalVariableWidthHistogram.Bucket.BucketBounds bounds = new InternalVariableWidthHistogram.Bucket.BucketBounds( + value, + value + 1 + ); InternalVariableWidthHistogram.Bucket bucket = new InternalVariableWidthHistogram.Bucket( - value, bounds, 1, format, InternalAggregations.EMPTY + value, + bounds, + 1, + format, + InternalAggregations.EMPTY ); buckets3.add(bucket); } @@ -243,29 +288,33 @@ public void testMultipleShardsReduce() { InternalVariableWidthHistogram histogram2 = dummy_histogram.create(buckets2); InternalVariableWidthHistogram histogram3 = dummy_histogram.create(buckets3); - MockBigArrays bigArrays = - new MockBigArrays(new MockPageCacheRecycler(Settings.EMPTY), new NoneCircuitBreakerService()); + MockBigArrays bigArrays = new MockBigArrays(new MockPageCacheRecycler(Settings.EMPTY), new NoneCircuitBreakerService()); ScriptService mockScriptService = mockScriptService(); - MultiBucketConsumerService.MultiBucketConsumer bucketConsumer = - new MultiBucketConsumerService.MultiBucketConsumer(DEFAULT_MAX_BUCKETS, - new 
NoneCircuitBreakerService().getBreaker(CircuitBreaker.REQUEST)); + MultiBucketConsumerService.MultiBucketConsumer bucketConsumer = new MultiBucketConsumerService.MultiBucketConsumer( + DEFAULT_MAX_BUCKETS, + new NoneCircuitBreakerService().getBreaker(CircuitBreaker.REQUEST) + ); InternalAggregation.ReduceContext context = InternalAggregation.ReduceContext.forFinalReduction( - bigArrays, mockScriptService, bucketConsumer, PipelineAggregator.PipelineTree.EMPTY); + bigArrays, + mockScriptService, + bucketConsumer, + PipelineAggregator.PipelineTree.EMPTY + ); ArrayList aggs = new ArrayList<>(); aggs.add(histogram1); aggs.add(histogram2); aggs.add(histogram3); - List reduced_buckets = - ((InternalVariableWidthHistogram) histogram1.reduce(aggs, context)).getBuckets(); + List reduced_buckets = ((InternalVariableWidthHistogram) histogram1.reduce(aggs, context)) + .getBuckets(); // Final clusters should be [ (0, 1, 2, 2, 3), (5, 6, 6, 7), (10, 12) ] // Final centroids should be [ 2, 6, 11 ] // Final keys should be [ 1, 5, 10 ] double double_error = 1d / 10000d; assertEquals(0d, reduced_buckets.get(0).min(), double_error); - assertEquals(1.6d, (double)reduced_buckets.get(0).getKey(), double_error); + assertEquals(1.6d, (double) reduced_buckets.get(0).getKey(), double_error); assertEquals(5, reduced_buckets.get(0).getDocCount()); assertEquals(5d, reduced_buckets.get(1).min(), double_error); assertEquals(6d, (double) reduced_buckets.get(1).getKey(), double_error); @@ -278,30 +327,40 @@ public void testMultipleShardsReduce() { public void testOverlappingReduceResult() { InternalVariableWidthHistogram dummy_histogram = createEmptyTestInstance(); List buckets = new ArrayList<>(); - for (long value : new long[]{1, 2, 4, 10}) { - InternalVariableWidthHistogram.Bucket.BucketBounds bounds = - new InternalVariableWidthHistogram.Bucket.BucketBounds(value, value + 3); + for (long value : new long[] { 1, 2, 4, 10 }) { + InternalVariableWidthHistogram.Bucket.BucketBounds bounds = new InternalVariableWidthHistogram.Bucket.BucketBounds( + value, + value + 3 + ); InternalVariableWidthHistogram.Bucket bucket = new InternalVariableWidthHistogram.Bucket( - value, bounds, 4, format, InternalAggregations.EMPTY + value, + bounds, + 4, + format, + InternalAggregations.EMPTY ); buckets.add(bucket); } InternalVariableWidthHistogram histogram = dummy_histogram.create(buckets); - MockBigArrays bigArrays = - new MockBigArrays(new MockPageCacheRecycler(Settings.EMPTY), new NoneCircuitBreakerService()); + MockBigArrays bigArrays = new MockBigArrays(new MockPageCacheRecycler(Settings.EMPTY), new NoneCircuitBreakerService()); ScriptService mockScriptService = mockScriptService(); - MultiBucketConsumerService.MultiBucketConsumer bucketConsumer = - new MultiBucketConsumerService.MultiBucketConsumer(DEFAULT_MAX_BUCKETS, - new NoneCircuitBreakerService().getBreaker(CircuitBreaker.REQUEST)); + MultiBucketConsumerService.MultiBucketConsumer bucketConsumer = new MultiBucketConsumerService.MultiBucketConsumer( + DEFAULT_MAX_BUCKETS, + new NoneCircuitBreakerService().getBreaker(CircuitBreaker.REQUEST) + ); InternalAggregation.ReduceContext context = InternalAggregation.ReduceContext.forFinalReduction( - bigArrays, mockScriptService, bucketConsumer, PipelineAggregator.PipelineTree.EMPTY); + bigArrays, + mockScriptService, + bucketConsumer, + PipelineAggregator.PipelineTree.EMPTY + ); ArrayList aggs = new ArrayList<>(); aggs.add(histogram); - List reduced_buckets = - ((InternalVariableWidthHistogram) histogram.reduce(aggs, 
context)).getBuckets(); + List reduced_buckets = ((InternalVariableWidthHistogram) histogram.reduce(aggs, context)) + .getBuckets(); // Expected clusters: [ (1, 2), (4), 10) ] // Expected centroids: [ 1.5, 4, 10 ] @@ -326,38 +385,42 @@ public void testOverlappingReduceResult() { public void testSameMinMerge() { InternalVariableWidthHistogram dummy_histogram = createEmptyTestInstance(); List buckets = new ArrayList<>(); - for (long value : new long[]{1, 100, 700}) { + for (long value : new long[] { 1, 100, 700 }) { InternalVariableWidthHistogram.Bucket.BucketBounds bounds; - if(value == 1 || value == 100) { - bounds = new InternalVariableWidthHistogram.Bucket.BucketBounds( - 1, value - ); - } else{ - bounds = new InternalVariableWidthHistogram.Bucket.BucketBounds( - value, value + 1 - ); + if (value == 1 || value == 100) { + bounds = new InternalVariableWidthHistogram.Bucket.BucketBounds(1, value); + } else { + bounds = new InternalVariableWidthHistogram.Bucket.BucketBounds(value, value + 1); } InternalVariableWidthHistogram.Bucket bucket = new InternalVariableWidthHistogram.Bucket( - value, bounds, 1, format, InternalAggregations.EMPTY + value, + bounds, + 1, + format, + InternalAggregations.EMPTY ); buckets.add(bucket); } InternalVariableWidthHistogram histogram = dummy_histogram.create(buckets); - MockBigArrays bigArrays = - new MockBigArrays(new MockPageCacheRecycler(Settings.EMPTY), new NoneCircuitBreakerService()); + MockBigArrays bigArrays = new MockBigArrays(new MockPageCacheRecycler(Settings.EMPTY), new NoneCircuitBreakerService()); ScriptService mockScriptService = mockScriptService(); - MultiBucketConsumerService.MultiBucketConsumer bucketConsumer = - new MultiBucketConsumerService.MultiBucketConsumer(DEFAULT_MAX_BUCKETS, - new NoneCircuitBreakerService().getBreaker(CircuitBreaker.REQUEST)); + MultiBucketConsumerService.MultiBucketConsumer bucketConsumer = new MultiBucketConsumerService.MultiBucketConsumer( + DEFAULT_MAX_BUCKETS, + new NoneCircuitBreakerService().getBreaker(CircuitBreaker.REQUEST) + ); InternalAggregation.ReduceContext context = InternalAggregation.ReduceContext.forFinalReduction( - bigArrays, mockScriptService, bucketConsumer, PipelineAggregator.PipelineTree.EMPTY); + bigArrays, + mockScriptService, + bucketConsumer, + PipelineAggregator.PipelineTree.EMPTY + ); ArrayList aggs = new ArrayList<>(); aggs.add(histogram); - List reduced_buckets = - ((InternalVariableWidthHistogram) histogram.reduce(aggs, context)).getBuckets(); + List reduced_buckets = ((InternalVariableWidthHistogram) histogram.reduce(aggs, context)) + .getBuckets(); // Expected clusters: [ (1), (100), (700) ] // Expected clusters after same min merge: [ (1, 100), (700) ] @@ -367,7 +430,7 @@ public void testSameMinMerge() { double double_error = 1d / 10000d; assertEquals(2, reduced_buckets.size()); assertEquals(1d, reduced_buckets.get(0).min(), double_error); - assertEquals((101d/2d), (double) reduced_buckets.get(0).getKey(), double_error); + assertEquals((101d / 2d), (double) reduced_buckets.get(0).getKey(), double_error); assertEquals(2, reduced_buckets.get(0).getDocCount()); assertEquals(700d, reduced_buckets.get(1).min(), double_error); assertEquals(700d, (double) reduced_buckets.get(1).getKey(), double_error); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/LongBoundsTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/LongBoundsTests.java index 20a523bf8edda..007dcf89d639a 100644 --- 
a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/LongBoundsTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/LongBoundsTests.java @@ -95,15 +95,23 @@ public void testParseAndValidate() { assertNull(parsed.getMin()); assertEquals(now, (long) parsed.getMax()); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, - () -> new LongBounds(100L, 90L).parseAndValidate("test", "extended_bounds", nowInMillis, format)); - assertEquals("[extended_bounds.min][100] cannot be greater than [extended_bounds.max][90] for histogram aggregation [test]", - e.getMessage()); - - e = expectThrows(IllegalArgumentException.class, - () -> unparsed(new LongBounds(100L, 90L)).parseAndValidate("test", "extended_bounds", nowInMillis, format)); - assertEquals("[extended_bounds.min][100] cannot be greater than [extended_bounds.max][90] for histogram aggregation [test]", - e.getMessage()); + IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, + () -> new LongBounds(100L, 90L).parseAndValidate("test", "extended_bounds", nowInMillis, format) + ); + assertEquals( + "[extended_bounds.min][100] cannot be greater than [extended_bounds.max][90] for histogram aggregation [test]", + e.getMessage() + ); + + e = expectThrows( + IllegalArgumentException.class, + () -> unparsed(new LongBounds(100L, 90L)).parseAndValidate("test", "extended_bounds", nowInMillis, format) + ); + assertEquals( + "[extended_bounds.min][100] cannot be greater than [extended_bounds.max][90] for histogram aggregation [test]", + e.getMessage() + ); } public void testTransportRoundTrip() throws IOException { diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/NumericHistogramAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/NumericHistogramAggregatorTests.java index b058e07be48e4..5e33edca71307 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/NumericHistogramAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/NumericHistogramAggregatorTests.java @@ -42,17 +42,14 @@ public class NumericHistogramAggregatorTests extends AggregatorTestCase { public void testLongs() throws Exception { - try (Directory dir = newDirectory(); - RandomIndexWriter w = new RandomIndexWriter(random(), dir)) { - for (long value : new long[] {7, 3, -10, -6, 5, 15}) { + try (Directory dir = newDirectory(); RandomIndexWriter w = new RandomIndexWriter(random(), dir)) { + for (long value : new long[] { 7, 3, -10, -6, 5, 15 }) { Document doc = new Document(); doc.add(new SortedNumericDocValuesField("field", value)); w.addDocument(doc); } - HistogramAggregationBuilder aggBuilder = new HistogramAggregationBuilder("my_agg") - .field("field") - .interval(5); + HistogramAggregationBuilder aggBuilder = new HistogramAggregationBuilder("my_agg").field("field").interval(5); try (IndexReader reader = w.getReader()) { IndexSearcher searcher = new IndexSearcher(reader); InternalHistogram histogram = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, longField("field")); @@ -75,17 +72,14 @@ public void testLongs() throws Exception { } public void testDoubles() throws Exception { - try (Directory dir = newDirectory(); - RandomIndexWriter w = new RandomIndexWriter(random(), dir)) { - for (double value : new double[] {9.3, 3.2, -10, -6.5, 5.3, 15.1}) { + try (Directory dir = newDirectory(); RandomIndexWriter w 
= new RandomIndexWriter(random(), dir)) { + for (double value : new double[] { 9.3, 3.2, -10, -6.5, 5.3, 15.1 }) { Document doc = new Document(); doc.add(new SortedNumericDocValuesField("field", NumericUtils.doubleToSortableLong(value))); w.addDocument(doc); } - HistogramAggregationBuilder aggBuilder = new HistogramAggregationBuilder("my_agg") - .field("field") - .interval(5); + HistogramAggregationBuilder aggBuilder = new HistogramAggregationBuilder("my_agg").field("field").interval(5); try (IndexReader reader = w.getReader()) { IndexSearcher searcher = new IndexSearcher(reader); InternalHistogram histogram = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, doubleField("field")); @@ -119,13 +113,13 @@ public void testDates() throws Exception { "2019-11-07T13:47:43", "2019-11-08T16:14:34", "2019-11-09T17:09:50", - "2019-11-10T22:55:46"); + "2019-11-10T22:55:46" + ); String fieldName = "date_field"; DateFieldMapper.DateFieldType fieldType = dateField(fieldName, DateFieldMapper.Resolution.MILLISECONDS); - try (Directory dir = newDirectory(); - RandomIndexWriter indexWriter = new RandomIndexWriter(random(), dir)) { + try (Directory dir = newDirectory(); RandomIndexWriter indexWriter = new RandomIndexWriter(random(), dir)) { Document document = new Document(); for (String date : dataset) { long instant = fieldType.parse(date); @@ -134,8 +128,7 @@ public void testDates() throws Exception { document.clear(); } - HistogramAggregationBuilder aggBuilder = new HistogramAggregationBuilder("my_agg") - .field(fieldName) + HistogramAggregationBuilder aggBuilder = new HistogramAggregationBuilder("my_agg").field(fieldName) .interval(1000 * 60 * 60 * 24); try (IndexReader reader = indexWriter.getReader()) { IndexSearcher searcher = new IndexSearcher(reader); @@ -146,17 +139,14 @@ public void testDates() throws Exception { } public void testIrrationalInterval() throws Exception { - try (Directory dir = newDirectory(); - RandomIndexWriter w = new RandomIndexWriter(random(), dir)) { - for (long value : new long[] {3, 2, -10, 5, -9}) { + try (Directory dir = newDirectory(); RandomIndexWriter w = new RandomIndexWriter(random(), dir)) { + for (long value : new long[] { 3, 2, -10, 5, -9 }) { Document doc = new Document(); doc.add(new SortedNumericDocValuesField("field", value)); w.addDocument(doc); } - HistogramAggregationBuilder aggBuilder = new HistogramAggregationBuilder("my_agg") - .field("field") - .interval(Math.PI); + HistogramAggregationBuilder aggBuilder = new HistogramAggregationBuilder("my_agg").field("field").interval(Math.PI); try (IndexReader reader = w.getReader()) { IndexSearcher searcher = new IndexSearcher(reader); InternalHistogram histogram = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, longField("field")); @@ -179,18 +169,14 @@ public void testIrrationalInterval() throws Exception { } public void testMinDocCount() throws Exception { - try (Directory dir = newDirectory(); - RandomIndexWriter w = new RandomIndexWriter(random(), dir)) { - for (long value : new long[] {7, 3, -10, -6, 5, 50}) { + try (Directory dir = newDirectory(); RandomIndexWriter w = new RandomIndexWriter(random(), dir)) { + for (long value : new long[] { 7, 3, -10, -6, 5, 50 }) { Document doc = new Document(); doc.add(new SortedNumericDocValuesField("field", value)); w.addDocument(doc); } - HistogramAggregationBuilder aggBuilder = new HistogramAggregationBuilder("my_agg") - .field("field") - .interval(10) - .minDocCount(2); + HistogramAggregationBuilder aggBuilder = new 
HistogramAggregationBuilder("my_agg").field("field").interval(10).minDocCount(2); try (IndexReader reader = w.getReader()) { IndexSearcher searcher = new IndexSearcher(reader); InternalHistogram histogram = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, longField("field")); @@ -205,19 +191,15 @@ public void testMinDocCount() throws Exception { } public void testMissing() throws Exception { - try (Directory dir = newDirectory(); - RandomIndexWriter w = new RandomIndexWriter(random(), dir)) { - for (long value : new long[] {7, 3, -10, -6, 5, 15}) { + try (Directory dir = newDirectory(); RandomIndexWriter w = new RandomIndexWriter(random(), dir)) { + for (long value : new long[] { 7, 3, -10, -6, 5, 15 }) { Document doc = new Document(); doc.add(new SortedNumericDocValuesField("field", value)); w.addDocument(doc); w.addDocument(new Document()); } - HistogramAggregationBuilder aggBuilder = new HistogramAggregationBuilder("my_agg") - .field("field") - .interval(5) - .missing(2d); + HistogramAggregationBuilder aggBuilder = new HistogramAggregationBuilder("my_agg").field("field").interval(5).missing(2d); try (IndexReader reader = w.getReader()) { IndexSearcher searcher = new IndexSearcher(reader); InternalHistogram histogram = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, longField("field")); @@ -240,17 +222,13 @@ public void testMissing() throws Exception { } public void testMissingUnmappedField() throws Exception { - try (Directory dir = newDirectory(); - RandomIndexWriter w = new RandomIndexWriter(random(), dir)) { - for (int i = 0; i < 7; i ++) { + try (Directory dir = newDirectory(); RandomIndexWriter w = new RandomIndexWriter(random(), dir)) { + for (int i = 0; i < 7; i++) { Document doc = new Document(); w.addDocument(doc); } - HistogramAggregationBuilder aggBuilder = new HistogramAggregationBuilder("my_agg") - .field("field") - .interval(5) - .missing(2d); + HistogramAggregationBuilder aggBuilder = new HistogramAggregationBuilder("my_agg").field("field").interval(5).missing(2d); try (IndexReader reader = w.getReader()) { IndexSearcher searcher = new IndexSearcher(reader); InternalHistogram histogram = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder); @@ -266,22 +244,21 @@ public void testMissingUnmappedField() throws Exception { } public void testMissingUnmappedFieldBadType() throws Exception { - try (Directory dir = newDirectory(); - RandomIndexWriter w = new RandomIndexWriter(random(), dir)) { - for (int i = 0; i < 7; i ++) { + try (Directory dir = newDirectory(); RandomIndexWriter w = new RandomIndexWriter(random(), dir)) { + for (int i = 0; i < 7; i++) { w.addDocument(new Document()); } String missingValue = "🍌🍌🍌"; - HistogramAggregationBuilder aggBuilder = new HistogramAggregationBuilder("my_agg") - .field("field") + HistogramAggregationBuilder aggBuilder = new HistogramAggregationBuilder("my_agg").field("field") .interval(5) .missing(missingValue); try (IndexReader reader = w.getReader()) { IndexSearcher searcher = new IndexSearcher(reader); - Throwable t = expectThrows(IllegalArgumentException.class, () -> { - searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder); - }); + Throwable t = expectThrows( + IllegalArgumentException.class, + () -> { searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder); } + ); // This throws a number format exception (which is a subclass of IllegalArgumentException) and might be ok? 
assertThat(t.getMessage(), containsString(missingValue)); } @@ -289,41 +266,35 @@ public void testMissingUnmappedFieldBadType() throws Exception { } public void testIncorrectFieldType() throws Exception { - try (Directory dir = newDirectory(); - RandomIndexWriter w = new RandomIndexWriter(random(), dir)) { - for (String value : new String[] {"foo", "bar", "baz", "quux"}) { + try (Directory dir = newDirectory(); RandomIndexWriter w = new RandomIndexWriter(random(), dir)) { + for (String value : new String[] { "foo", "bar", "baz", "quux" }) { Document doc = new Document(); doc.add(new SortedSetDocValuesField("field", new BytesRef(value))); w.addDocument(doc); } - HistogramAggregationBuilder aggBuilder = new HistogramAggregationBuilder("my_agg") - .field("field") - .interval(5); + HistogramAggregationBuilder aggBuilder = new HistogramAggregationBuilder("my_agg").field("field").interval(5); try (IndexReader reader = w.getReader()) { IndexSearcher searcher = new IndexSearcher(reader); - expectThrows(IllegalArgumentException.class, () -> { - searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, keywordField("field")); - }); + expectThrows( + IllegalArgumentException.class, + () -> { searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, keywordField("field")); } + ); } } } public void testOffset() throws Exception { - try (Directory dir = newDirectory(); - RandomIndexWriter w = new RandomIndexWriter(random(), dir)) { - for (double value : new double[] {9.3, 3.2, -5, -6.5, 5.3}) { + try (Directory dir = newDirectory(); RandomIndexWriter w = new RandomIndexWriter(random(), dir)) { + for (double value : new double[] { 9.3, 3.2, -5, -6.5, 5.3 }) { Document doc = new Document(); doc.add(new SortedNumericDocValuesField("field", NumericUtils.doubleToSortableLong(value))); w.addDocument(doc); } - HistogramAggregationBuilder aggBuilder = new HistogramAggregationBuilder("my_agg") - .field("field") - .interval(5) - .offset(Math.PI); + HistogramAggregationBuilder aggBuilder = new HistogramAggregationBuilder("my_agg").field("field").interval(5).offset(Math.PI); try (IndexReader reader = w.getReader()) { IndexSearcher searcher = new IndexSearcher(reader); InternalHistogram histogram = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, doubleField("field")); @@ -342,10 +313,9 @@ public void testOffset() throws Exception { } public void testRandomOffset() throws Exception { - try (Directory dir = newDirectory(); - RandomIndexWriter w = new RandomIndexWriter(random(), dir)) { + try (Directory dir = newDirectory(); RandomIndexWriter w = new RandomIndexWriter(random(), dir)) { // Note, these values are carefully chosen to ensure that no matter what offset we pick, no two can end up in the same bucket - for (double value : new double[] {9.3, 3.2, -5}) { + for (double value : new double[] { 9.3, 3.2, -5 }) { Document doc = new Document(); doc.add(new SortedNumericDocValuesField("field", NumericUtils.doubleToSortableLong(value))); w.addDocument(doc); @@ -354,8 +324,7 @@ public void testRandomOffset() throws Exception { final double offset = randomDouble(); final double interval = 5; final double expectedOffset = offset % interval; - HistogramAggregationBuilder aggBuilder = new HistogramAggregationBuilder("my_agg") - .field("field") + HistogramAggregationBuilder aggBuilder = new HistogramAggregationBuilder("my_agg").field("field") .interval(interval) .offset(offset); try (IndexReader reader = w.getReader()) { @@ -381,18 +350,16 @@ public void testRandomOffset() throws Exception { } public 
void testExtendedBounds() throws Exception { - try (Directory dir = newDirectory(); - RandomIndexWriter w = new RandomIndexWriter(random(), dir)) { - for (double value : new double[] {3.2, -5, -4.5, 4.3}) { + try (Directory dir = newDirectory(); RandomIndexWriter w = new RandomIndexWriter(random(), dir)) { + for (double value : new double[] { 3.2, -5, -4.5, 4.3 }) { Document doc = new Document(); doc.add(new SortedNumericDocValuesField("field", NumericUtils.doubleToSortableLong(value))); w.addDocument(doc); } - HistogramAggregationBuilder aggBuilder = new HistogramAggregationBuilder("my_agg") - .field("field") - .interval(5) - .extendedBounds(-12, 13); + HistogramAggregationBuilder aggBuilder = new HistogramAggregationBuilder("my_agg").field("field") + .interval(5) + .extendedBounds(-12, 13); MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("field", NumberFieldMapper.NumberType.DOUBLE); try (IndexReader reader = w.getReader()) { IndexSearcher searcher = new IndexSearcher(reader); @@ -439,17 +406,23 @@ public void testHardBounds() throws Exception { } public void testAsSubAgg() throws IOException { - AggregationBuilder request = new HistogramAggregationBuilder("outer").field("outer").interval(5).subAggregation( - new HistogramAggregationBuilder("inner").field("inner").interval(5).subAggregation( - new MinAggregationBuilder("min").field("n"))); + AggregationBuilder request = new HistogramAggregationBuilder("outer").field("outer") + .interval(5) + .subAggregation( + new HistogramAggregationBuilder("inner").field("inner") + .interval(5) + .subAggregation(new MinAggregationBuilder("min").field("n")) + ); CheckedConsumer buildIndex = iw -> { List> docs = new ArrayList<>(); for (int n = 0; n < 10000; n++) { - docs.add(List.of( - new SortedNumericDocValuesField("outer", n % 100), - new SortedNumericDocValuesField("inner", n / 100), - new SortedNumericDocValuesField("n", n) - )); + docs.add( + List.of( + new SortedNumericDocValuesField("outer", n % 100), + new SortedNumericDocValuesField("inner", n / 100), + new SortedNumericDocValuesField("n", n) + ) + ); } iw.addDocuments(docs); }; diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/RangeHistogramAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/RangeHistogramAggregatorTests.java index e84b4f1d3924a..2be620f64f541 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/RangeHistogramAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/RangeHistogramAggregatorTests.java @@ -18,8 +18,8 @@ import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.store.Directory; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.common.network.InetAddresses; +import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.index.mapper.RangeFieldMapper; import org.elasticsearch.index.mapper.RangeType; import org.elasticsearch.search.aggregations.AggregationBuilder; @@ -40,8 +40,7 @@ public class RangeHistogramAggregatorTests extends AggregatorTestCase { public void testDoubles() throws Exception { RangeType rangeType = RangeType.DOUBLE; - try (Directory dir = newDirectory(); - RandomIndexWriter w = new RandomIndexWriter(random(), dir)) { + try (Directory dir = newDirectory(); RandomIndexWriter w = new RandomIndexWriter(random(), dir)) { for (RangeFieldMapper.Range range : new 
RangeFieldMapper.Range[] { new RangeFieldMapper.Range(rangeType, 1.0D, 5.0D, true, true), // bucket 0 5 new RangeFieldMapper.Range(rangeType, -3.1, 4.2, true, true), // bucket -5, 0 @@ -54,14 +53,16 @@ public void testDoubles() throws Exception { w.addDocument(doc); } - HistogramAggregationBuilder aggBuilder = new HistogramAggregationBuilder("my_agg") - .field("field") - .interval(5); + HistogramAggregationBuilder aggBuilder = new HistogramAggregationBuilder("my_agg").field("field").interval(5); try (IndexReader reader = w.getReader()) { IndexSearcher searcher = new IndexSearcher(reader); - InternalHistogram histogram = - searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, rangeField("field", rangeType)); + InternalHistogram histogram = searchAndReduce( + searcher, + new MatchAllDocsQuery(), + aggBuilder, + rangeField("field", rangeType) + ); assertEquals(7, histogram.getBuckets().size()); assertEquals(-5d, histogram.getBuckets().get(0).getKey()); @@ -90,8 +91,7 @@ public void testDoubles() throws Exception { public void testLongs() throws Exception { RangeType rangeType = RangeType.LONG; - try (Directory dir = newDirectory(); - RandomIndexWriter w = new RandomIndexWriter(random(), dir)) { + try (Directory dir = newDirectory(); RandomIndexWriter w = new RandomIndexWriter(random(), dir)) { for (RangeFieldMapper.Range range : new RangeFieldMapper.Range[] { new RangeFieldMapper.Range(rangeType, 1L, 5L, true, true), // bucket 0 5 new RangeFieldMapper.Range(rangeType, -3L, 4L, true, true), // bucket -5, 0 @@ -104,14 +104,16 @@ public void testLongs() throws Exception { w.addDocument(doc); } - HistogramAggregationBuilder aggBuilder = new HistogramAggregationBuilder("my_agg") - .field("field") - .interval(5); + HistogramAggregationBuilder aggBuilder = new HistogramAggregationBuilder("my_agg").field("field").interval(5); try (IndexReader reader = w.getReader()) { IndexSearcher searcher = new IndexSearcher(reader); - InternalHistogram histogram = - searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, rangeField("field", rangeType)); + InternalHistogram histogram = searchAndReduce( + searcher, + new MatchAllDocsQuery(), + aggBuilder, + rangeField("field", rangeType) + ); assertEquals(7, histogram.getBuckets().size()); assertEquals(-5d, histogram.getBuckets().get(0).getKey()); @@ -140,26 +142,29 @@ public void testLongs() throws Exception { public void testMultipleRanges() throws Exception { RangeType rangeType = RangeType.LONG; - try (Directory dir = newDirectory(); - RandomIndexWriter w = new RandomIndexWriter(random(), dir)) { + try (Directory dir = newDirectory(); RandomIndexWriter w = new RandomIndexWriter(random(), dir)) { Document doc = new Document(); - BytesRef encodedRange = rangeType.encodeRanges(Set.of( - new RangeFieldMapper.Range(rangeType, 1L, 5L, true, true), // bucket 0 5 - new RangeFieldMapper.Range(rangeType, -3L, 4L, true, true), // bucket -5, 0 - new RangeFieldMapper.Range(rangeType, 4L, 13L, true, true), // bucket 0, 5, 10 - new RangeFieldMapper.Range(rangeType, 22L, 29L, true, true) // bucket 20, 25, 30 - )); + BytesRef encodedRange = rangeType.encodeRanges( + Set.of( + new RangeFieldMapper.Range(rangeType, 1L, 5L, true, true), // bucket 0 5 + new RangeFieldMapper.Range(rangeType, -3L, 4L, true, true), // bucket -5, 0 + new RangeFieldMapper.Range(rangeType, 4L, 13L, true, true), // bucket 0, 5, 10 + new RangeFieldMapper.Range(rangeType, 22L, 29L, true, true) // bucket 20, 25, 30 + ) + ); doc.add(new BinaryDocValuesField("field", encodedRange)); 
w.addDocument(doc); - HistogramAggregationBuilder aggBuilder = new HistogramAggregationBuilder("my_agg") - .field("field") - .interval(5); + HistogramAggregationBuilder aggBuilder = new HistogramAggregationBuilder("my_agg").field("field").interval(5); try (IndexReader reader = w.getReader()) { IndexSearcher searcher = new IndexSearcher(reader); - InternalHistogram histogram = - searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, rangeField("field", rangeType)); + InternalHistogram histogram = searchAndReduce( + searcher, + new MatchAllDocsQuery(), + aggBuilder, + rangeField("field", rangeType) + ); assertEquals(7, histogram.getBuckets().size()); assertEquals(-5d, histogram.getBuckets().get(0).getKey()); @@ -189,26 +194,29 @@ public void testMultipleRanges() throws Exception { public void testMultipleRangesLotsOfOverlap() throws Exception { RangeType rangeType = RangeType.LONG; - try (Directory dir = newDirectory(); - RandomIndexWriter w = new RandomIndexWriter(random(), dir)) { + try (Directory dir = newDirectory(); RandomIndexWriter w = new RandomIndexWriter(random(), dir)) { Document doc = new Document(); - BytesRef encodedRange = rangeType.encodeRanges(Set.of( - new RangeFieldMapper.Range(rangeType, 1L, 2L, true, true), // bucket 0 - new RangeFieldMapper.Range(rangeType, 1L, 4L, true, true), // bucket 0 - new RangeFieldMapper.Range(rangeType, 1L, 13L, true, true), // bucket 0, 5, 10 - new RangeFieldMapper.Range(rangeType, 1L, 5L, true, true) // bucket 0, 5 - )); + BytesRef encodedRange = rangeType.encodeRanges( + Set.of( + new RangeFieldMapper.Range(rangeType, 1L, 2L, true, true), // bucket 0 + new RangeFieldMapper.Range(rangeType, 1L, 4L, true, true), // bucket 0 + new RangeFieldMapper.Range(rangeType, 1L, 13L, true, true), // bucket 0, 5, 10 + new RangeFieldMapper.Range(rangeType, 1L, 5L, true, true) // bucket 0, 5 + ) + ); doc.add(new BinaryDocValuesField("field", encodedRange)); w.addDocument(doc); - HistogramAggregationBuilder aggBuilder = new HistogramAggregationBuilder("my_agg") - .field("field") - .interval(5); + HistogramAggregationBuilder aggBuilder = new HistogramAggregationBuilder("my_agg").field("field").interval(5); try (IndexReader reader = w.getReader()) { IndexSearcher searcher = new IndexSearcher(reader); - InternalHistogram histogram = - searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, rangeField("field", rangeType)); + InternalHistogram histogram = searchAndReduce( + searcher, + new MatchAllDocsQuery(), + aggBuilder, + rangeField("field", rangeType) + ); assertEquals(3, histogram.getBuckets().size()); assertEquals(0d, histogram.getBuckets().get(0).getKey()); @@ -226,8 +234,7 @@ public void testMultipleRangesLotsOfOverlap() throws Exception { public void testLongsIrrationalInterval() throws Exception { RangeType rangeType = RangeType.LONG; - try (Directory dir = newDirectory(); - RandomIndexWriter w = new RandomIndexWriter(random(), dir)) { + try (Directory dir = newDirectory(); RandomIndexWriter w = new RandomIndexWriter(random(), dir)) { for (RangeFieldMapper.Range range : new RangeFieldMapper.Range[] { new RangeFieldMapper.Range(rangeType, 1L, 5L, true, true), // bucket 0 5 new RangeFieldMapper.Range(rangeType, -3L, 4L, true, true), // bucket -5, 0 @@ -239,14 +246,16 @@ public void testLongsIrrationalInterval() throws Exception { w.addDocument(doc); } - HistogramAggregationBuilder aggBuilder = new HistogramAggregationBuilder("my_agg") - .field("field") - .interval(Math.PI); + HistogramAggregationBuilder aggBuilder = new 
HistogramAggregationBuilder("my_agg").field("field").interval(Math.PI); try (IndexReader reader = w.getReader()) { IndexSearcher searcher = new IndexSearcher(reader); - InternalHistogram histogram = - searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, rangeField("field", rangeType)); + InternalHistogram histogram = searchAndReduce( + searcher, + new MatchAllDocsQuery(), + aggBuilder, + rangeField("field", rangeType) + ); assertEquals(6, histogram.getBuckets().size()); assertEquals(-1 * Math.PI, histogram.getBuckets().get(0).getKey()); @@ -272,12 +281,11 @@ public void testLongsIrrationalInterval() throws Exception { public void testMinDocCount() throws Exception { RangeType rangeType = RangeType.LONG; - try (Directory dir = newDirectory(); - RandomIndexWriter w = new RandomIndexWriter(random(), dir)) { + try (Directory dir = newDirectory(); RandomIndexWriter w = new RandomIndexWriter(random(), dir)) { for (RangeFieldMapper.Range range : new RangeFieldMapper.Range[] { new RangeFieldMapper.Range(rangeType, -14L, -11L, true, true), // bucket -15 new RangeFieldMapper.Range(rangeType, 0L, 9L, true, true), // bucket 0, 5 - new RangeFieldMapper.Range(rangeType, 6L, 12L, true, true), // bucket 5, 10 + new RangeFieldMapper.Range(rangeType, 6L, 12L, true, true), // bucket 5, 10 new RangeFieldMapper.Range(rangeType, 13L, 14L, true, true), // bucket 10 }) { Document doc = new Document(); @@ -286,10 +294,7 @@ public void testMinDocCount() throws Exception { w.addDocument(doc); } - HistogramAggregationBuilder aggBuilder = new HistogramAggregationBuilder("my_agg") - .field("field") - .interval(5) - .minDocCount(2); + HistogramAggregationBuilder aggBuilder = new HistogramAggregationBuilder("my_agg").field("field").interval(5).minDocCount(2); try (IndexReader reader = w.getReader()) { IndexSearcher searcher = new IndexSearcher(reader); @@ -312,8 +317,7 @@ public void testMinDocCount() throws Exception { public void testOffset() throws Exception { RangeType rangeType = RangeType.DOUBLE; - try (Directory dir = newDirectory(); - RandomIndexWriter w = new RandomIndexWriter(random(), dir)) { + try (Directory dir = newDirectory(); RandomIndexWriter w = new RandomIndexWriter(random(), dir)) { for (RangeFieldMapper.Range range : new RangeFieldMapper.Range[] { new RangeFieldMapper.Range(rangeType, 1.0D, 5.0D, true, true), // bucket -1, 4 new RangeFieldMapper.Range(rangeType, -3.1, 4.2, true, true), // bucket -6 -1 4 @@ -326,15 +330,16 @@ public void testOffset() throws Exception { w.addDocument(doc); } - HistogramAggregationBuilder aggBuilder = new HistogramAggregationBuilder("my_agg") - .field("field") - .interval(5) - .offset(4); + HistogramAggregationBuilder aggBuilder = new HistogramAggregationBuilder("my_agg").field("field").interval(5).offset(4); try (IndexReader reader = w.getReader()) { IndexSearcher searcher = new IndexSearcher(reader); - InternalHistogram histogram = - searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, rangeField("field", rangeType)); + InternalHistogram histogram = searchAndReduce( + searcher, + new MatchAllDocsQuery(), + aggBuilder, + rangeField("field", rangeType) + ); assertEquals(8, histogram.getBuckets().size()); assertEquals(-6d, histogram.getBuckets().get(0).getKey()); @@ -366,8 +371,7 @@ public void testOffset() throws Exception { public void testOffsetGtInterval() throws Exception { RangeType rangeType = RangeType.DOUBLE; - try (Directory dir = newDirectory(); - RandomIndexWriter w = new RandomIndexWriter(random(), dir)) { + try (Directory dir = 
newDirectory(); RandomIndexWriter w = new RandomIndexWriter(random(), dir)) { for (RangeFieldMapper.Range range : new RangeFieldMapper.Range[] { new RangeFieldMapper.Range(rangeType, 1.0D, 5.0D, true, true), // bucket 0 5 new RangeFieldMapper.Range(rangeType, -3.1, 4.2, true, true), // bucket -5, 0 @@ -386,15 +390,18 @@ public void testOffsetGtInterval() throws Exception { final double interval = 5; final double expectedOffset = offset % interval; - HistogramAggregationBuilder aggBuilder = new HistogramAggregationBuilder("my_agg") - .field("field") + HistogramAggregationBuilder aggBuilder = new HistogramAggregationBuilder("my_agg").field("field") .interval(interval) .offset(offset); try (IndexReader reader = w.getReader()) { IndexSearcher searcher = new IndexSearcher(reader); - InternalHistogram histogram = - searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, rangeField("field", rangeType)); + InternalHistogram histogram = searchAndReduce( + searcher, + new MatchAllDocsQuery(), + aggBuilder, + rangeField("field", rangeType) + ); assertEquals(7, histogram.getBuckets().size()); assertEquals(-5d + expectedOffset, histogram.getBuckets().get(0).getKey()); @@ -421,50 +428,62 @@ public void testOffsetGtInterval() throws Exception { } } - public void testIpRangesUnsupported() throws Exception { RangeType rangeType = RangeType.IP; - try (Directory dir = newDirectory(); - RandomIndexWriter w = new RandomIndexWriter(random(), dir)) { + try (Directory dir = newDirectory(); RandomIndexWriter w = new RandomIndexWriter(random(), dir)) { Document doc = new Document(); - BytesRef encodedRange = - rangeType.encodeRanges(Collections.singleton(new RangeFieldMapper.Range(rangeType, InetAddresses.forString("10.0.0.1"), - InetAddresses.forString("10.0.0.10"), true, true))); + BytesRef encodedRange = rangeType.encodeRanges( + Collections.singleton( + new RangeFieldMapper.Range( + rangeType, + InetAddresses.forString("10.0.0.1"), + InetAddresses.forString("10.0.0.10"), + true, + true + ) + ) + ); doc.add(new BinaryDocValuesField("field", encodedRange)); w.addDocument(doc); - HistogramAggregationBuilder aggBuilder = new HistogramAggregationBuilder("my_agg") - .field("field") - .interval(5); + HistogramAggregationBuilder aggBuilder = new HistogramAggregationBuilder("my_agg").field("field").interval(5); try (IndexReader reader = w.getReader()) { IndexSearcher searcher = new IndexSearcher(reader); - Exception e = expectThrows(IllegalArgumentException.class, () -> - searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, rangeField("field", rangeType))); + Exception e = expectThrows( + IllegalArgumentException.class, + () -> searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, rangeField("field", rangeType)) + ); assertThat(e.getMessage(), equalTo("Expected numeric range type but found non-numeric range [ip_range]")); } } } public void testAsSubAgg() throws IOException { - AggregationBuilder request = new HistogramAggregationBuilder("outer").field("outer").interval(5).subAggregation( - new HistogramAggregationBuilder("inner").field("inner").interval(5).subAggregation( - new MinAggregationBuilder("min").field("n"))); + AggregationBuilder request = new HistogramAggregationBuilder("outer").field("outer") + .interval(5) + .subAggregation( + new HistogramAggregationBuilder("inner").field("inner") + .interval(5) + .subAggregation(new MinAggregationBuilder("min").field("n")) + ); CheckedConsumer buildIndex = iw -> { List> docs = new ArrayList<>(); for (int n = 0; n < 10000; n++) { - BytesRef 
outerRange = RangeType.LONG.encodeRanges(Set.of( - new RangeFieldMapper.Range(RangeType.LONG, n % 100, n % 100 + 10, true, true) - )); - BytesRef innerRange = RangeType.LONG.encodeRanges(Set.of( - new RangeFieldMapper.Range(RangeType.LONG, n / 100, n / 100 + 10, true, true) - )); - - docs.add(List.of( - new BinaryDocValuesField("outer", outerRange), - new BinaryDocValuesField("inner", innerRange), - new SortedNumericDocValuesField("n", n) - )); + BytesRef outerRange = RangeType.LONG.encodeRanges( + Set.of(new RangeFieldMapper.Range(RangeType.LONG, n % 100, n % 100 + 10, true, true)) + ); + BytesRef innerRange = RangeType.LONG.encodeRanges( + Set.of(new RangeFieldMapper.Range(RangeType.LONG, n / 100, n / 100 + 10, true, true)) + ); + + docs.add( + List.of( + new BinaryDocValuesField("outer", outerRange), + new BinaryDocValuesField("inner", innerRange), + new SortedNumericDocValuesField("n", n) + ) + ); } iw.addDocuments(docs); }; @@ -481,8 +500,7 @@ public void testAsSubAgg() throws IOException { InternalMin min = innerBucket.getAggregations().get("min"); int minOuterIdxWithOverlappingRange = Math.max(0, outerIdx - 2); int minInnerIdxWithOverlappingRange = Math.max(0, innerIdx - 2); - assertThat(min.getValue(), - equalTo(minOuterIdxWithOverlappingRange * 5.0 + minInnerIdxWithOverlappingRange * 500.0)); + assertThat(min.getValue(), equalTo(minOuterIdxWithOverlappingRange * 5.0 + minInnerIdxWithOverlappingRange * 500.0)); } } }; diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/VariableWidthHistogramAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/VariableWidthHistogramAggregatorTests.java index 8d8cea36d97b2..a097c486c5435 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/VariableWidthHistogramAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/VariableWidthHistogramAggregatorTests.java @@ -20,8 +20,8 @@ import org.apache.lucene.util.NumericUtils; import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetadata; -import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.NumberFieldMapper; @@ -55,14 +55,18 @@ public class VariableWidthHistogramAggregatorTests extends AggregatorTestCase { private static final Query DEFAULT_QUERY = new MatchAllDocsQuery(); - public void testNoDocs() throws Exception{ + public void testNoDocs() throws Exception { final List dataset = Arrays.asList(); - testSearchCase(DEFAULT_QUERY, dataset, true, + testSearchCase( + DEFAULT_QUERY, + dataset, + true, aggregation -> aggregation.field(NUMERIC_FIELD).setNumBuckets(2).setShardSize(6).setInitialBuffer(4), histogram -> { final List buckets = histogram.getBuckets(); assertEquals(0, buckets.size()); - }); + } + ); } public void testMoreClustersThanDocs() throws Exception { @@ -79,16 +83,14 @@ public void testMoreClustersThanDocs() throws Exception { expectedMins.put(-3d, -3d); expectedMins.put(10d, 10d); - testSearchCase(DEFAULT_QUERY, dataset, true, - aggregation -> aggregation.field(NUMERIC_FIELD).setNumBuckets(4), - histogram -> { - final List buckets = histogram.getBuckets(); - assertEquals(expectedDocCount.size(), buckets.size()); - buckets.forEach(bucket -> { - 
assertEquals(expectedDocCount.getOrDefault(bucket.getKey(), 0).longValue(), bucket.getDocCount()); - assertEquals(expectedMins.getOrDefault(bucket.getKey(), 0d).doubleValue(), bucket.min(), doubleError); - }); + testSearchCase(DEFAULT_QUERY, dataset, true, aggregation -> aggregation.field(NUMERIC_FIELD).setNumBuckets(4), histogram -> { + final List buckets = histogram.getBuckets(); + assertEquals(expectedDocCount.size(), buckets.size()); + buckets.forEach(bucket -> { + assertEquals(expectedDocCount.getOrDefault(bucket.getKey(), 0).longValue(), bucket.getDocCount()); + assertEquals(expectedMins.getOrDefault(bucket.getKey(), 0d).doubleValue(), bucket.min(), doubleError); }); + }); } public void testLongs() throws Exception { @@ -113,7 +115,10 @@ public void testLongs() throws Exception { expectedMaxes.put(1d, 1d); expectedMaxes.put(4.5, 5d); - testSearchCase(DEFAULT_QUERY, dataset, false, + testSearchCase( + DEFAULT_QUERY, + dataset, + false, aggregation -> aggregation.field(NUMERIC_FIELD).setNumBuckets(3).setShardSize(6).setInitialBuffer(3), histogram -> { final List buckets = histogram.getBuckets(); @@ -123,7 +128,8 @@ public void testLongs() throws Exception { assertEquals(expectedMins.getOrDefault(bucket.getKey(), 0d).doubleValue(), bucket.min(), doubleError); assertEquals(expectedMaxes.getOrDefault(bucket.getKey(), 0d).doubleValue(), bucket.max(), doubleError); }); - }); + } + ); } public void testDoubles() throws Exception { @@ -155,7 +161,10 @@ public void testDoubles() throws Exception { expectedMaxesOnlySearch.put(5.3, 5.9); expectedMaxesOnlySearch.put(8.8, 8.8); - testSearchCase(DEFAULT_QUERY, dataset, false, + testSearchCase( + DEFAULT_QUERY, + dataset, + false, aggregation -> aggregation.field(NUMERIC_FIELD).setNumBuckets(4).setShardSize(6).setInitialBuffer(4), histogram -> { final List buckets = histogram.getBuckets(); @@ -165,7 +174,8 @@ public void testDoubles() throws Exception { assertEquals(expectedCentroidsOnlySearch.getOrDefault(bucket.min(), 0d).doubleValue(), bucket.centroid(), doubleError); assertEquals(expectedMaxesOnlySearch.getOrDefault(bucket.min(), 0d).doubleValue(), bucket.max(), doubleError); }); - }); + } + ); // Search + Reduce @@ -181,20 +191,23 @@ public void testDoubles() throws Exception { // Indexed by min final Map expectedCentroidsSearchReduce = new HashMap<>(); expectedCentroidsSearchReduce.put(-0.4, 0.4); - expectedCentroidsSearchReduce.put(2.26,2.78); + expectedCentroidsSearchReduce.put(2.26, 2.78); expectedCentroidsSearchReduce.put(5.3, 5.6); expectedCentroidsSearchReduce.put(8.8, 8.8); final Map expectedMaxesSearchReduce = new HashMap<>(); expectedMaxesSearchReduce.put(-0.4, 1.2); - expectedMaxesSearchReduce.put(2.26,3.3); + expectedMaxesSearchReduce.put(2.26, 3.3); expectedMaxesSearchReduce.put(5.3, 5.9); expectedMaxesSearchReduce.put(8.8, 8.8); - testSearchCase(DEFAULT_QUERY, dataset, false, + testSearchCase( + DEFAULT_QUERY, + dataset, + false, aggregation -> aggregation.field(NUMERIC_FIELD).setNumBuckets(4).setShardSize(6).setInitialBuffer(4), histogram -> { - final List buckets = histogram.getBuckets(); - assertEquals(expectedDocCountSearchReduce.size(), buckets.size()); + final List buckets = histogram.getBuckets(); + assertEquals(expectedDocCountSearchReduce.size(), buckets.size()); buckets.forEach(bucket -> { long expectedDocCount = expectedDocCountSearchReduce.getOrDefault(bucket.min(), 0).longValue(); double expectedCentroid = expectedCentroidsSearchReduce.getOrDefault(bucket.min(), 0d).doubleValue(); @@ -203,7 +216,8 @@ public void 
testDoubles() throws Exception { assertEquals(expectedCentroid, bucket.centroid(), doubleError); assertEquals(expectedMax, bucket.max(), doubleError); }); - }); + } + ); } // Once the cache limit is reached, cached documents are collected into (3/4 * shard_size) buckets @@ -213,7 +227,7 @@ public void testNewBucketCreation() throws Exception { double doubleError = 1d / 10000d; // Expected clusters: [ (-1, 1), (3, 5), (7, 9), (11, 13), (15, 17), - // (19), (25), (30), (32), (36), (40), (50), (60), (75), (80) ] + // (19), (25), (30), (32), (36), (40), (50), (60), (75), (80) ] final List keys = Arrays.asList(0d, 4d, 8d, 12d, 16d, 19d, 25d, 30d, 32d, 36d, 40d, 50d, 60d, 75d, 80d); final List mins = Arrays.asList(-1d, 3d, 7d, 11d, 15d, 19d, 25d, 30d, 32d, 36d, 40d, 50d, 60d, 75d, 80d); final List maxes = Arrays.asList(1d, 5d, 9d, 13d, 17d, 19d, 25d, 30d, 32d, 36d, 40d, 50d, 60d, 75d, 80d); @@ -223,38 +237,39 @@ public void testNewBucketCreation() throws Exception { final Map expectedDocCountOnlySearch = new HashMap<>(); final Map expectedMinsOnlySearch = new HashMap<>(); final Map expectedMaxesOnlySearch = new HashMap<>(); - for(int i=0; i < keys.size(); i++){ + for (int i = 0; i < keys.size(); i++) { expectedDocCountOnlySearch.put(keys.get(i), docCounts.get(i)); expectedMinsOnlySearch.put(keys.get(i), mins.get(i)); expectedMaxesOnlySearch.put(keys.get(i), maxes.get(i)); } - testSearchCase(DEFAULT_QUERY, dataset, false, - aggregation -> aggregation.field(NUMERIC_FIELD).setNumBuckets(15), - histogram -> { - final List buckets = histogram.getBuckets(); - assertEquals(expectedDocCountOnlySearch.size(), buckets.size()); - buckets.forEach(bucket -> { - long expectedDocCount = expectedDocCountOnlySearch.getOrDefault(bucket.getKey(), 0).longValue(); - double expectedCentroid = expectedMinsOnlySearch.getOrDefault(bucket.getKey(), 0d).doubleValue(); - double expectedMax = expectedMaxesOnlySearch.getOrDefault(bucket.getKey(), 0d).doubleValue(); - assertEquals(bucket.getKeyAsString(), expectedDocCount, bucket.getDocCount()); - assertEquals(bucket.getKeyAsString(), expectedCentroid, bucket.min(), doubleError); - assertEquals(bucket.getKeyAsString(), expectedMax, bucket.max(), doubleError); - }); + testSearchCase(DEFAULT_QUERY, dataset, false, aggregation -> aggregation.field(NUMERIC_FIELD).setNumBuckets(15), histogram -> { + final List buckets = histogram.getBuckets(); + assertEquals(expectedDocCountOnlySearch.size(), buckets.size()); + buckets.forEach(bucket -> { + long expectedDocCount = expectedDocCountOnlySearch.getOrDefault(bucket.getKey(), 0).longValue(); + double expectedCentroid = expectedMinsOnlySearch.getOrDefault(bucket.getKey(), 0d).doubleValue(); + double expectedMax = expectedMaxesOnlySearch.getOrDefault(bucket.getKey(), 0d).doubleValue(); + assertEquals(bucket.getKeyAsString(), expectedDocCount, bucket.getDocCount()); + assertEquals(bucket.getKeyAsString(), expectedCentroid, bucket.min(), doubleError); + assertEquals(bucket.getKeyAsString(), expectedMax, bucket.max(), doubleError); }); + }); // Rerun the test with very large keys which can cause an overflow final Map expectedDocCountBigKeys = new HashMap<>(); final Map expectedMinsBigKeys = new HashMap<>(); final Map expectedMaxesBigKeys = new HashMap<>(); - for(int i=0; i< keys.size(); i++){ + for (int i = 0; i < keys.size(); i++) { expectedDocCountBigKeys.put(Long.MAX_VALUE * keys.get(i), docCounts.get(i)); expectedMinsBigKeys.put(Long.MAX_VALUE * keys.get(i), Long.MAX_VALUE * mins.get(i)); expectedMaxesBigKeys.put(Long.MAX_VALUE * 
keys.get(i), Long.MAX_VALUE * maxes.get(i)); } - testSearchCase(DEFAULT_QUERY, dataset.stream().map(n -> Double.valueOf(n.doubleValue() * Long.MAX_VALUE)).collect(toList()), false, + testSearchCase( + DEFAULT_QUERY, + dataset.stream().map(n -> Double.valueOf(n.doubleValue() * Long.MAX_VALUE)).collect(toList()), + false, aggregation -> aggregation.field(NUMERIC_FIELD).setNumBuckets(15), histogram -> { final List buckets = histogram.getBuckets(); @@ -267,12 +282,13 @@ public void testNewBucketCreation() throws Exception { assertEquals(expectedCentroid, bucket.min(), doubleError); assertEquals(expectedMax, bucket.max(), doubleError); }); - }); + } + ); } // There should not be more than `shard_size` documents on a node, even when very distant documents appear - public void testNewBucketLimit() throws Exception{ - final List dataset = Arrays.asList(1, 2, 3, 4, 5, 10, 20, 50, 100, 5400, -900); + public void testNewBucketLimit() throws Exception { + final List dataset = Arrays.asList(1, 2, 3, 4, 5, 10, 20, 50, 100, 5400, -900); double doubleError = 1d / 10000d; // Expected clusters: [ (-900, 1, 2, 3, 4, 5), (10, 20, 50, 100, 5400)] @@ -289,8 +305,11 @@ public void testNewBucketLimit() throws Exception{ expectedMaxes.put(-147.5d, 5d); expectedMaxes.put(1116.0d, 5400d); - testSearchCase(DEFAULT_QUERY, dataset, false, - aggregation -> aggregation.field(NUMERIC_FIELD) .setNumBuckets(2).setShardSize(4).setInitialBuffer(5), + testSearchCase( + DEFAULT_QUERY, + dataset, + false, + aggregation -> aggregation.field(NUMERIC_FIELD).setNumBuckets(2).setShardSize(4).setInitialBuffer(5), histogram -> { final List buckets = histogram.getBuckets(); assertEquals(expectedDocCount.size(), buckets.size()); @@ -299,22 +318,25 @@ public void testNewBucketLimit() throws Exception{ assertEquals(expectedMins.getOrDefault(bucket.getKey(), 0d).doubleValue(), bucket.min(), doubleError); assertEquals(expectedMaxes.getOrDefault(bucket.getKey(), 0d).doubleValue(), bucket.max(), doubleError); }); - }); + } + ); } + public void testSimpleSubAggregations() throws IOException { + final List dataset = Arrays.asList(5, 1, 9, 2, 8); - public void testSimpleSubAggregations() throws IOException{ - final List dataset = Arrays.asList(5, 1, 9, 2, 8); - - testSearchCase(DEFAULT_QUERY, dataset, false, + testSearchCase( + DEFAULT_QUERY, + dataset, + false, aggregation -> aggregation.field(NUMERIC_FIELD) .setNumBuckets(3) .setInitialBuffer(3) .setShardSize(4) .subAggregation(AggregationBuilders.stats("stats").field(NUMERIC_FIELD)), histogram -> { - double deltaError = 1d/10000d; + double deltaError = 1d / 10000d; // Expected clusters: [ (1, 2), (5), (8,9) ] @@ -335,13 +357,17 @@ public void testSimpleSubAggregations() throws IOException{ assertEquals(9, stats.getMax(), deltaError); assertEquals(2, stats.getCount()); assertTrue(AggregationInspectionHelper.hasValue(stats)); - }); + } + ); } - public void testComplexSubAggregations() throws IOException{ - final List dataset = Arrays.asList(5, 4, 3, 2, 1, 0, 6, 7, 8, 9, 10, 11); + public void testComplexSubAggregations() throws IOException { + final List dataset = Arrays.asList(5, 4, 3, 2, 1, 0, 6, 7, 8, 9, 10, 11); - testSearchCase(DEFAULT_QUERY, dataset, false, + testSearchCase( + DEFAULT_QUERY, + dataset, + false, aggregation -> aggregation.field(NUMERIC_FIELD) .setNumBuckets(3) .setInitialBuffer(12) @@ -369,21 +395,22 @@ public void testComplexSubAggregations() throws IOException{ assertEquals(11d, stats.getMax(), deltaError); assertEquals(4L, stats.getCount()); 
assertTrue(AggregationInspectionHelper.hasValue(stats)); - }); + } + ); } - public void testSubAggregationReduction() throws IOException{ - final List dataset = Arrays.asList(1L, 1L, 1L, 2L, 2L); + public void testSubAggregationReduction() throws IOException { + final List dataset = Arrays.asList(1L, 1L, 1L, 2L, 2L); - testSearchCase(DEFAULT_QUERY, dataset, false, + testSearchCase( + DEFAULT_QUERY, + dataset, + false, aggregation -> aggregation.field(NUMERIC_FIELD) .setNumBuckets(3) .setInitialBuffer(12) .setShardSize(4) - .subAggregation(new TermsAggregationBuilder("terms") - .field(NUMERIC_FIELD) - .shardSize(2) - .size(1)), + .subAggregation(new TermsAggregationBuilder("terms").field(NUMERIC_FIELD).shardSize(2).size(1)), histogram -> { double deltaError = 1d / 10000d; @@ -394,11 +421,12 @@ public void testSubAggregationReduction() throws IOException{ LongTerms terms = histogram.getBuckets().get(0).getAggregations().get("terms"); assertEquals(1L, terms.getBuckets().size(), deltaError); assertEquals(1L, terms.getBuckets().get(0).getKey()); - }); + } + ); } - public void testMultipleSegments() throws IOException{ - final List dataset = Arrays.asList(1001, 1002, 1, 2, 1003, 3, 1004, 1005, 4, 5); + public void testMultipleSegments() throws IOException { + final List dataset = Arrays.asList(1001, 1002, 1, 2, 1003, 3, 1004, 1005, 4, 5); // There should be two clusters: (1, 2, 3, 4, 5) and (1001, 1002, 1003, 1004, 1005) // We can't enable multiple segments per index for many of the tests above, because the clusters are too close. @@ -408,7 +436,10 @@ public void testMultipleSegments() throws IOException{ // To account for this case of a document switching clusters, we check that each cluster centroid is within // a certain range, rather than asserting exact values. 
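As a rough intuition for the order dependence described in the comments above, the following deliberately simplified sketch (plain Java; the class and method names are invented, and this is not the VariableWidthHistogramAggregator's actual buffering/merge logic) seeds one centroid per requested bucket and then folds each later value into the nearest centroid as a running mean. A value that sits between two clusters can be absorbed by either neighbour depending on arrival order, which is why the multi-segment test below asserts centroid ranges rather than exact values.

    import java.util.Arrays;
    import java.util.List;

    // Toy nearest-centroid sketch: result depends on the order in which values arrive.
    class OrderDependentClusteringSketch {

        static double[] cluster(List<Double> values, int buckets) {
            double[] centroids = new double[buckets];
            long[] counts = new long[buckets];
            for (int i = 0; i < buckets; i++) {           // seed one centroid per bucket
                centroids[i] = values.get(i);
                counts[i] = 1;
            }
            for (int i = buckets; i < values.size(); i++) {
                double v = values.get(i);
                int nearest = 0;                           // pick the closest centroid
                for (int c = 1; c < buckets; c++) {
                    if (Math.abs(v - centroids[c]) < Math.abs(v - centroids[nearest])) {
                        nearest = c;
                    }
                }
                counts[nearest]++;                         // fold the value into a running mean
                centroids[nearest] += (v - centroids[nearest]) / counts[nearest];
            }
            return centroids;
        }

        public static void main(String[] args) {
            // Same four values, two arrival orders: the borderline values 4.9 and 6
            // join different clusters, so the resulting centroids differ.
            System.out.println(Arrays.toString(cluster(List.of(0d, 10d, 4.9, 6d), 2))); // ~[3.6, 10.0]
            System.out.println(Arrays.toString(cluster(List.of(0d, 10d, 6d, 4.9), 2))); // ~[0.0, 7.0]
        }
    }

In the real aggregator the effect is amplified by per-segment buffering (initial_buffer) and the collection of cached documents into roughly 3/4 * shard_size clusters noted earlier, so only the centroid ranges, not exact positions, are stable across randomized runs.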
- testSearchCase(DEFAULT_QUERY, dataset, true, + testSearchCase( + DEFAULT_QUERY, + dataset, + true, aggregation -> aggregation.field(NUMERIC_FIELD) .setNumBuckets(2) .setInitialBuffer(4) @@ -429,7 +460,8 @@ public void testMultipleSegments() throws IOException{ assertThat(buckets.get(1).getDocCount(), equalTo(dataset.size() - buckets.get(0).getDocCount())); assertThat(buckets.get(1).centroid(), both(greaterThanOrEqualTo(800.0)).and(lessThanOrEqualTo(1005.0))); assertEquals(1005, buckets.get(1).max(), deltaError); - }); + } + ); } @@ -471,40 +503,53 @@ public void testAsSubAggregation() throws IOException { assertThat(e.getMessage(), containsString("cannot be nested")); } - public void testShardSizeTooSmall() throws Exception{ - Exception e = expectThrows(IllegalArgumentException.class, () -> - new VariableWidthHistogramAggregationBuilder("test").setShardSize(1)); + public void testShardSizeTooSmall() throws Exception { + Exception e = expectThrows( + IllegalArgumentException.class, + () -> new VariableWidthHistogramAggregationBuilder("test").setShardSize(1) + ); assertThat(e.getMessage(), equalTo("shard_size must be greater than [1] for [test]")); } public void testSmallShardSize() throws Exception { - Exception e = expectThrows(IllegalArgumentException.class, () -> testSearchCase( - DEFAULT_QUERY, - List.of(), - true, - aggregation -> aggregation.field(NUMERIC_FIELD).setNumBuckets(2).setShardSize(2), - histogram -> {fail();} - )); + Exception e = expectThrows( + IllegalArgumentException.class, + () -> testSearchCase( + DEFAULT_QUERY, + List.of(), + true, + aggregation -> aggregation.field(NUMERIC_FIELD).setNumBuckets(2).setShardSize(2), + histogram -> { fail(); } + ) + ); assertThat(e.getMessage(), equalTo("3/4 of shard_size must be at least buckets but was [1<2] for [_name]")); } public void testHugeShardSize() throws Exception { final List dataset = Arrays.asList(1, 2, 3); - testSearchCase(DEFAULT_QUERY, dataset, true, aggregation -> aggregation.field(NUMERIC_FIELD).setShardSize(1000000000), + testSearchCase( + DEFAULT_QUERY, + dataset, + true, + aggregation -> aggregation.field(NUMERIC_FIELD).setShardSize(1000000000), histogram -> assertThat( histogram.getBuckets().stream().map(InternalVariableWidthHistogram.Bucket::getKey).collect(toList()), - equalTo(List.of(1.0, 2.0, 3.0))) + equalTo(List.of(1.0, 2.0, 3.0)) + ) ); } public void testSmallInitialBuffer() throws Exception { - Exception e = expectThrows(IllegalArgumentException.class, () -> testSearchCase( - DEFAULT_QUERY, - List.of(), - true, - aggregation -> aggregation.field(NUMERIC_FIELD).setInitialBuffer(1), - histogram -> {fail();} - )); + Exception e = expectThrows( + IllegalArgumentException.class, + () -> testSearchCase( + DEFAULT_QUERY, + List.of(), + true, + aggregation -> aggregation.field(NUMERIC_FIELD).setInitialBuffer(1), + histogram -> { fail(); } + ) + ); assertThat(e.getMessage(), equalTo("initial_buffer must be at least buckets but was [1<10] for [_name]")); } @@ -536,8 +581,7 @@ public void testDefaultInitialBufferDependsOnNumBuckets() throws Exception { @Override protected IndexSettings createIndexSettings() { - final Settings nodeSettings = Settings.builder() - .put("search.max_buckets", 25000).build(); + final Settings nodeSettings = Settings.builder().put("search.max_buckets", 25000).build(); return new IndexSettings( IndexMetadata.builder("_index") .settings(Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT)) @@ -549,9 +593,13 @@ protected IndexSettings createIndexSettings() { ); } - 
private void testSearchCase(final Query query, final List dataset, boolean multipleSegments, - final Consumer configure, - final Consumer verify) throws IOException { + private void testSearchCase( + final Query query, + final List dataset, + boolean multipleSegments, + final Consumer configure, + final Consumer verify + ) throws IOException { try (Directory directory = newDirectory()) { try (RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory)) { indexSampleData(dataset, indexWriter, multipleSegments); @@ -560,25 +608,22 @@ private void testSearchCase(final Query query, final List dataset, boole try (IndexReader indexReader = DirectoryReader.open(directory)) { final IndexSearcher indexSearcher = newSearcher(indexReader, true, true); - final VariableWidthHistogramAggregationBuilder aggregationBuilder = - new VariableWidthHistogramAggregationBuilder("_name"); + final VariableWidthHistogramAggregationBuilder aggregationBuilder = new VariableWidthHistogramAggregationBuilder("_name"); if (configure != null) { configure.accept(aggregationBuilder); } final MappedFieldType fieldType; - if(dataset.size() == 0 || dataset.get(0) instanceof Double){ + if (dataset.size() == 0 || dataset.get(0) instanceof Double) { fieldType = new NumberFieldMapper.NumberFieldType(aggregationBuilder.field(), NumberFieldMapper.NumberType.DOUBLE); - } else if(dataset.get(0) instanceof Long){ + } else if (dataset.get(0) instanceof Long) { fieldType = new NumberFieldMapper.NumberFieldType(aggregationBuilder.field(), NumberFieldMapper.NumberType.LONG); - } else if (dataset.get(0) instanceof Integer){ + } else if (dataset.get(0) instanceof Integer) { fieldType = new NumberFieldMapper.NumberFieldType(aggregationBuilder.field(), NumberFieldMapper.NumberType.INTEGER); } else { throw new IOException("Test data has an invalid type"); } - - final InternalVariableWidthHistogram histogram = searchAndReduce(indexSearcher, query, aggregationBuilder, fieldType); verify.accept(histogram); } @@ -608,7 +653,7 @@ private void indexSampleData(List dataset, RandomIndexWriter indexWriter } } - long convertDocumentToSortableValue(Number doc) throws IOException{ + long convertDocumentToSortableValue(Number doc) throws IOException { if (doc instanceof Double) { return NumericUtils.doubleToSortableLong(doc.doubleValue()); } else if (doc instanceof Integer) { @@ -619,5 +664,4 @@ long convertDocumentToSortableValue(Number doc) throws IOException{ throw new IOException("Document has an invalid type"); } - } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/missing/InternalMissingTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/missing/InternalMissingTests.java index 6d8ca7c68daf0..b66175450ede0 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/missing/InternalMissingTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/missing/InternalMissingTests.java @@ -17,8 +17,12 @@ public class InternalMissingTests extends InternalSingleBucketAggregationTestCase { @Override - protected InternalMissing createTestInstance(String name, long docCount, InternalAggregations aggregations, - Map metadata) { + protected InternalMissing createTestInstance( + String name, + long docCount, + InternalAggregations aggregations, + Map metadata + ) { return new InternalMissing(name, docCount, aggregations, metadata); } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/missing/MissingAggregatorTests.java 
b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/missing/MissingAggregatorTests.java index fc68909f74644..3d38b40cb0945 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/missing/MissingAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/missing/MissingAggregatorTests.java @@ -18,8 +18,8 @@ import org.apache.lucene.search.Query; import org.apache.lucene.store.Directory; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.NumberFieldMapper; import org.elasticsearch.index.mapper.NumberFieldMapper.NumberType; @@ -66,26 +66,18 @@ public class MissingAggregatorTests extends AggregatorTestCase { public void testMatchNoDocs() throws IOException { final int numDocs = randomIntBetween(10, 200); - final MappedFieldType fieldType - = new NumberFieldMapper.NumberFieldType("field", NumberType.LONG); - - final MissingAggregationBuilder builder = new MissingAggregationBuilder("_name") - .field(fieldType.name()); - - testCase( - newMatchAllQuery(), - builder, - writer -> { - for (int i = 0; i < numDocs; i++) { - writer.addDocument(singleton(new SortedNumericDocValuesField(fieldType.name(), randomLong()))); - } - }, - internalMissing -> { - assertEquals(0, internalMissing.getDocCount()); - assertFalse(AggregationInspectionHelper.hasValue(internalMissing)); - }, - singleton(fieldType) - ); + final MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("field", NumberType.LONG); + + final MissingAggregationBuilder builder = new MissingAggregationBuilder("_name").field(fieldType.name()); + + testCase(newMatchAllQuery(), builder, writer -> { + for (int i = 0; i < numDocs; i++) { + writer.addDocument(singleton(new SortedNumericDocValuesField(fieldType.name(), randomLong()))); + } + }, internalMissing -> { + assertEquals(0, internalMissing.getDocCount()); + assertFalse(AggregationInspectionHelper.hasValue(internalMissing)); + }, singleton(fieldType)); } public void testMatchAllDocs() throws IOException { @@ -94,31 +86,23 @@ public void testMatchAllDocs() throws IOException { final MappedFieldType aggFieldType = new NumberFieldMapper.NumberFieldType("agg_field", NumberType.LONG); final MappedFieldType anotherFieldType = new NumberFieldMapper.NumberFieldType("another_field", NumberType.LONG); - final MissingAggregationBuilder builder = new MissingAggregationBuilder("_name") - .field(aggFieldType.name()); - - testCase( - newMatchAllQuery(), - builder, - writer -> { - for (int i = 0; i < numDocs; i++) { - writer.addDocument(singleton(new SortedNumericDocValuesField(anotherFieldType.name(), randomLong()))); - } - }, - internalMissing -> { - assertEquals(numDocs, internalMissing.getDocCount()); - assertTrue(AggregationInspectionHelper.hasValue(internalMissing)); - }, - List.of(aggFieldType, anotherFieldType) - ); + final MissingAggregationBuilder builder = new MissingAggregationBuilder("_name").field(aggFieldType.name()); + + testCase(newMatchAllQuery(), builder, writer -> { + for (int i = 0; i < numDocs; i++) { + writer.addDocument(singleton(new SortedNumericDocValuesField(anotherFieldType.name(), randomLong()))); + } + }, internalMissing -> { + assertEquals(numDocs, internalMissing.getDocCount()); + assertTrue(AggregationInspectionHelper.hasValue(internalMissing)); + }, List.of(aggFieldType, 
anotherFieldType)); } public void testMatchSparse() throws IOException { final MappedFieldType aggFieldType = new NumberFieldMapper.NumberFieldType("agg_field", NumberType.LONG); final MappedFieldType anotherFieldType = new NumberFieldMapper.NumberFieldType("another_field", NumberType.LONG); - final MissingAggregationBuilder builder = new MissingAggregationBuilder("_name") - .field(aggFieldType.name()); + final MissingAggregationBuilder builder = new MissingAggregationBuilder("_name").field(aggFieldType.name()); final int numDocs = randomIntBetween(100, 200); int docsMissingAggField = 0; @@ -133,16 +117,10 @@ public void testMatchSparse() throws IOException { } final int finalDocsMissingAggField = docsMissingAggField; - testCase( - newMatchAllQuery(), - builder, - writer -> writer.addDocuments(docs), - internalMissing -> { - assertEquals(finalDocsMissingAggField, internalMissing.getDocCount()); - assertTrue(AggregationInspectionHelper.hasValue(internalMissing)); - }, - List.of(aggFieldType, anotherFieldType) - ); + testCase(newMatchAllQuery(), builder, writer -> writer.addDocuments(docs), internalMissing -> { + assertEquals(finalDocsMissingAggField, internalMissing.getDocCount()); + assertTrue(AggregationInspectionHelper.hasValue(internalMissing)); + }, List.of(aggFieldType, anotherFieldType)); } public void testMatchSparseRangeField() throws IOException { @@ -154,8 +132,7 @@ public void testMatchSparseRangeField() throws IOException { final BytesRef encodedRange = rangeType.encodeRanges(singleton(range)); final BinaryDocValuesField encodedRangeField = new BinaryDocValuesField(aggFieldType.name(), encodedRange); - final MissingAggregationBuilder builder = new MissingAggregationBuilder("_name") - .field(aggFieldType.name()); + final MissingAggregationBuilder builder = new MissingAggregationBuilder("_name").field(aggFieldType.name()); final int numDocs = randomIntBetween(100, 200); int docsMissingAggField = 0; @@ -170,63 +147,42 @@ public void testMatchSparseRangeField() throws IOException { } final int finalDocsMissingAggField = docsMissingAggField; - testCase( - newMatchAllQuery(), - builder, - writer -> writer.addDocuments(docs), - internalMissing -> { - assertEquals(finalDocsMissingAggField, internalMissing.getDocCount()); - assertTrue(AggregationInspectionHelper.hasValue(internalMissing)); - }, - List.of(aggFieldType, anotherFieldType) - ); + testCase(newMatchAllQuery(), builder, writer -> writer.addDocuments(docs), internalMissing -> { + assertEquals(finalDocsMissingAggField, internalMissing.getDocCount()); + assertTrue(AggregationInspectionHelper.hasValue(internalMissing)); + }, List.of(aggFieldType, anotherFieldType)); } public void testUnmappedWithoutMissingParam() throws IOException { final int numDocs = randomIntBetween(10, 20); final MappedFieldType aggFieldType = new NumberFieldMapper.NumberFieldType("agg_field", NumberType.LONG); - final MissingAggregationBuilder builder = new MissingAggregationBuilder("_name") - .field("unknown_field"); - - testCase( - newMatchAllQuery(), - builder, - writer -> { - for (int i = 0; i < numDocs; i++) { - writer.addDocument(singleton(new SortedNumericDocValuesField(aggFieldType.name(), randomLong()))); - } - }, - internalMissing -> { - assertEquals(numDocs, internalMissing.getDocCount()); - assertTrue(AggregationInspectionHelper.hasValue(internalMissing)); - }, - singleton(aggFieldType) - ); + final MissingAggregationBuilder builder = new MissingAggregationBuilder("_name").field("unknown_field"); + + testCase(newMatchAllQuery(), builder, writer 
-> { + for (int i = 0; i < numDocs; i++) { + writer.addDocument(singleton(new SortedNumericDocValuesField(aggFieldType.name(), randomLong()))); + } + }, internalMissing -> { + assertEquals(numDocs, internalMissing.getDocCount()); + assertTrue(AggregationInspectionHelper.hasValue(internalMissing)); + }, singleton(aggFieldType)); } public void testUnmappedWithMissingParam() throws IOException { final int numDocs = randomIntBetween(10, 20); final MappedFieldType aggFieldType = new NumberFieldMapper.NumberFieldType("agg_field", NumberType.LONG); - final MissingAggregationBuilder builder = new MissingAggregationBuilder("_name") - .field("unknown_field") - .missing(randomLong()); - - testCase( - newMatchAllQuery(), - builder, - writer -> { - for (int i = 0; i < numDocs; i++) { - writer.addDocument(singleton(new SortedNumericDocValuesField(aggFieldType.name(), randomLong()))); - } - }, - internalMissing -> { - assertEquals(0, internalMissing.getDocCount()); - assertFalse(AggregationInspectionHelper.hasValue(internalMissing)); - }, - singleton(aggFieldType) - ); + final MissingAggregationBuilder builder = new MissingAggregationBuilder("_name").field("unknown_field").missing(randomLong()); + + testCase(newMatchAllQuery(), builder, writer -> { + for (int i = 0; i < numDocs; i++) { + writer.addDocument(singleton(new SortedNumericDocValuesField(aggFieldType.name(), randomLong()))); + } + }, internalMissing -> { + assertEquals(0, internalMissing.getDocCount()); + assertFalse(AggregationInspectionHelper.hasValue(internalMissing)); + }, singleton(aggFieldType)); } public void testMissingParam() throws IOException { @@ -235,32 +191,23 @@ public void testMissingParam() throws IOException { final MappedFieldType aggFieldType = new NumberFieldMapper.NumberFieldType("agg_field", NumberType.LONG); final MappedFieldType anotherFieldType = new NumberFieldMapper.NumberFieldType("another_field", NumberType.LONG); - final MissingAggregationBuilder builder = new MissingAggregationBuilder("_name") - .field(aggFieldType.name()) - .missing(randomLong()); - - testCase( - newMatchAllQuery(), - builder, - writer -> { - for (int i = 0; i < numDocs; i++) { - writer.addDocument(singleton(new SortedNumericDocValuesField(anotherFieldType.name(), randomLong()))); - } - }, - internalMissing -> { - assertEquals(0, internalMissing.getDocCount()); - assertFalse(AggregationInspectionHelper.hasValue(internalMissing)); - }, - List.of(aggFieldType, anotherFieldType) - ); + final MissingAggregationBuilder builder = new MissingAggregationBuilder("_name").field(aggFieldType.name()).missing(randomLong()); + + testCase(newMatchAllQuery(), builder, writer -> { + for (int i = 0; i < numDocs; i++) { + writer.addDocument(singleton(new SortedNumericDocValuesField(anotherFieldType.name(), randomLong()))); + } + }, internalMissing -> { + assertEquals(0, internalMissing.getDocCount()); + assertFalse(AggregationInspectionHelper.hasValue(internalMissing)); + }, List.of(aggFieldType, anotherFieldType)); } public void testMultiValuedField() throws IOException { final MappedFieldType aggFieldType = new NumberFieldMapper.NumberFieldType("agg_field", NumberType.LONG); final MappedFieldType anotherFieldType = new NumberFieldMapper.NumberFieldType("another_field", NumberType.LONG); - final MissingAggregationBuilder builder = new MissingAggregationBuilder("_name") - .field(aggFieldType.name()); + final MissingAggregationBuilder builder = new MissingAggregationBuilder("_name").field(aggFieldType.name()); final int numDocs = randomIntBetween(100, 200); int 
docsMissingAggField = 0; @@ -268,10 +215,12 @@ public void testMultiValuedField() throws IOException { for (int i = 0; i < numDocs; i++) { if (randomBoolean()) { final long randomLong = randomLong(); - docs.add(Set.of( - new SortedNumericDocValuesField(aggFieldType.name(), randomLong), - new SortedNumericDocValuesField(aggFieldType.name(), randomLong + 1) - )); + docs.add( + Set.of( + new SortedNumericDocValuesField(aggFieldType.name(), randomLong), + new SortedNumericDocValuesField(aggFieldType.name(), randomLong + 1) + ) + ); } else { docs.add(singleton(new SortedNumericDocValuesField(anotherFieldType.name(), randomLong()))); docsMissingAggField++; @@ -279,16 +228,10 @@ public void testMultiValuedField() throws IOException { } final int finalDocsMissingAggField = docsMissingAggField; - testCase( - newMatchAllQuery(), - builder, - writer -> writer.addDocuments(docs), - internalMissing -> { - assertEquals(finalDocsMissingAggField, internalMissing.getDocCount()); - assertTrue(AggregationInspectionHelper.hasValue(internalMissing)); - }, - List.of(aggFieldType, anotherFieldType) - ); + testCase(newMatchAllQuery(), builder, writer -> writer.addDocuments(docs), internalMissing -> { + assertEquals(finalDocsMissingAggField, internalMissing.getDocCount()); + assertTrue(AggregationInspectionHelper.hasValue(internalMissing)); + }, List.of(aggFieldType, anotherFieldType)); } public void testSingleValuedFieldWithValueScript() throws IOException { @@ -303,9 +246,7 @@ private void valueScriptTestCase(Script script) throws IOException { final MappedFieldType aggFieldType = new NumberFieldMapper.NumberFieldType("agg_field", NumberType.LONG); final MappedFieldType anotherFieldType = new NumberFieldMapper.NumberFieldType("another_field", NumberType.LONG); - final MissingAggregationBuilder builder = new MissingAggregationBuilder("_name") - .field(aggFieldType.name()) - .script(script); + final MissingAggregationBuilder builder = new MissingAggregationBuilder("_name").field(aggFieldType.name()).script(script); final int numDocs = randomIntBetween(100, 200); int docsMissingAggField = 0; @@ -320,16 +261,10 @@ private void valueScriptTestCase(Script script) throws IOException { } final int finalDocsMissingField = docsMissingAggField; - testCase( - newMatchAllQuery(), - builder, - writer -> writer.addDocuments(docs), - internalMissing -> { - assertEquals(finalDocsMissingField, internalMissing.getDocCount()); - assertTrue(AggregationInspectionHelper.hasValue(internalMissing)); - }, - List.of(aggFieldType, anotherFieldType) - ); + testCase(newMatchAllQuery(), builder, writer -> writer.addDocuments(docs), internalMissing -> { + assertEquals(finalDocsMissingField, internalMissing.getDocCount()); + assertTrue(AggregationInspectionHelper.hasValue(internalMissing)); + }, List.of(aggFieldType, anotherFieldType)); } public void testMultiValuedFieldWithFieldScriptWithParams() throws IOException { @@ -339,15 +274,16 @@ public void testMultiValuedFieldWithFieldScriptWithParams() throws IOException { } public void testMultiValuedFieldWithFieldScript() throws IOException { - fieldScriptTestCase(new Script(ScriptType.INLINE, MockScriptEngine.NAME, FIELD_SCRIPT, singletonMap("field", "agg_field")), - DEFAULT_THRESHOLD_PARAM); + fieldScriptTestCase( + new Script(ScriptType.INLINE, MockScriptEngine.NAME, FIELD_SCRIPT, singletonMap("field", "agg_field")), + DEFAULT_THRESHOLD_PARAM + ); } private void fieldScriptTestCase(Script script, long threshold) throws IOException { final MappedFieldType aggFieldType = new 
NumberFieldMapper.NumberFieldType("agg_field", NumberType.LONG); - final MissingAggregationBuilder builder = new MissingAggregationBuilder("_name") - .script(script); + final MissingAggregationBuilder builder = new MissingAggregationBuilder("_name").script(script); final int numDocs = randomIntBetween(100, 200); int docsBelowThreshold = 0; @@ -358,30 +294,28 @@ private void fieldScriptTestCase(Script script, long threshold) throws IOExcepti if (firstValue < threshold && secondValue < threshold) { docsBelowThreshold++; } - docs.add(Set.of( - new SortedNumericDocValuesField(aggFieldType.name(), firstValue), - new SortedNumericDocValuesField(aggFieldType.name(), secondValue) - )); + docs.add( + Set.of( + new SortedNumericDocValuesField(aggFieldType.name(), firstValue), + new SortedNumericDocValuesField(aggFieldType.name(), secondValue) + ) + ); } final int finalDocsBelowThreshold = docsBelowThreshold; - testCase( - newMatchAllQuery(), - builder, - writer -> writer.addDocuments(docs), - internalMissing -> { - assertEquals(finalDocsBelowThreshold, internalMissing.getDocCount()); - assertTrue(AggregationInspectionHelper.hasValue(internalMissing)); - }, - singleton(aggFieldType) - ); + testCase(newMatchAllQuery(), builder, writer -> writer.addDocuments(docs), internalMissing -> { + assertEquals(finalDocsBelowThreshold, internalMissing.getDocCount()); + assertTrue(AggregationInspectionHelper.hasValue(internalMissing)); + }, singleton(aggFieldType)); } - private void testCase(Query query, - MissingAggregationBuilder builder, - CheckedConsumer writeIndex, - Consumer verify, - Collection fieldTypes) throws IOException { + private void testCase( + Query query, + MissingAggregationBuilder builder, + CheckedConsumer writeIndex, + Consumer verify, + Collection fieldTypes + ) throws IOException { try (Directory directory = newDirectory()) { try (RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory)) { writeIndex.accept(indexWriter); @@ -398,8 +332,7 @@ private void testCase(Query query, @Override protected AggregationBuilder createAggBuilderForTypeTest(MappedFieldType fieldType, String fieldName) { - return new MissingAggregationBuilder("_name") - .field(fieldName); + return new MissingAggregationBuilder("_name").field(fieldName); } @Override @@ -443,7 +376,8 @@ protected ScriptService getMockScriptService() { private static List threshold(String fieldName, long threshold, Map vars) { final LeafDocLookup lookup = (LeafDocLookup) vars.get("doc"); - return lookup.get(fieldName).stream() + return lookup.get(fieldName) + .stream() .map(value -> ((Number) value).longValue()) .filter(value -> value >= threshold) .collect(toList()); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/InternalNestedTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/InternalNestedTests.java index 64f251f3e0050..b9370d365f734 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/InternalNestedTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/InternalNestedTests.java @@ -19,8 +19,12 @@ public class InternalNestedTests extends InternalSingleBucketAggregationTestCase { @Override - protected InternalNested createTestInstance(String name, long docCount, InternalAggregations aggregations, - Map metadata) { + protected InternalNested createTestInstance( + String name, + long docCount, + InternalAggregations aggregations, + Map metadata + ) { return new InternalNested(name, 
docCount, aggregations, metadata); } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/InternalReverseNestedTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/InternalReverseNestedTests.java index 8d9264c7ae132..57405379e5432 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/InternalReverseNestedTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/InternalReverseNestedTests.java @@ -19,8 +19,12 @@ public class InternalReverseNestedTests extends InternalSingleBucketAggregationTestCase { @Override - protected InternalReverseNested createTestInstance(String name, long docCount, InternalAggregations aggregations, - Map metadata) { + protected InternalReverseNested createTestInstance( + String name, + long docCount, + InternalAggregations aggregations, + Map metadata + ) { return new InternalReverseNested(name, docCount, aggregations, metadata); } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorTests.java index a937c03c3ed26..957412b5e0838 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorTests.java @@ -27,10 +27,10 @@ import org.apache.lucene.store.Directory; import org.apache.lucene.util.BytesRef; import org.elasticsearch.Version; -import org.elasticsearch.core.CheckedConsumer; -import org.elasticsearch.core.Tuple; import org.elasticsearch.common.lucene.search.Queries; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.CheckedConsumer; +import org.elasticsearch.core.Tuple; import org.elasticsearch.index.mapper.ContentPath; import org.elasticsearch.index.mapper.IdFieldMapper; import org.elasticsearch.index.mapper.KeywordFieldMapper; @@ -128,9 +128,7 @@ protected IndexReader wrapDirectoryReader(DirectoryReader reader) throws IOExcep protected ScriptService getMockScriptService() { Map, Object>> scripts = new HashMap<>(); scripts.put(INVERSE_SCRIPT, vars -> -((Number) vars.get("_value")).doubleValue()); - MockScriptEngine scriptEngine = new MockScriptEngine(MockScriptEngine.NAME, - scripts, - Collections.emptyMap()); + MockScriptEngine scriptEngine = new MockScriptEngine(MockScriptEngine.NAME, scripts, Collections.emptyMap()); Map engines = Collections.singletonMap(scriptEngine.getType(), scriptEngine); return new ScriptService(Settings.EMPTY, engines, ScriptModule.CORE_CONTEXTS); @@ -142,15 +140,17 @@ public void testNoDocs() throws IOException { // intentionally not writing any docs } try (IndexReader indexReader = wrapInMockESDirectoryReader(DirectoryReader.open(directory))) { - NestedAggregationBuilder nestedBuilder = new NestedAggregationBuilder(NESTED_AGG, - NESTED_OBJECT); - MaxAggregationBuilder maxAgg = new MaxAggregationBuilder(MAX_AGG_NAME) - .field(VALUE_FIELD_NAME); + NestedAggregationBuilder nestedBuilder = new NestedAggregationBuilder(NESTED_AGG, NESTED_OBJECT); + MaxAggregationBuilder maxAgg = new MaxAggregationBuilder(MAX_AGG_NAME).field(VALUE_FIELD_NAME); nestedBuilder.subAggregation(maxAgg); MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(VALUE_FIELD_NAME, NumberFieldMapper.NumberType.LONG); - InternalNested nested = searchAndReduce(newSearcher(indexReader, false, true), - new 
MatchAllDocsQuery(), nestedBuilder, fieldType); + InternalNested nested = searchAndReduce( + newSearcher(indexReader, false, true), + new MatchAllDocsQuery(), + nestedBuilder, + fieldType + ); assertEquals(NESTED_AGG, nested.getName()); assertEquals(0, nested.getDocCount()); @@ -172,8 +172,10 @@ public void testSingleNestingMax() throws IOException { for (int i = 0; i < numRootDocs; i++) { List documents = new ArrayList<>(); int numNestedDocs = randomIntBetween(0, 20); - expectedMaxValue = Math.max(expectedMaxValue, - generateMaxDocs(documents, numNestedDocs, i, NESTED_OBJECT, VALUE_FIELD_NAME)); + expectedMaxValue = Math.max( + expectedMaxValue, + generateMaxDocs(documents, numNestedDocs, i, NESTED_OBJECT, VALUE_FIELD_NAME) + ); expectedNestedDocs += numNestedDocs; Document document = new Document(); @@ -186,15 +188,17 @@ public void testSingleNestingMax() throws IOException { iw.commit(); } try (IndexReader indexReader = wrapInMockESDirectoryReader(DirectoryReader.open(directory))) { - NestedAggregationBuilder nestedBuilder = new NestedAggregationBuilder(NESTED_AGG, - NESTED_OBJECT); - MaxAggregationBuilder maxAgg = new MaxAggregationBuilder(MAX_AGG_NAME) - .field(VALUE_FIELD_NAME); + NestedAggregationBuilder nestedBuilder = new NestedAggregationBuilder(NESTED_AGG, NESTED_OBJECT); + MaxAggregationBuilder maxAgg = new MaxAggregationBuilder(MAX_AGG_NAME).field(VALUE_FIELD_NAME); nestedBuilder.subAggregation(maxAgg); MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(VALUE_FIELD_NAME, NumberFieldMapper.NumberType.LONG); - InternalNested nested = searchAndReduce(newSearcher(indexReader, false, true), - new MatchAllDocsQuery(), nestedBuilder, fieldType); + InternalNested nested = searchAndReduce( + newSearcher(indexReader, false, true), + new MatchAllDocsQuery(), + nestedBuilder, + fieldType + ); assertEquals(expectedNestedDocs, nested.getDocCount()); assertEquals(NESTED_AGG, nested.getName()); @@ -222,14 +226,15 @@ public void testDoubleNestingMax() throws IOException { for (int i = 0; i < numRootDocs; i++) { List documents = new ArrayList<>(); int numNestedDocs = randomIntBetween(0, 20); - expectedMaxValue = Math.max(expectedMaxValue, - generateMaxDocs(documents, numNestedDocs, i, NESTED_OBJECT + "." + NESTED_OBJECT2, VALUE_FIELD_NAME)); + expectedMaxValue = Math.max( + expectedMaxValue, + generateMaxDocs(documents, numNestedDocs, i, NESTED_OBJECT + "." + NESTED_OBJECT2, VALUE_FIELD_NAME) + ); expectedNestedDocs += numNestedDocs; Document document = new Document(); document.add(new Field(IdFieldMapper.NAME, Uid.encodeId(Integer.toString(i)), IdFieldMapper.Defaults.FIELD_TYPE)); - document.add(new Field(NestedPathFieldMapper.NAME, "test", - NestedPathFieldMapper.Defaults.FIELD_TYPE)); + document.add(new Field(NestedPathFieldMapper.NAME, "test", NestedPathFieldMapper.Defaults.FIELD_TYPE)); document.add(sequenceIDFields.primaryTerm); documents.add(document); iw.addDocuments(documents); @@ -237,16 +242,18 @@ public void testDoubleNestingMax() throws IOException { iw.commit(); } try (IndexReader indexReader = wrapInMockESDirectoryReader(DirectoryReader.open(directory))) { - NestedAggregationBuilder nestedBuilder = new NestedAggregationBuilder(NESTED_AGG, - NESTED_OBJECT + "." + NESTED_OBJECT2); - MaxAggregationBuilder maxAgg = new MaxAggregationBuilder(MAX_AGG_NAME) - .field(VALUE_FIELD_NAME); + NestedAggregationBuilder nestedBuilder = new NestedAggregationBuilder(NESTED_AGG, NESTED_OBJECT + "." 
+ NESTED_OBJECT2); + MaxAggregationBuilder maxAgg = new MaxAggregationBuilder(MAX_AGG_NAME).field(VALUE_FIELD_NAME); nestedBuilder.subAggregation(maxAgg); MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(VALUE_FIELD_NAME, NumberFieldMapper.NumberType.LONG); - InternalNested nested = searchAndReduce(newSearcher(indexReader, false, true), - new MatchAllDocsQuery(), nestedBuilder, fieldType); + InternalNested nested = searchAndReduce( + newSearcher(indexReader, false, true), + new MatchAllDocsQuery(), + nestedBuilder, + fieldType + ); assertEquals(expectedNestedDocs, nested.getDocCount()); assertEquals(NESTED_AGG, nested.getName()); @@ -279,13 +286,12 @@ public void testOrphanedDocs() throws IOException { Document document = new Document(); document.add(new Field(IdFieldMapper.NAME, Uid.encodeId(Integer.toString(i)), IdFieldMapper.Defaults.FIELD_TYPE)); - document.add(new Field(NestedPathFieldMapper.NAME, "test", - NestedPathFieldMapper.Defaults.FIELD_TYPE)); + document.add(new Field(NestedPathFieldMapper.NAME, "test", NestedPathFieldMapper.Defaults.FIELD_TYPE)); document.add(sequenceIDFields.primaryTerm); documents.add(document); iw.addDocuments(documents); } - //add some random nested docs that don't belong + // add some random nested docs that don't belong List documents = new ArrayList<>(); int numOrphanedDocs = randomIntBetween(0, 20); generateSumDocs(documents, numOrphanedDocs, 1234, "foo", VALUE_FIELD_NAME); @@ -293,22 +299,23 @@ public void testOrphanedDocs() throws IOException { iw.commit(); } try (IndexReader indexReader = wrapInMockESDirectoryReader(DirectoryReader.open(directory))) { - NestedAggregationBuilder nestedBuilder = new NestedAggregationBuilder(NESTED_AGG, - NESTED_OBJECT); - SumAggregationBuilder sumAgg = new SumAggregationBuilder(SUM_AGG_NAME) - .field(VALUE_FIELD_NAME); + NestedAggregationBuilder nestedBuilder = new NestedAggregationBuilder(NESTED_AGG, NESTED_OBJECT); + SumAggregationBuilder sumAgg = new SumAggregationBuilder(SUM_AGG_NAME).field(VALUE_FIELD_NAME); nestedBuilder.subAggregation(sumAgg); MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(VALUE_FIELD_NAME, NumberFieldMapper.NumberType.LONG); - InternalNested nested = searchAndReduce(newSearcher(indexReader, false, true), - new MatchAllDocsQuery(), nestedBuilder, fieldType); + InternalNested nested = searchAndReduce( + newSearcher(indexReader, false, true), + new MatchAllDocsQuery(), + nestedBuilder, + fieldType + ); assertEquals(expectedNestedDocs, nested.getDocCount()); assertEquals(NESTED_AGG, nested.getName()); assertEquals(expectedNestedDocs, nested.getDocCount()); - InternalSum sum = (InternalSum) - ((InternalAggregation)nested).getProperty(SUM_AGG_NAME); + InternalSum sum = (InternalSum) ((InternalAggregation) nested).getProperty(SUM_AGG_NAME); assertEquals(SUM_AGG_NAME, sum.getName()); assertEquals(expectedSum, sum.getValue(), Double.MIN_VALUE); } @@ -375,16 +382,19 @@ public void testResetRootDocId() throws Exception { } try (IndexReader indexReader = wrapInMockESDirectoryReader(DirectoryReader.open(directory))) { - NestedAggregationBuilder nestedBuilder = new NestedAggregationBuilder(NESTED_AGG, - "nested_field"); + NestedAggregationBuilder nestedBuilder = new NestedAggregationBuilder(NESTED_AGG, "nested_field"); MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(VALUE_FIELD_NAME, NumberFieldMapper.NumberType.LONG); BooleanQuery.Builder bq = new BooleanQuery.Builder(); bq.add(Queries.newNonNestedFilter(), BooleanClause.Occur.MUST); bq.add(new 
TermQuery(new Term(IdFieldMapper.NAME, Uid.encodeId("2"))), BooleanClause.Occur.MUST_NOT); - InternalNested nested = searchAndReduce(newSearcher(indexReader, false, true), - new ConstantScoreQuery(bq.build()), nestedBuilder, fieldType); + InternalNested nested = searchAndReduce( + newSearcher(indexReader, false, true), + new ConstantScoreQuery(bq.build()), + nestedBuilder, + fieldType + ); assertEquals(NESTED_AGG, nested.getName()); // The bug manifests if 6 docs are returned, because currentRootDoc isn't reset the previous child docs from the first @@ -399,30 +409,35 @@ public void testResetRootDocId() throws Exception { public void testNestedOrdering() throws IOException { try (Directory directory = newDirectory()) { try (RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { - iw.addDocuments(generateBook("1", new String[]{"a"}, new int[]{12, 13, 14})); - iw.addDocuments(generateBook("2", new String[]{"b"}, new int[]{5, 50})); - iw.addDocuments(generateBook("3", new String[]{"c"}, new int[]{39, 19})); - iw.addDocuments(generateBook("4", new String[]{"d"}, new int[]{2, 1, 3})); - iw.addDocuments(generateBook("5", new String[]{"a"}, new int[]{70, 10})); - iw.addDocuments(generateBook("6", new String[]{"e"}, new int[]{23, 21})); - iw.addDocuments(generateBook("7", new String[]{"e", "a"}, new int[]{8, 8})); - iw.addDocuments(generateBook("8", new String[]{"f"}, new int[]{12, 14})); - iw.addDocuments(generateBook("9", new String[]{"g", "c", "e"}, new int[]{18, 8})); + iw.addDocuments(generateBook("1", new String[] { "a" }, new int[] { 12, 13, 14 })); + iw.addDocuments(generateBook("2", new String[] { "b" }, new int[] { 5, 50 })); + iw.addDocuments(generateBook("3", new String[] { "c" }, new int[] { 39, 19 })); + iw.addDocuments(generateBook("4", new String[] { "d" }, new int[] { 2, 1, 3 })); + iw.addDocuments(generateBook("5", new String[] { "a" }, new int[] { 70, 10 })); + iw.addDocuments(generateBook("6", new String[] { "e" }, new int[] { 23, 21 })); + iw.addDocuments(generateBook("7", new String[] { "e", "a" }, new int[] { 8, 8 })); + iw.addDocuments(generateBook("8", new String[] { "f" }, new int[] { 12, 14 })); + iw.addDocuments(generateBook("9", new String[] { "g", "c", "e" }, new int[] { 18, 8 })); } try (IndexReader indexReader = wrapInMockESDirectoryReader(DirectoryReader.open(directory))) { - MappedFieldType fieldType1 - = new NumberFieldMapper.NumberFieldType("num_pages", NumberFieldMapper.NumberType.LONG); + MappedFieldType fieldType1 = new NumberFieldMapper.NumberFieldType("num_pages", NumberFieldMapper.NumberType.LONG); MappedFieldType fieldType2 = new KeywordFieldMapper.KeywordFieldType("author"); TermsAggregationBuilder termsBuilder = new TermsAggregationBuilder("authors").userValueTypeHint(ValueType.STRING) - .field("author").order(BucketOrder.aggregation("chapters>num_pages.value", true)); + .field("author") + .order(BucketOrder.aggregation("chapters>num_pages.value", true)); NestedAggregationBuilder nestedBuilder = new NestedAggregationBuilder("chapters", "nested_chapters"); MaxAggregationBuilder maxAgg = new MaxAggregationBuilder("num_pages").field("num_pages"); nestedBuilder.subAggregation(maxAgg); termsBuilder.subAggregation(nestedBuilder); - Terms terms = searchAndReduce(newSearcher(indexReader, false, true), - new MatchAllDocsQuery(), termsBuilder, fieldType1, fieldType2); + Terms terms = searchAndReduce( + newSearcher(indexReader, false, true), + new MatchAllDocsQuery(), + termsBuilder, + fieldType1, + fieldType2 + ); assertEquals(7, 
terms.getBuckets().size()); assertEquals("authors", terms.getName()); @@ -464,14 +479,20 @@ public void testNestedOrdering() throws IOException { // reverse order: termsBuilder = new TermsAggregationBuilder("authors").userValueTypeHint(ValueType.STRING) - .field("author").order(BucketOrder.aggregation("chapters>num_pages.value", false)); + .field("author") + .order(BucketOrder.aggregation("chapters>num_pages.value", false)); nestedBuilder = new NestedAggregationBuilder("chapters", "nested_chapters"); maxAgg = new MaxAggregationBuilder("num_pages").field("num_pages"); nestedBuilder.subAggregation(maxAgg); termsBuilder.subAggregation(nestedBuilder); - terms = searchAndReduce(newSearcher(indexReader, false, true), - new MatchAllDocsQuery(), termsBuilder, fieldType1, fieldType2); + terms = searchAndReduce( + newSearcher(indexReader, false, true), + new MatchAllDocsQuery(), + termsBuilder, + fieldType1, + fieldType2 + ); assertEquals(7, terms.getBuckets().size()); assertEquals("authors", terms.getName()); @@ -529,9 +550,7 @@ public void testNestedOrdering_random() throws IOException { try (RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { int id = 0; for (Tuple book : books) { - iw.addDocuments(generateBook( - String.format(Locale.ROOT, "%03d", id), new String[]{book.v1()}, book.v2()) - ); + iw.addDocuments(generateBook(String.format(Locale.ROOT, "%03d", id), new String[] { book.v1() }, book.v2())); id++; } } @@ -547,20 +566,25 @@ public void testNestedOrdering_random() throws IOException { } }); try (IndexReader indexReader = wrapInMockESDirectoryReader(DirectoryReader.open(directory))) { - MappedFieldType fieldType1 - = new NumberFieldMapper.NumberFieldType("num_pages", NumberFieldMapper.NumberType.LONG); + MappedFieldType fieldType1 = new NumberFieldMapper.NumberFieldType("num_pages", NumberFieldMapper.NumberType.LONG); MappedFieldType fieldType2 = new KeywordFieldMapper.KeywordFieldType("author"); TermsAggregationBuilder termsBuilder = new TermsAggregationBuilder("authors").userValueTypeHint(ValueType.STRING) - .size(books.size()).field("author") + .size(books.size()) + .field("author") .order(BucketOrder.compound(BucketOrder.aggregation("chapters>num_pages.value", true), BucketOrder.key(true))); NestedAggregationBuilder nestedBuilder = new NestedAggregationBuilder("chapters", "nested_chapters"); MinAggregationBuilder minAgg = new MinAggregationBuilder("num_pages").field("num_pages"); nestedBuilder.subAggregation(minAgg); termsBuilder.subAggregation(nestedBuilder); - Terms terms = searchAndReduce(newSearcher(indexReader, false, true), - new MatchAllDocsQuery(), termsBuilder, fieldType1, fieldType2); + Terms terms = searchAndReduce( + newSearcher(indexReader, false, true), + new MatchAllDocsQuery(), + termsBuilder, + fieldType1, + fieldType2 + ); assertEquals(books.size(), terms.getBuckets().size()); assertEquals("authors", terms.getName()); @@ -645,8 +669,7 @@ public void testPreGetChildLeafCollectors() throws IOException { try (IndexReader indexReader = wrapInMockESDirectoryReader(DirectoryReader.open(directory))) { TermsAggregationBuilder valueBuilder = new TermsAggregationBuilder("value").userValueTypeHint(ValueType.STRING) .field("value"); - TermsAggregationBuilder keyBuilder = new TermsAggregationBuilder("key").userValueTypeHint(ValueType.STRING) - .field("key"); + TermsAggregationBuilder keyBuilder = new TermsAggregationBuilder("key").userValueTypeHint(ValueType.STRING).field("key"); keyBuilder.subAggregation(valueBuilder); NestedAggregationBuilder nestedBuilder = 
new NestedAggregationBuilder(NESTED_AGG, "nested_field"); nestedBuilder.subAggregation(keyBuilder); @@ -656,8 +679,13 @@ public void testPreGetChildLeafCollectors() throws IOException { MappedFieldType fieldType1 = new KeywordFieldMapper.KeywordFieldType("key"); MappedFieldType fieldType2 = new KeywordFieldMapper.KeywordFieldType("value"); - Filter filter = searchAndReduce(newSearcher(indexReader, false, true), - Queries.newNonNestedFilter(), filterAggregationBuilder, fieldType1, fieldType2); + Filter filter = searchAndReduce( + newSearcher(indexReader, false, true), + Queries.newNonNestedFilter(), + filterAggregationBuilder, + fieldType1, + fieldType2 + ); assertEquals("filterAgg", filter.getName()); assertEquals(3L, filter.getDocCount()); @@ -702,8 +730,7 @@ public void testFieldAlias() throws IOException { Document document = new Document(); document.add(new Field(IdFieldMapper.NAME, Uid.encodeId(Integer.toString(i)), IdFieldMapper.Defaults.FIELD_TYPE)); - document.add(new Field(NestedPathFieldMapper.NAME, "test", - NestedPathFieldMapper.Defaults.FIELD_TYPE)); + document.add(new Field(NestedPathFieldMapper.NAME, "test", NestedPathFieldMapper.Defaults.FIELD_TYPE)); document.add(sequenceIDFields.primaryTerm); documents.add(document); iw.addDocuments(documents); @@ -712,15 +739,13 @@ public void testFieldAlias() throws IOException { } try (IndexReader indexReader = wrapInMockESDirectoryReader(DirectoryReader.open(directory))) { - NestedAggregationBuilder agg = nested(NESTED_AGG, NESTED_OBJECT).subAggregation( - max(MAX_AGG_NAME).field(VALUE_FIELD_NAME)); + NestedAggregationBuilder agg = nested(NESTED_AGG, NESTED_OBJECT).subAggregation(max(MAX_AGG_NAME).field(VALUE_FIELD_NAME)); NestedAggregationBuilder aliasAgg = nested(NESTED_AGG, NESTED_OBJECT).subAggregation( - max(MAX_AGG_NAME).field(VALUE_FIELD_NAME + "-alias")); + max(MAX_AGG_NAME).field(VALUE_FIELD_NAME + "-alias") + ); - InternalNested nested = searchAndReduce(newSearcher(indexReader, false, true), - new MatchAllDocsQuery(), agg, fieldType); - Nested aliasNested = searchAndReduce(newSearcher(indexReader, false, true), - new MatchAllDocsQuery(), aliasAgg, fieldType); + InternalNested nested = searchAndReduce(newSearcher(indexReader, false, true), new MatchAllDocsQuery(), agg, fieldType); + Nested aliasNested = searchAndReduce(newSearcher(indexReader, false, true), new MatchAllDocsQuery(), aliasAgg, fieldType); assertEquals(nested, aliasNested); assertEquals(expectedNestedDocs, nested.getDocCount()); @@ -740,14 +765,12 @@ public void testNestedWithPipeline() throws IOException { try (RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { for (int i = 0; i < numRootDocs; i++) { List documents = new ArrayList<>(); - expectedMaxValue = Math.max(expectedMaxValue, - generateMaxDocs(documents, 1, i, NESTED_OBJECT, VALUE_FIELD_NAME)); + expectedMaxValue = Math.max(expectedMaxValue, generateMaxDocs(documents, 1, i, NESTED_OBJECT, VALUE_FIELD_NAME)); expectedNestedDocs += 1; Document document = new Document(); document.add(new Field(IdFieldMapper.NAME, Uid.encodeId(Integer.toString(i)), IdFieldMapper.Defaults.FIELD_TYPE)); - document.add(new Field(NestedPathFieldMapper.NAME, "test", - NestedPathFieldMapper.Defaults.FIELD_TYPE)); + document.add(new Field(NestedPathFieldMapper.NAME, "test", NestedPathFieldMapper.Defaults.FIELD_TYPE)); document.add(sequenceIDFields.primaryTerm); documents.add(document); iw.addDocuments(documents); @@ -755,18 +778,27 @@ public void testNestedWithPipeline() throws IOException { iw.commit(); } try 
(IndexReader indexReader = wrapInMockESDirectoryReader(DirectoryReader.open(directory))) { - NestedAggregationBuilder nestedBuilder = new NestedAggregationBuilder(NESTED_AGG, NESTED_OBJECT) - .subAggregation(new TermsAggregationBuilder("terms").field(VALUE_FIELD_NAME).userValueTypeHint(ValueType.NUMERIC) + NestedAggregationBuilder nestedBuilder = new NestedAggregationBuilder(NESTED_AGG, NESTED_OBJECT).subAggregation( + new TermsAggregationBuilder("terms").field(VALUE_FIELD_NAME) + .userValueTypeHint(ValueType.NUMERIC) .subAggregation(new MaxAggregationBuilder(MAX_AGG_NAME).field(VALUE_FIELD_NAME)) - .subAggregation(new BucketScriptPipelineAggregationBuilder("bucketscript", - Collections.singletonMap("_value", MAX_AGG_NAME), - new Script(ScriptType.INLINE, MockScriptEngine.NAME, INVERSE_SCRIPT, Collections.emptyMap())))); + .subAggregation( + new BucketScriptPipelineAggregationBuilder( + "bucketscript", + Collections.singletonMap("_value", MAX_AGG_NAME), + new Script(ScriptType.INLINE, MockScriptEngine.NAME, INVERSE_SCRIPT, Collections.emptyMap()) + ) + ) + ); - MappedFieldType fieldType - = new NumberFieldMapper.NumberFieldType(VALUE_FIELD_NAME, NumberFieldMapper.NumberType.LONG); + MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(VALUE_FIELD_NAME, NumberFieldMapper.NumberType.LONG); - InternalNested nested = searchAndReduce(newSearcher(indexReader, false, true), - new MatchAllDocsQuery(), nestedBuilder, fieldType); + InternalNested nested = searchAndReduce( + newSearcher(indexReader, false, true), + new MatchAllDocsQuery(), + nestedBuilder, + fieldType + ); assertEquals(expectedNestedDocs, nested.getDocCount()); assertEquals(NESTED_AGG, nested.getName()); @@ -793,21 +825,29 @@ public void testNestedUnderTerms() throws IOException { int numProducts = scaledRandomIntBetween(1, 100); int numResellers = scaledRandomIntBetween(1, 100); - AggregationBuilder b = new TermsAggregationBuilder("products").field("product_id").size(numProducts) - .subAggregation(new NestedAggregationBuilder("nested", "nested_reseller") - .subAggregation(new TermsAggregationBuilder("resellers").field("reseller_id").size(numResellers))); + AggregationBuilder b = new TermsAggregationBuilder("products").field("product_id") + .size(numProducts) + .subAggregation( + new NestedAggregationBuilder("nested", "nested_reseller").subAggregation( + new TermsAggregationBuilder("resellers").field("reseller_id").size(numResellers) + ) + ); testCase(b, new MatchAllDocsQuery(), buildResellerData(numProducts, numResellers), result -> { LongTerms products = (LongTerms) result; - assertThat(products.getBuckets().stream().map(LongTerms.Bucket::getKeyAsNumber).collect(toList()), - equalTo(LongStream.range(0, numProducts).mapToObj(Long::valueOf).collect(toList()))); + assertThat( + products.getBuckets().stream().map(LongTerms.Bucket::getKeyAsNumber).collect(toList()), + equalTo(LongStream.range(0, numProducts).mapToObj(Long::valueOf).collect(toList())) + ); for (int p = 0; p < numProducts; p++) { LongTerms.Bucket bucket = products.getBucketByKey(Integer.toString(p)); assertThat(bucket.getDocCount(), equalTo(1L)); InternalNested nested = bucket.getAggregations().get("nested"); assertThat(nested.getDocCount(), equalTo((long) numResellers)); LongTerms resellers = nested.getAggregations().get("resellers"); - assertThat(resellers.getBuckets().stream().map(LongTerms.Bucket::getKeyAsNumber).collect(toList()), - equalTo(LongStream.range(0, numResellers).mapToObj(Long::valueOf).collect(toList()))); + assertThat( + 
resellers.getBuckets().stream().map(LongTerms.Bucket::getKeyAsNumber).collect(toList()), + equalTo(LongStream.range(0, numResellers).mapToObj(Long::valueOf).collect(toList())) + ); } }, resellersMappedFields()); } @@ -832,16 +872,13 @@ public static CheckedConsumer buildResellerData( } public static MappedFieldType[] resellersMappedFields() { - MappedFieldType productIdField - = new NumberFieldMapper.NumberFieldType("product_id", NumberFieldMapper.NumberType.LONG); - MappedFieldType resellerIdField - = new NumberFieldMapper.NumberFieldType("reseller_id", NumberFieldMapper.NumberType.LONG); - return new MappedFieldType[] {productIdField, resellerIdField}; + MappedFieldType productIdField = new NumberFieldMapper.NumberFieldType("product_id", NumberFieldMapper.NumberType.LONG); + MappedFieldType resellerIdField = new NumberFieldMapper.NumberFieldType("reseller_id", NumberFieldMapper.NumberType.LONG); + return new MappedFieldType[] { productIdField, resellerIdField }; } private double generateMaxDocs(List documents, int numNestedDocs, int id, String path, String fieldName) { - return DoubleStream.of(generateDocuments(documents, numNestedDocs, id, path, fieldName)) - .max().orElse(Double.NEGATIVE_INFINITY); + return DoubleStream.of(generateDocuments(documents, numNestedDocs, id, path, fieldName)).max().orElse(Double.NEGATIVE_INFINITY); } private double generateSumDocs(List documents, int numNestedDocs, int id, String path, String fieldName) { @@ -852,10 +889,8 @@ private static double[] generateDocuments(List documents, int numNeste double[] values = new double[numNestedDocs]; for (int nested = 0; nested < numNestedDocs; nested++) { Document document = new Document(); - document.add(new Field(IdFieldMapper.NAME, Uid.encodeId(Integer.toString(id)), - IdFieldMapper.Defaults.NESTED_FIELD_TYPE)); - document.add(new Field(NestedPathFieldMapper.NAME, path, - NestedPathFieldMapper.Defaults.FIELD_TYPE)); + document.add(new Field(IdFieldMapper.NAME, Uid.encodeId(Integer.toString(id)), IdFieldMapper.Defaults.NESTED_FIELD_TYPE)); + document.add(new Field(NestedPathFieldMapper.NAME, path, NestedPathFieldMapper.Defaults.FIELD_TYPE)); long value = randomNonNegativeLong() % 10000; document.add(new SortedNumericDocValuesField(fieldName, value)); documents.add(document); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/ReverseNestedAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/ReverseNestedAggregatorTests.java index 4ea5d069e3965..e8b21f1cd8b63 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/ReverseNestedAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/ReverseNestedAggregatorTests.java @@ -64,25 +64,19 @@ public void testNoDocs() throws IOException { // intentionally not writing any docs } try (IndexReader indexReader = wrapInMockESDirectoryReader(DirectoryReader.open(directory))) { - NestedAggregationBuilder nestedBuilder = new NestedAggregationBuilder(NESTED_AGG, - NESTED_OBJECT); - ReverseNestedAggregationBuilder reverseNestedBuilder - = new ReverseNestedAggregationBuilder(REVERSE_AGG_NAME); + NestedAggregationBuilder nestedBuilder = new NestedAggregationBuilder(NESTED_AGG, NESTED_OBJECT); + ReverseNestedAggregationBuilder reverseNestedBuilder = new ReverseNestedAggregationBuilder(REVERSE_AGG_NAME); nestedBuilder.subAggregation(reverseNestedBuilder); - MaxAggregationBuilder maxAgg = new MaxAggregationBuilder(MAX_AGG_NAME) - 
.field(VALUE_FIELD_NAME); + MaxAggregationBuilder maxAgg = new MaxAggregationBuilder(MAX_AGG_NAME).field(VALUE_FIELD_NAME); reverseNestedBuilder.subAggregation(maxAgg); MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(VALUE_FIELD_NAME, NumberFieldMapper.NumberType.LONG); - Nested nested = searchAndReduce(newSearcher(indexReader, false, true), - new MatchAllDocsQuery(), nestedBuilder, fieldType); - ReverseNested reverseNested = (ReverseNested) - ((InternalAggregation)nested).getProperty(REVERSE_AGG_NAME); + Nested nested = searchAndReduce(newSearcher(indexReader, false, true), new MatchAllDocsQuery(), nestedBuilder, fieldType); + ReverseNested reverseNested = (ReverseNested) ((InternalAggregation) nested).getProperty(REVERSE_AGG_NAME); assertEquals(REVERSE_AGG_NAME, reverseNested.getName()); assertEquals(0, reverseNested.getDocCount()); - InternalMax max = (InternalMax) - ((InternalAggregation)reverseNested).getProperty(MAX_AGG_NAME); + InternalMax max = (InternalMax) ((InternalAggregation) reverseNested).getProperty(MAX_AGG_NAME); assertEquals(MAX_AGG_NAME, max.getName()); assertEquals(Double.NEGATIVE_INFINITY, max.getValue(), Double.MIN_VALUE); } @@ -101,18 +95,16 @@ public void testMaxFromParentDocs() throws IOException { int numNestedDocs = randomIntBetween(0, 20); for (int nested = 0; nested < numNestedDocs; nested++) { Document document = new Document(); - document.add(new Field(IdFieldMapper.NAME, Uid.encodeId(Integer.toString(i)), - IdFieldMapper.Defaults.NESTED_FIELD_TYPE)); - document.add(new Field(NestedPathFieldMapper.NAME, NESTED_OBJECT, - NestedPathFieldMapper.Defaults.FIELD_TYPE)); + document.add( + new Field(IdFieldMapper.NAME, Uid.encodeId(Integer.toString(i)), IdFieldMapper.Defaults.NESTED_FIELD_TYPE) + ); + document.add(new Field(NestedPathFieldMapper.NAME, NESTED_OBJECT, NestedPathFieldMapper.Defaults.FIELD_TYPE)); documents.add(document); expectedNestedDocs++; } Document document = new Document(); - document.add(new Field(IdFieldMapper.NAME, Uid.encodeId(Integer.toString(i)), - IdFieldMapper.Defaults.FIELD_TYPE)); - document.add(new Field(NestedPathFieldMapper.NAME, "test", - NestedPathFieldMapper.Defaults.FIELD_TYPE)); + document.add(new Field(IdFieldMapper.NAME, Uid.encodeId(Integer.toString(i)), IdFieldMapper.Defaults.FIELD_TYPE)); + document.add(new Field(NestedPathFieldMapper.NAME, "test", NestedPathFieldMapper.Defaults.FIELD_TYPE)); long value = randomNonNegativeLong() % 10000; document.add(new SortedNumericDocValuesField(VALUE_FIELD_NAME, value)); document.add(SeqNoFieldMapper.SequenceIDFields.emptySeqID().primaryTerm); @@ -126,27 +118,21 @@ public void testMaxFromParentDocs() throws IOException { iw.commit(); } try (IndexReader indexReader = wrapInMockESDirectoryReader(DirectoryReader.open(directory))) { - NestedAggregationBuilder nestedBuilder = new NestedAggregationBuilder(NESTED_AGG, - NESTED_OBJECT); - ReverseNestedAggregationBuilder reverseNestedBuilder - = new ReverseNestedAggregationBuilder(REVERSE_AGG_NAME); + NestedAggregationBuilder nestedBuilder = new NestedAggregationBuilder(NESTED_AGG, NESTED_OBJECT); + ReverseNestedAggregationBuilder reverseNestedBuilder = new ReverseNestedAggregationBuilder(REVERSE_AGG_NAME); nestedBuilder.subAggregation(reverseNestedBuilder); - MaxAggregationBuilder maxAgg = new MaxAggregationBuilder(MAX_AGG_NAME) - .field(VALUE_FIELD_NAME); + MaxAggregationBuilder maxAgg = new MaxAggregationBuilder(MAX_AGG_NAME).field(VALUE_FIELD_NAME); reverseNestedBuilder.subAggregation(maxAgg); MappedFieldType fieldType = new 
NumberFieldMapper.NumberFieldType(VALUE_FIELD_NAME, NumberFieldMapper.NumberType.LONG); - Nested nested = searchAndReduce(newSearcher(indexReader, false, true), - new MatchAllDocsQuery(), nestedBuilder, fieldType); + Nested nested = searchAndReduce(newSearcher(indexReader, false, true), new MatchAllDocsQuery(), nestedBuilder, fieldType); assertEquals(expectedNestedDocs, nested.getDocCount()); - ReverseNested reverseNested = (ReverseNested) - ((InternalAggregation)nested).getProperty(REVERSE_AGG_NAME); + ReverseNested reverseNested = (ReverseNested) ((InternalAggregation) nested).getProperty(REVERSE_AGG_NAME); assertEquals(REVERSE_AGG_NAME, reverseNested.getName()); assertEquals(expectedParentDocs, reverseNested.getDocCount()); - InternalMax max = (InternalMax) - ((InternalAggregation)reverseNested).getProperty(MAX_AGG_NAME); + InternalMax max = (InternalMax) ((InternalAggregation) reverseNested).getProperty(MAX_AGG_NAME); assertEquals(MAX_AGG_NAME, max.getName()); assertEquals(expectedMaxValue, max.getValue(), Double.MIN_VALUE); } @@ -170,17 +156,15 @@ public void testFieldAlias() throws IOException { for (int nested = 0; nested < numNestedDocs; nested++) { Document document = new Document(); - document.add(new Field(IdFieldMapper.NAME, Uid.encodeId(Integer.toString(i)), - IdFieldMapper.Defaults.NESTED_FIELD_TYPE)); - document.add(new Field(NestedPathFieldMapper.NAME, NESTED_OBJECT, - NestedPathFieldMapper.Defaults.FIELD_TYPE)); + document.add( + new Field(IdFieldMapper.NAME, Uid.encodeId(Integer.toString(i)), IdFieldMapper.Defaults.NESTED_FIELD_TYPE) + ); + document.add(new Field(NestedPathFieldMapper.NAME, NESTED_OBJECT, NestedPathFieldMapper.Defaults.FIELD_TYPE)); documents.add(document); } Document document = new Document(); - document.add(new Field(IdFieldMapper.NAME, Uid.encodeId(Integer.toString(i)), - IdFieldMapper.Defaults.FIELD_TYPE)); - document.add(new Field(NestedPathFieldMapper.NAME, "test", - NestedPathFieldMapper.Defaults.FIELD_TYPE)); + document.add(new Field(IdFieldMapper.NAME, Uid.encodeId(Integer.toString(i)), IdFieldMapper.Defaults.FIELD_TYPE)); + document.add(new Field(NestedPathFieldMapper.NAME, "test", NestedPathFieldMapper.Defaults.FIELD_TYPE)); long value = randomNonNegativeLong() % 10000; document.add(new SortedNumericDocValuesField(VALUE_FIELD_NAME, value)); @@ -196,14 +180,14 @@ public void testFieldAlias() throws IOException { MaxAggregationBuilder aliasMaxAgg = max(MAX_AGG_NAME).field(VALUE_FIELD_NAME + "-alias"); NestedAggregationBuilder agg = nested(NESTED_AGG, NESTED_OBJECT).subAggregation( - reverseNested(REVERSE_AGG_NAME).subAggregation(maxAgg)); + reverseNested(REVERSE_AGG_NAME).subAggregation(maxAgg) + ); NestedAggregationBuilder aliasAgg = nested(NESTED_AGG, NESTED_OBJECT).subAggregation( - reverseNested(REVERSE_AGG_NAME).subAggregation(aliasMaxAgg)); + reverseNested(REVERSE_AGG_NAME).subAggregation(aliasMaxAgg) + ); - Nested nested = searchAndReduce(newSearcher(indexReader, false, true), - new MatchAllDocsQuery(), agg, fieldType); - Nested aliasNested = searchAndReduce(newSearcher(indexReader, false, true), - new MatchAllDocsQuery(), aliasAgg, fieldType); + Nested nested = searchAndReduce(newSearcher(indexReader, false, true), new MatchAllDocsQuery(), agg, fieldType); + Nested aliasNested = searchAndReduce(newSearcher(indexReader, false, true), new MatchAllDocsQuery(), aliasAgg, fieldType); ReverseNested reverseNested = nested.getAggregations().get(REVERSE_AGG_NAME); ReverseNested aliasReverseNested = 
aliasNested.getAggregations().get(REVERSE_AGG_NAME); @@ -218,24 +202,33 @@ public void testNestedUnderTerms() throws IOException { int numProducts = scaledRandomIntBetween(1, 100); int numResellers = scaledRandomIntBetween(1, 100); - AggregationBuilder b = new NestedAggregationBuilder("nested", "nested_reseller") - .subAggregation(new TermsAggregationBuilder("resellers").field("reseller_id").size(numResellers) - .subAggregation(new ReverseNestedAggregationBuilder("reverse_nested") - .subAggregation(new TermsAggregationBuilder("products").field("product_id").size(numProducts)))); + AggregationBuilder b = new NestedAggregationBuilder("nested", "nested_reseller").subAggregation( + new TermsAggregationBuilder("resellers").field("reseller_id") + .size(numResellers) + .subAggregation( + new ReverseNestedAggregationBuilder("reverse_nested").subAggregation( + new TermsAggregationBuilder("products").field("product_id").size(numProducts) + ) + ) + ); testCase(b, new MatchAllDocsQuery(), NestedAggregatorTests.buildResellerData(numProducts, numResellers), result -> { InternalNested nested = (InternalNested) result; assertThat(nested.getDocCount(), equalTo((long) numProducts * numResellers)); LongTerms resellers = nested.getAggregations().get("resellers"); - assertThat(resellers.getBuckets().stream().map(LongTerms.Bucket::getKeyAsNumber).collect(toList()), - equalTo(LongStream.range(0, numResellers).mapToObj(Long::valueOf).collect(toList()))); + assertThat( + resellers.getBuckets().stream().map(LongTerms.Bucket::getKeyAsNumber).collect(toList()), + equalTo(LongStream.range(0, numResellers).mapToObj(Long::valueOf).collect(toList())) + ); for (int r = 0; r < numResellers; r++) { LongTerms.Bucket bucket = resellers.getBucketByKey(Integer.toString(r)); assertThat(bucket.getDocCount(), equalTo((long) numProducts)); InternalReverseNested reverseNested = bucket.getAggregations().get("reverse_nested"); assertThat(reverseNested.getDocCount(), equalTo((long) numProducts)); LongTerms products = reverseNested.getAggregations().get("products"); - assertThat(products.getBuckets().stream().map(LongTerms.Bucket::getKeyAsNumber).collect(toList()), - equalTo(LongStream.range(0, numProducts).mapToObj(Long::valueOf).collect(toList()))); + assertThat( + products.getBuckets().stream().map(LongTerms.Bucket::getKeyAsNumber).collect(toList()), + equalTo(LongStream.range(0, numProducts).mapToObj(Long::valueOf).collect(toList())) + ); } }, NestedAggregatorTests.resellersMappedFields()); } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/BinaryRangeAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/BinaryRangeAggregatorTests.java index 73f9ddc16ff15..bcb83919d6a77 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/BinaryRangeAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/BinaryRangeAggregatorTests.java @@ -7,10 +7,7 @@ */ package org.elasticsearch.search.aggregations.bucket.range; -import java.io.IOException; -import java.util.Arrays; -import java.util.HashSet; -import java.util.Set; +import com.carrotsearch.hppc.LongHashSet; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.TestUtil; @@ -21,7 +18,10 @@ import org.elasticsearch.search.aggregations.bucket.range.BinaryRangeAggregator.SortedSetRangeLeafCollector; import org.elasticsearch.test.ESTestCase; -import com.carrotsearch.hppc.LongHashSet; +import java.io.IOException; +import java.util.Arrays; 
+import java.util.HashSet; +import java.util.Set; public class BinaryRangeAggregatorTests extends ESTestCase { @@ -73,9 +73,11 @@ private void doTestSortedSetRangeLeafCollector(int maxNumValuesPerDoc) throws Ex final int numRanges = randomIntBetween(1, 10); BinaryRangeAggregator.Range[] ranges = new BinaryRangeAggregator.Range[numRanges]; for (int i = 0; i < numRanges; ++i) { - ranges[i] = new BinaryRangeAggregator.Range(Integer.toString(i), - randomBoolean() ? null : new BytesRef(TestUtil.randomSimpleString(random(), randomInt(2))), - randomBoolean() ? null : new BytesRef(TestUtil.randomSimpleString(random(), randomInt(2)))); + ranges[i] = new BinaryRangeAggregator.Range( + Integer.toString(i), + randomBoolean() ? null : new BytesRef(TestUtil.randomSimpleString(random(), randomInt(2))), + randomBoolean() ? null : new BytesRef(TestUtil.randomSimpleString(random(), randomInt(2))) + ); } Arrays.sort(ranges, BinaryRangeAggregator.RANGE_COMPARATOR); @@ -108,7 +110,7 @@ protected void doCollect(LeafBucketCollector sub, int doc, long bucket) throws I for (long ord : ords) { BytesRef term = terms[(int) ord]; if ((ranges[i].from == null || ranges[i].from.compareTo(term) <= 0) - && (ranges[i].to == null || ranges[i].to.compareTo(term) > 0)) { + && (ranges[i].to == null || ranges[i].to.compareTo(term) > 0)) { expectedCounts[i]++; break; } @@ -172,9 +174,11 @@ private void doTestSortedBinaryRangeLeafCollector(int maxNumValuesPerDoc) throws final int numRanges = randomIntBetween(1, 10); BinaryRangeAggregator.Range[] ranges = new BinaryRangeAggregator.Range[numRanges]; for (int i = 0; i < numRanges; ++i) { - ranges[i] = new BinaryRangeAggregator.Range(Integer.toString(i), - randomBoolean() ? null : new BytesRef(TestUtil.randomSimpleString(random(), randomInt(2))), - randomBoolean() ? null : new BytesRef(TestUtil.randomSimpleString(random(), randomInt(2)))); + ranges[i] = new BinaryRangeAggregator.Range( + Integer.toString(i), + randomBoolean() ? null : new BytesRef(TestUtil.randomSimpleString(random(), randomInt(2))), + randomBoolean() ? 
null : new BytesRef(TestUtil.randomSimpleString(random(), randomInt(2))) + ); } Arrays.sort(ranges, BinaryRangeAggregator.RANGE_COMPARATOR); @@ -207,7 +211,7 @@ protected void doCollect(LeafBucketCollector sub, int doc, long bucket) throws I for (long ord : ords) { BytesRef term = terms[(int) ord]; if ((ranges[i].from == null || ranges[i].from.compareTo(term) <= 0) - && (ranges[i].to == null || ranges[i].to.compareTo(term) > 0)) { + && (ranges[i].to == null || ranges[i].to.compareTo(term) > 0)) { expectedCounts[i]++; break; } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/DateRangeAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/DateRangeAggregatorTests.java index c9b267393a126..130ad41e4d887 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/DateRangeAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/DateRangeAggregatorTests.java @@ -58,17 +58,11 @@ public class DateRangeAggregatorTests extends AggregatorTestCase { public void testBooleanFieldDeprecated() throws IOException { final String fieldName = "bogusBoolean"; - testCase( - new DateRangeAggregationBuilder("name").field(fieldName).addRange("false", "true"), - new MatchAllDocsQuery(), - iw -> { - Document d = new Document(); - d.add(new SortedNumericDocValuesField(fieldName, 0)); - iw.addDocument(d); - }, - a -> {}, - new BooleanFieldMapper.BooleanFieldType(fieldName) - ); + testCase(new DateRangeAggregationBuilder("name").field(fieldName).addRange("false", "true"), new MatchAllDocsQuery(), iw -> { + Document d = new Document(); + d.add(new SortedNumericDocValuesField(fieldName, 0)); + iw.addDocument(d); + }, a -> {}, new BooleanFieldMapper.BooleanFieldType(fieldName)); assertWarnings("Running Range or DateRange aggregations on [boolean] fields is deprecated"); } @@ -132,11 +126,10 @@ public void testMatchesNumericDocValues() throws IOException { }); } - public void testMissingDateStringWithDateField() throws IOException { + public void testMissingDateStringWithDateField() throws IOException { DateFieldMapper.DateFieldType fieldType = new DateFieldMapper.DateFieldType(DATE_FIELD_NAME); - DateRangeAggregationBuilder aggregationBuilder = new DateRangeAggregationBuilder("date_range") - .field(DATE_FIELD_NAME) + DateRangeAggregationBuilder aggregationBuilder = new DateRangeAggregationBuilder("date_range").field(DATE_FIELD_NAME) .missing("2015-11-13T16:14:34") .addRange("2015-11-13", "2015-11-14"); @@ -194,28 +187,23 @@ public void testUnboundedRanges() throws IOException { ); } - public void testNumberFieldDateRanges() throws IOException { - DateRangeAggregationBuilder aggregationBuilder = new DateRangeAggregationBuilder("date_range") - .field(NUMBER_FIELD_NAME) + public void testNumberFieldDateRanges() throws IOException { + DateRangeAggregationBuilder aggregationBuilder = new DateRangeAggregationBuilder("date_range").field(NUMBER_FIELD_NAME) .addRange("2015-11-13", "2015-11-14"); - MappedFieldType fieldType - = new NumberFieldMapper.NumberFieldType(NUMBER_FIELD_NAME, NumberFieldMapper.NumberType.INTEGER); + MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(NUMBER_FIELD_NAME, NumberFieldMapper.NumberType.INTEGER); - expectThrows(NumberFormatException.class, - () -> testCase(aggregationBuilder, new MatchAllDocsQuery(), iw -> { - iw.addDocument(singleton(new NumericDocValuesField(NUMBER_FIELD_NAME, 7))); - iw.addDocument(singleton(new 
NumericDocValuesField(NUMBER_FIELD_NAME, 1))); - }, range -> fail("Should have thrown exception"), fieldType)); + expectThrows(NumberFormatException.class, () -> testCase(aggregationBuilder, new MatchAllDocsQuery(), iw -> { + iw.addDocument(singleton(new NumericDocValuesField(NUMBER_FIELD_NAME, 7))); + iw.addDocument(singleton(new NumericDocValuesField(NUMBER_FIELD_NAME, 1))); + }, range -> fail("Should have thrown exception"), fieldType)); } - public void testNumberFieldNumberRanges() throws IOException { - DateRangeAggregationBuilder aggregationBuilder = new DateRangeAggregationBuilder("date_range") - .field(NUMBER_FIELD_NAME) + public void testNumberFieldNumberRanges() throws IOException { + DateRangeAggregationBuilder aggregationBuilder = new DateRangeAggregationBuilder("date_range").field(NUMBER_FIELD_NAME) .addRange(0, 5); - MappedFieldType fieldType - = new NumberFieldMapper.NumberFieldType(NUMBER_FIELD_NAME, NumberFieldMapper.NumberType.INTEGER); + MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(NUMBER_FIELD_NAME, NumberFieldMapper.NumberType.INTEGER); testCase(aggregationBuilder, new MatchAllDocsQuery(), iw -> { iw.addDocument(List.of(new NumericDocValuesField(NUMBER_FIELD_NAME, 7), new IntPoint(NUMBER_FIELD_NAME, 7))); @@ -228,30 +216,25 @@ public void testNumberFieldNumberRanges() throws IOException { }, fieldType); } - public void testMissingDateStringWithNumberField() throws IOException { - DateRangeAggregationBuilder aggregationBuilder = new DateRangeAggregationBuilder("date_range") - .field(NUMBER_FIELD_NAME) + public void testMissingDateStringWithNumberField() throws IOException { + DateRangeAggregationBuilder aggregationBuilder = new DateRangeAggregationBuilder("date_range").field(NUMBER_FIELD_NAME) .addRange("2015-11-13", "2015-11-14") .missing("1979-01-01T00:00:00"); - MappedFieldType fieldType - = new NumberFieldMapper.NumberFieldType(NUMBER_FIELD_NAME, NumberFieldMapper.NumberType.INTEGER); + MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(NUMBER_FIELD_NAME, NumberFieldMapper.NumberType.INTEGER); - expectThrows(NumberFormatException.class, - () -> testCase(aggregationBuilder, new MatchAllDocsQuery(), iw -> { - iw.addDocument(singleton(new NumericDocValuesField(NUMBER_FIELD_NAME, 7))); - iw.addDocument(singleton(new NumericDocValuesField(NUMBER_FIELD_NAME, 1))); - }, range -> fail("Should have thrown exception"), fieldType)); + expectThrows(NumberFormatException.class, () -> testCase(aggregationBuilder, new MatchAllDocsQuery(), iw -> { + iw.addDocument(singleton(new NumericDocValuesField(NUMBER_FIELD_NAME, 7))); + iw.addDocument(singleton(new NumericDocValuesField(NUMBER_FIELD_NAME, 1))); + }, range -> fail("Should have thrown exception"), fieldType)); } public void testUnmappedWithMissingNumber() throws IOException { - DateRangeAggregationBuilder aggregationBuilder = new DateRangeAggregationBuilder("date_range") - .field("does_not_exist") + DateRangeAggregationBuilder aggregationBuilder = new DateRangeAggregationBuilder("date_range").field("does_not_exist") .addRange("2015-11-13", "2015-11-14") .missing(1447438575000L); // 2015-11-13 6:16:15 - MappedFieldType fieldType - = new NumberFieldMapper.NumberFieldType(NUMBER_FIELD_NAME, NumberFieldMapper.NumberType.INTEGER); + MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(NUMBER_FIELD_NAME, NumberFieldMapper.NumberType.INTEGER); testCase(aggregationBuilder, new MatchAllDocsQuery(), iw -> { iw.addDocument(singleton(new NumericDocValuesField(NUMBER_FIELD_NAME, 7))); @@ 
-265,70 +248,66 @@ public void testUnmappedWithMissingNumber() throws IOException { } public void testUnmappedWithMissingDate() throws IOException { - DateRangeAggregationBuilder aggregationBuilder = new DateRangeAggregationBuilder("date_range") - .field("does_not_exist") + DateRangeAggregationBuilder aggregationBuilder = new DateRangeAggregationBuilder("date_range").field("does_not_exist") .addRange("2015-11-13", "2015-11-14") .missing("2015-11-13T10:11:12"); - MappedFieldType fieldType - = new NumberFieldMapper.NumberFieldType(NUMBER_FIELD_NAME, NumberFieldMapper.NumberType.INTEGER); + MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(NUMBER_FIELD_NAME, NumberFieldMapper.NumberType.INTEGER); - testCase(aggregationBuilder, new MatchAllDocsQuery(), iw -> { - iw.addDocument(singleton(new NumericDocValuesField(NUMBER_FIELD_NAME, 7))); - iw.addDocument(singleton(new NumericDocValuesField(NUMBER_FIELD_NAME, 1))); - }, range -> { - List ranges = range.getBuckets(); - assertEquals(1, ranges.size()); - assertEquals(2, ranges.get(0).getDocCount()); - assertTrue(AggregationInspectionHelper.hasValue(range)); - }, fieldType); + testCase(aggregationBuilder, new MatchAllDocsQuery(), iw -> { + iw.addDocument(singleton(new NumericDocValuesField(NUMBER_FIELD_NAME, 7))); + iw.addDocument(singleton(new NumericDocValuesField(NUMBER_FIELD_NAME, 1))); + }, range -> { + List ranges = range.getBuckets(); + assertEquals(1, ranges.size()); + assertEquals(2, ranges.get(0).getDocCount()); + assertTrue(AggregationInspectionHelper.hasValue(range)); + }, fieldType); } public void testKeywordField() { - DateRangeAggregationBuilder aggregationBuilder = new DateRangeAggregationBuilder("date_range") - .field("not_a_number") + DateRangeAggregationBuilder aggregationBuilder = new DateRangeAggregationBuilder("date_range").field("not_a_number") .addRange("2015-11-13", "2015-11-14"); MappedFieldType fieldType = new KeywordFieldMapper.KeywordFieldType("not_a_number"); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, - () -> testCase(aggregationBuilder, new MatchAllDocsQuery(), iw -> { - iw.addDocument(singleton(new SortedSetDocValuesField("string", new BytesRef("foo")))); - }, range -> fail("Should have thrown exception"), fieldType)); - assertEquals("Field [not_a_number] of type [keyword] is not supported for aggregation [date_range]", - e.getMessage()); + IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, + () -> testCase( + aggregationBuilder, + new MatchAllDocsQuery(), + iw -> { iw.addDocument(singleton(new SortedSetDocValuesField("string", new BytesRef("foo")))); }, + range -> fail("Should have thrown exception"), + fieldType + ) + ); + assertEquals("Field [not_a_number] of type [keyword] is not supported for aggregation [date_range]", e.getMessage()); } public void testBadMissingField() { - DateRangeAggregationBuilder aggregationBuilder = new DateRangeAggregationBuilder("date_range") - .field(NUMBER_FIELD_NAME) + DateRangeAggregationBuilder aggregationBuilder = new DateRangeAggregationBuilder("date_range").field(NUMBER_FIELD_NAME) .addRange("2020-01-01T00:00:00", "2020-01-02T00:00:00") .missing("bogus"); - MappedFieldType fieldType - = new NumberFieldMapper.NumberFieldType(NUMBER_FIELD_NAME, NumberFieldMapper.NumberType.INTEGER); + MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(NUMBER_FIELD_NAME, NumberFieldMapper.NumberType.INTEGER); - expectThrows(NumberFormatException.class, - () -> testCase(aggregationBuilder, new 
MatchAllDocsQuery(), iw -> { - iw.addDocument(singleton(new NumericDocValuesField(NUMBER_FIELD_NAME, 7))); - iw.addDocument(singleton(new NumericDocValuesField(NUMBER_FIELD_NAME, 1))); - }, range -> fail("Should have thrown exception"), fieldType)); + expectThrows(NumberFormatException.class, () -> testCase(aggregationBuilder, new MatchAllDocsQuery(), iw -> { + iw.addDocument(singleton(new NumericDocValuesField(NUMBER_FIELD_NAME, 7))); + iw.addDocument(singleton(new NumericDocValuesField(NUMBER_FIELD_NAME, 1))); + }, range -> fail("Should have thrown exception"), fieldType)); } public void testUnmappedWithBadMissingField() { - DateRangeAggregationBuilder aggregationBuilder = new DateRangeAggregationBuilder("date_range") - .field("does_not_exist") + DateRangeAggregationBuilder aggregationBuilder = new DateRangeAggregationBuilder("date_range").field("does_not_exist") .addRange("2020-01-01T00:00:00", "2020-01-02T00:00:00") .missing("bogus"); - MappedFieldType fieldType - = new NumberFieldMapper.NumberFieldType(NUMBER_FIELD_NAME, NumberFieldMapper.NumberType.INTEGER); + MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(NUMBER_FIELD_NAME, NumberFieldMapper.NumberType.INTEGER); - expectThrows(ElasticsearchParseException.class, - () -> testCase(aggregationBuilder, new MatchAllDocsQuery(), iw -> { - iw.addDocument(singleton(new NumericDocValuesField(NUMBER_FIELD_NAME, 7))); - iw.addDocument(singleton(new NumericDocValuesField(NUMBER_FIELD_NAME, 1))); - }, range -> fail("Should have thrown exception"), fieldType)); + expectThrows(ElasticsearchParseException.class, () -> testCase(aggregationBuilder, new MatchAllDocsQuery(), iw -> { + iw.addDocument(singleton(new NumericDocValuesField(NUMBER_FIELD_NAME, 7))); + iw.addDocument(singleton(new NumericDocValuesField(NUMBER_FIELD_NAME, 1))); + }, range -> fail("Should have thrown exception"), fieldType)); } private void testBothResolutions( @@ -350,12 +329,23 @@ private void testBothResolutions( ); } - private void testCase(Query query, - CheckedConsumer buildIndex, - Consumer>> verify, - DateFieldMapper.Resolution resolution) throws IOException { - DateFieldMapper.DateFieldType fieldType = new DateFieldMapper.DateFieldType(DATE_FIELD_NAME, true, false, true, - DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER, resolution, null, null, Collections.emptyMap()); + private void testCase( + Query query, + CheckedConsumer buildIndex, + Consumer>> verify, + DateFieldMapper.Resolution resolution + ) throws IOException { + DateFieldMapper.DateFieldType fieldType = new DateFieldMapper.DateFieldType( + DATE_FIELD_NAME, + true, + false, + true, + DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER, + resolution, + null, + null, + Collections.emptyMap() + ); DateRangeAggregationBuilder aggregationBuilder = new DateRangeAggregationBuilder("test_range_agg"); aggregationBuilder.field(DATE_FIELD_NAME); aggregationBuilder.addRange("2015-01-01", "2015-12-31"); @@ -363,11 +353,13 @@ private void testCase(Query query, testCase(aggregationBuilder, query, buildIndex, verify, fieldType); } - private void testCase(DateRangeAggregationBuilder aggregationBuilder, - Query query, - CheckedConsumer buildIndex, - Consumer>> verify, - MappedFieldType fieldType) throws IOException { + private void testCase( + DateRangeAggregationBuilder aggregationBuilder, + Query query, + CheckedConsumer buildIndex, + Consumer>> verify, + MappedFieldType fieldType + ) throws IOException { try (Directory directory = newDirectory()) { RandomIndexWriter indexWriter = new RandomIndexWriter(random(), 
directory); buildIndex.accept(indexWriter); @@ -376,8 +368,12 @@ private void testCase(DateRangeAggregationBuilder aggregationBuilder, try (IndexReader indexReader = DirectoryReader.open(directory)) { IndexSearcher indexSearcher = newSearcher(indexReader, true, true); - InternalRange> agg = searchAndReduce(indexSearcher, - query, aggregationBuilder, fieldType); + InternalRange> agg = searchAndReduce( + indexSearcher, + query, + aggregationBuilder, + fieldType + ); verify.accept(agg); } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/InternalBinaryRangeTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/InternalBinaryRangeTests.java index a520c4fa893eb..3b82786de7abc 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/InternalBinaryRangeTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/InternalBinaryRangeTests.java @@ -58,17 +58,19 @@ public void setUp() throws Exception { } @Override - protected InternalBinaryRange createTestInstance(String name, - Map metadata, - InternalAggregations aggregations, - boolean keyed) { + protected InternalBinaryRange createTestInstance( + String name, + Map metadata, + InternalAggregations aggregations, + boolean keyed + ) { DocValueFormat format = DocValueFormat.RAW; List buckets = new ArrayList<>(); - int nullKey = randomBoolean() ? randomIntBetween(0, ranges.size() -1) : -1; + int nullKey = randomBoolean() ? randomIntBetween(0, ranges.size() - 1) : -1; for (int i = 0; i < ranges.size(); ++i) { final int docCount = randomIntBetween(1, 100); - final String key = (i == nullKey) ? null: randomAlphaOfLength(10); + final String key = (i == nullKey) ? null : randomAlphaOfLength(10); buckets.add(new InternalBinaryRange.Bucket(format, keyed, key, ranges.get(i).v1(), ranges.get(i).v2(), docCount, aggregations)); } return new InternalBinaryRange(name, format, keyed, buckets, metadata); @@ -91,7 +93,7 @@ protected void assertReduced(InternalBinaryRange reduced, List buckets = instance.getBuckets(); Map metadata = instance.getMetadata(); switch (between(0, 3)) { - case 0: - name += randomAlphaOfLength(5); - break; - case 1: - keyed = keyed == false; - break; - case 2: - buckets = new ArrayList<>(buckets); - buckets.add(new InternalBinaryRange.Bucket(format, keyed, "range_a", new BytesRef(randomAlphaOfLengthBetween(1, 20)), - new BytesRef(randomAlphaOfLengthBetween(1, 20)), randomNonNegativeLong(), InternalAggregations.EMPTY)); - break; - case 3: - if (metadata == null) { - metadata = new HashMap<>(1); - } else { - metadata = new HashMap<>(instance.getMetadata()); - } - metadata.put(randomAlphaOfLength(15), randomInt()); - break; - default: - throw new AssertionError("Illegal randomisation branch"); + case 0: + name += randomAlphaOfLength(5); + break; + case 1: + keyed = keyed == false; + break; + case 2: + buckets = new ArrayList<>(buckets); + buckets.add( + new InternalBinaryRange.Bucket( + format, + keyed, + "range_a", + new BytesRef(randomAlphaOfLengthBetween(1, 20)), + new BytesRef(randomAlphaOfLengthBetween(1, 20)), + randomNonNegativeLong(), + InternalAggregations.EMPTY + ) + ); + break; + case 3: + if (metadata == null) { + metadata = new HashMap<>(1); + } else { + metadata = new HashMap<>(instance.getMetadata()); + } + metadata.put(randomAlphaOfLength(15), randomInt()); + break; + default: + throw new AssertionError("Illegal randomisation branch"); } return new InternalBinaryRange(name, format, keyed, buckets, metadata); 
} diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/InternalDateRangeTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/InternalDateRangeTests.java index e0afb71eb8647..cf1fee78fb8a5 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/InternalDateRangeTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/InternalDateRangeTests.java @@ -33,9 +33,14 @@ public void setUp() throws Exception { super.setUp(); format = randomNumericDocValueFormat(); - Function interval = randomFrom(dateTime -> dateTime.plusSeconds(1), dateTime -> dateTime.plusMinutes(1), - dateTime -> dateTime.plusHours(1), dateTime -> dateTime.plusDays(1), dateTime -> dateTime.plusMonths(1), dateTime -> - dateTime.plusYears(1)); + Function interval = randomFrom( + dateTime -> dateTime.plusSeconds(1), + dateTime -> dateTime.plusMinutes(1), + dateTime -> dateTime.plusHours(1), + dateTime -> dateTime.plusDays(1), + dateTime -> dateTime.plusMonths(1), + dateTime -> dateTime.plusYears(1) + ); final int numRanges = randomNumberOfBuckets(); final List> listOfRanges = new ArrayList<>(numRanges); @@ -63,10 +68,12 @@ public void setUp() throws Exception { } @Override - protected InternalDateRange createTestInstance(String name, - Map metadata, - InternalAggregations aggregations, - boolean keyed) { + protected InternalDateRange createTestInstance( + String name, + Map metadata, + InternalAggregations aggregations, + boolean keyed + ) { final List buckets = new ArrayList<>(); for (int i = 0; i < dateRanges.size(); ++i) { Tuple range = dateRanges.get(i); @@ -101,28 +108,37 @@ protected InternalDateRange mutateInstance(InternalDateRange instance) { List buckets = instance.getBuckets(); Map metadata = instance.getMetadata(); switch (between(0, 3)) { - case 0: - name += randomAlphaOfLength(5); - break; - case 1: - keyed = keyed == false; - break; - case 2: - buckets = new ArrayList<>(buckets); - double from = randomDouble(); - buckets.add(new InternalDateRange.Bucket("range_a", from, from + randomDouble(), randomNonNegativeLong(), - InternalAggregations.EMPTY, false, format)); - break; - case 3: - if (metadata == null) { - metadata = new HashMap<>(1); - } else { - metadata = new HashMap<>(instance.getMetadata()); - } - metadata.put(randomAlphaOfLength(15), randomInt()); - break; - default: - throw new AssertionError("Illegal randomisation branch"); + case 0: + name += randomAlphaOfLength(5); + break; + case 1: + keyed = keyed == false; + break; + case 2: + buckets = new ArrayList<>(buckets); + double from = randomDouble(); + buckets.add( + new InternalDateRange.Bucket( + "range_a", + from, + from + randomDouble(), + randomNonNegativeLong(), + InternalAggregations.EMPTY, + false, + format + ) + ); + break; + case 3: + if (metadata == null) { + metadata = new HashMap<>(1); + } else { + metadata = new HashMap<>(instance.getMetadata()); + } + metadata.put(randomAlphaOfLength(15), randomInt()); + break; + default: + throw new AssertionError("Illegal randomisation branch"); } return new InternalDateRange(name, buckets, format, keyed, metadata); } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/InternalGeoDistanceTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/InternalGeoDistanceTests.java index 93a4cea076a3b..1b01e14fbb50f 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/InternalGeoDistanceTests.java +++ 
b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/InternalGeoDistanceTests.java @@ -50,10 +50,12 @@ public void setUp() throws Exception { } @Override - protected InternalGeoDistance createTestInstance(String name, - Map metadata, - InternalAggregations aggregations, - boolean keyed) { + protected InternalGeoDistance createTestInstance( + String name, + Map metadata, + InternalAggregations aggregations, + boolean keyed + ) { final List buckets = new ArrayList<>(); for (int i = 0; i < geoDistanceRanges.size(); ++i) { Tuple range = geoDistanceRanges.get(i); @@ -87,28 +89,36 @@ protected InternalGeoDistance mutateInstance(InternalGeoDistance instance) { List buckets = instance.getBuckets(); Map metadata = instance.getMetadata(); switch (between(0, 3)) { - case 0: - name += randomAlphaOfLength(5); - break; - case 1: - keyed = keyed == false; - break; - case 2: - buckets = new ArrayList<>(buckets); - double from = randomDouble(); - buckets.add(new InternalGeoDistance.Bucket("range_a", from, from + randomDouble(), randomNonNegativeLong(), - InternalAggregations.EMPTY, false)); - break; - case 3: - if (metadata == null) { - metadata = new HashMap<>(1); - } else { - metadata = new HashMap<>(instance.getMetadata()); - } - metadata.put(randomAlphaOfLength(15), randomInt()); - break; - default: - throw new AssertionError("Illegal randomisation branch"); + case 0: + name += randomAlphaOfLength(5); + break; + case 1: + keyed = keyed == false; + break; + case 2: + buckets = new ArrayList<>(buckets); + double from = randomDouble(); + buckets.add( + new InternalGeoDistance.Bucket( + "range_a", + from, + from + randomDouble(), + randomNonNegativeLong(), + InternalAggregations.EMPTY, + false + ) + ); + break; + case 3: + if (metadata == null) { + metadata = new HashMap<>(1); + } else { + metadata = new HashMap<>(instance.getMetadata()); + } + metadata.put(randomAlphaOfLength(15), randomInt()); + break; + default: + throw new AssertionError("Illegal randomisation branch"); } return new InternalGeoDistance(name, buckets, keyed, metadata); } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/InternalRangeTestCase.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/InternalRangeTestCase.java index 56ac9b08f940f..2925429a52514 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/InternalRangeTestCase.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/InternalRangeTestCase.java @@ -41,15 +41,16 @@ protected void assertReduced(T reduced, List inputs) { final Map expectedCounts = new TreeMap<>(); for (T input : inputs) { for (Range.Bucket bucket : input.getBuckets()) { - expectedCounts.compute(bucket.getKeyAsString(), - (key, oldValue) -> (oldValue == null ? 0 : oldValue) + bucket.getDocCount()); + expectedCounts.compute( + bucket.getKeyAsString(), + (key, oldValue) -> (oldValue == null ? 0 : oldValue) + bucket.getDocCount() + ); } } final Map actualCounts = new TreeMap<>(); for (Range.Bucket bucket : reduced.getBuckets()) { - actualCounts.compute(bucket.getKeyAsString(), - (key, oldValue) -> (oldValue == null ? 0 : oldValue) + bucket.getDocCount()); + actualCounts.compute(bucket.getKeyAsString(), (key, oldValue) -> (oldValue == null ? 
0 : oldValue) + bucket.getDocCount()); } assertEquals(expectedCounts, actualCounts); } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/InternalRangeTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/InternalRangeTests.java index d32f506dc183f..29b3c496d5171 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/InternalRangeTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/InternalRangeTests.java @@ -63,10 +63,12 @@ public void setUp() throws Exception { } @Override - protected InternalRange createTestInstance(String name, - Map metadata, - InternalAggregations aggregations, - boolean keyed) { + protected InternalRange createTestInstance( + String name, + Map metadata, + InternalAggregations aggregations, + boolean keyed + ) { final List buckets = new ArrayList<>(); for (int i = 0; i < ranges.size(); ++i) { Tuple range = ranges.get(i); @@ -101,28 +103,37 @@ protected Class parsedRange List buckets = instance.getBuckets(); Map metadata = instance.getMetadata(); switch (between(0, 3)) { - case 0: - name += randomAlphaOfLength(5); - break; - case 1: - keyed = keyed == false; - break; - case 2: - buckets = new ArrayList<>(buckets); - double from = randomDouble(); - buckets.add(new InternalRange.Bucket("range_a", from, from + randomDouble(), randomNonNegativeLong(), - InternalAggregations.EMPTY, false, format)); - break; - case 3: - if (metadata == null) { - metadata = new HashMap<>(1); - } else { - metadata = new HashMap<>(instance.getMetadata()); - } - metadata.put(randomAlphaOfLength(15), randomInt()); - break; - default: - throw new AssertionError("Illegal randomisation branch"); + case 0: + name += randomAlphaOfLength(5); + break; + case 1: + keyed = keyed == false; + break; + case 2: + buckets = new ArrayList<>(buckets); + double from = randomDouble(); + buckets.add( + new InternalRange.Bucket( + "range_a", + from, + from + randomDouble(), + randomNonNegativeLong(), + InternalAggregations.EMPTY, + false, + format + ) + ); + break; + case 3: + if (metadata == null) { + metadata = new HashMap<>(1); + } else { + metadata = new HashMap<>(instance.getMetadata()); + } + metadata.put(randomAlphaOfLength(15), randomInt()); + break; + default: + throw new AssertionError("Illegal randomisation branch"); } return new InternalRange<>(name, buckets, format, keyed, metadata); } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/IpRangeAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/IpRangeAggregatorTests.java index 1ae7f5bc921fe..7ffb8e2d90995 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/IpRangeAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/IpRangeAggregatorTests.java @@ -16,8 +16,8 @@ import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.store.Directory; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.core.Tuple; import org.elasticsearch.common.network.NetworkAddress; +import org.elasticsearch.core.Tuple; import org.elasticsearch.index.mapper.IpFieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.search.DocValueFormat; @@ -36,7 +36,7 @@ private static boolean isInRange(BytesRef value, BytesRef from, BytesRef to) { return false; } - private static final Comparator > RANGE_COMPARATOR = (a, b) -> { + private static 
final Comparator> RANGE_COMPARATOR = (a, b) -> { int cmp = compare(a.v1(), b.v1(), 1); if (cmp == 0) { cmp = compare(a.v2(), b.v2(), -1); @@ -45,12 +45,10 @@ private static boolean isInRange(BytesRef value, BytesRef from, BytesRef to) { }; private static int compare(BytesRef a, BytesRef b, int m) { - return a == null - ? b == null ? 0 : -m - : b == null ? m : a.compareTo(b); + return a == null ? b == null ? 0 : -m : b == null ? m : a.compareTo(b); } - @SuppressWarnings({"rawtypes", "unchecked"}) + @SuppressWarnings({ "rawtypes", "unchecked" }) public void testRanges() throws Exception { boolean v4 = randomBoolean(); IpRangeAggregationBuilder builder = new IpRangeAggregationBuilder("test_agg").field("field"); @@ -78,8 +76,7 @@ public void testRanges() throws Exception { } Arrays.sort(requestedRanges, RANGE_COMPARATOR); int[] expectedCounts = new int[numRanges]; - try (Directory dir = newDirectory(); - RandomIndexWriter w = new RandomIndexWriter(random(), dir)) { + try (Directory dir = newDirectory(); RandomIndexWriter w = new RandomIndexWriter(random(), dir)) { int numDocs = randomIntBetween(10, 100); for (int i = 0; i < numDocs; i++) { Document doc = new Document(); @@ -125,15 +122,13 @@ public void testRanges() throws Exception { } public void testMissingUnmapped() throws Exception { - try (Directory dir = newDirectory(); - RandomIndexWriter w = new RandomIndexWriter(random(), dir)) { + try (Directory dir = newDirectory(); RandomIndexWriter w = new RandomIndexWriter(random(), dir)) { for (int i = 0; i < 7; i++) { Document doc = new Document(); w.addDocument(doc); } - IpRangeAggregationBuilder builder = new IpRangeAggregationBuilder("test_agg") - .field("field") + IpRangeAggregationBuilder builder = new IpRangeAggregationBuilder("test_agg").field("field") .addRange(new IpRangeAggregationBuilder.Range("foo", "192.168.100.0", "192.168.100.255")) .missing("192.168.100.42"); // Apparently we expect a string here try (IndexReader reader = w.getReader()) { @@ -145,15 +140,13 @@ public void testMissingUnmapped() throws Exception { } public void testMissingUnmappedBadType() throws Exception { - try (Directory dir = newDirectory(); - RandomIndexWriter w = new RandomIndexWriter(random(), dir)) { + try (Directory dir = newDirectory(); RandomIndexWriter w = new RandomIndexWriter(random(), dir)) { for (int i = 0; i < 7; i++) { Document doc = new Document(); w.addDocument(doc); } - IpRangeAggregationBuilder builder = new IpRangeAggregationBuilder("test_agg") - .field("field") + IpRangeAggregationBuilder builder = new IpRangeAggregationBuilder("test_agg").field("field") .addRange(new IpRangeAggregationBuilder.Range("foo", "192.168.100.0", "192.168.100.255")) .missing(1234); try (IndexReader reader = w.getReader()) { diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/RangeAggregationBuilderTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/RangeAggregationBuilderTests.java index e6124fe8db817..2175f19ce02f0 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/RangeAggregationBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/RangeAggregationBuilderTests.java @@ -99,8 +99,9 @@ protected RangeAggregationBuilder mutateInstance(RangeAggregationBuilder builder } public void testNumericKeys() throws IOException { - RangeAggregationBuilder builder = doParseInstance(createParser(JsonXContent.jsonXContent, - 
"{\"test\":{\"range\":{\"field\":\"f\",\"ranges\":[{\"key\":1,\"to\":0}]}}}")); + RangeAggregationBuilder builder = doParseInstance( + createParser(JsonXContent.jsonXContent, "{\"test\":{\"range\":{\"field\":\"f\",\"ranges\":[{\"key\":1,\"to\":0}]}}}") + ); assertThat(builder.getName(), equalTo("test")); assertThat(builder.field(), equalTo("f")); assertThat(builder.ranges, equalTo(List.of(new RangeAggregator.Range("1", null, 0d)))); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/RangeAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/RangeAggregatorTests.java index 4e037402583a1..326a29b1ff4a6 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/RangeAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/RangeAggregatorTests.java @@ -167,8 +167,7 @@ public void testDateFieldMillisecondResolution() throws IOException { long milli1 = ZonedDateTime.of(2015, 11, 13, 16, 14, 34, 0, ZoneOffset.UTC).toInstant().toEpochMilli(); long milli2 = ZonedDateTime.of(2016, 11, 13, 16, 14, 34, 0, ZoneOffset.UTC).toInstant().toEpochMilli(); - RangeAggregationBuilder aggregationBuilder = new RangeAggregationBuilder("range") - .field(DATE_FIELD_NAME) + RangeAggregationBuilder aggregationBuilder = new RangeAggregationBuilder("range").field(DATE_FIELD_NAME) .addRange(milli1 - 1, milli1 + 1); testCase(aggregationBuilder, new MatchAllDocsQuery(), iw -> { @@ -183,15 +182,23 @@ public void testDateFieldMillisecondResolution() throws IOException { } public void testDateFieldNanosecondResolution() throws IOException { - DateFieldMapper.DateFieldType fieldType = new DateFieldMapper.DateFieldType(DATE_FIELD_NAME, true, false, true, - DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER, DateFieldMapper.Resolution.NANOSECONDS, null, null, Collections.emptyMap()); + DateFieldMapper.DateFieldType fieldType = new DateFieldMapper.DateFieldType( + DATE_FIELD_NAME, + true, + false, + true, + DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER, + DateFieldMapper.Resolution.NANOSECONDS, + null, + null, + Collections.emptyMap() + ); // These values should work because aggs scale nanosecond up to millisecond always. 
long milli1 = ZonedDateTime.of(2015, 11, 13, 16, 14, 34, 0, ZoneOffset.UTC).toInstant().toEpochMilli(); long milli2 = ZonedDateTime.of(2016, 11, 13, 16, 14, 34, 0, ZoneOffset.UTC).toInstant().toEpochMilli(); - RangeAggregationBuilder aggregationBuilder = new RangeAggregationBuilder("range") - .field(DATE_FIELD_NAME) + RangeAggregationBuilder aggregationBuilder = new RangeAggregationBuilder("range").field(DATE_FIELD_NAME) .addRange(milli1 - 1, milli1 + 1); testCase(aggregationBuilder, new MatchAllDocsQuery(), iw -> { @@ -205,16 +212,24 @@ public void testDateFieldNanosecondResolution() throws IOException { }, fieldType); } - public void testMissingDateWithDateNanosField() throws IOException { - DateFieldMapper.DateFieldType fieldType = new DateFieldMapper.DateFieldType(DATE_FIELD_NAME, true, false, true, - DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER, DateFieldMapper.Resolution.NANOSECONDS, null, null, Collections.emptyMap()); + public void testMissingDateWithDateNanosField() throws IOException { + DateFieldMapper.DateFieldType fieldType = new DateFieldMapper.DateFieldType( + DATE_FIELD_NAME, + true, + false, + true, + DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER, + DateFieldMapper.Resolution.NANOSECONDS, + null, + null, + Collections.emptyMap() + ); // These values should work because aggs scale nanosecond up to millisecond always. long milli1 = ZonedDateTime.of(2015, 11, 13, 16, 14, 34, 0, ZoneOffset.UTC).toInstant().toEpochMilli(); long milli2 = ZonedDateTime.of(2016, 11, 13, 16, 14, 34, 0, ZoneOffset.UTC).toInstant().toEpochMilli(); - RangeAggregationBuilder aggregationBuilder = new RangeAggregationBuilder("range") - .field(DATE_FIELD_NAME) + RangeAggregationBuilder aggregationBuilder = new RangeAggregationBuilder("range").field(DATE_FIELD_NAME) .missing("2015-11-13T16:14:34") .addRange(milli1 - 1, milli1 + 1); @@ -247,8 +262,7 @@ public void testNotFitIntoDouble() throws IOException { long start = 2L << 54; // Double stores 53 bits of mantissa, so we aggregate a bunch of bigger values - RangeAggregationBuilder aggregationBuilder = new RangeAggregationBuilder("range") - .field(NUMBER_FIELD_NAME) + RangeAggregationBuilder aggregationBuilder = new RangeAggregationBuilder("range").field(NUMBER_FIELD_NAME) .addRange(start, start + 50) .addRange(start + 50, start + 100) .addUnboundedFrom(start + 100); @@ -261,35 +275,30 @@ public void testNotFitIntoDouble() throws IOException { List ranges = range.getBuckets(); assertThat(ranges, hasSize(3)); // If we had a native `double` range aggregator we'd get 50, 50, 50 - assertThat(ranges.stream().mapToLong(InternalRange.Bucket::getDocCount).toArray(), equalTo(new long[] {44, 48, 58})); + assertThat(ranges.stream().mapToLong(InternalRange.Bucket::getDocCount).toArray(), equalTo(new long[] { 44, 48, 58 })); assertTrue(AggregationInspectionHelper.hasValue(range)); }, fieldType); } - public void testMissingDateWithNumberField() throws IOException { - RangeAggregationBuilder aggregationBuilder = new RangeAggregationBuilder("range") - .field(NUMBER_FIELD_NAME) + public void testMissingDateWithNumberField() throws IOException { + RangeAggregationBuilder aggregationBuilder = new RangeAggregationBuilder("range").field(NUMBER_FIELD_NAME) .addRange(-2d, 5d) .missing("1979-01-01T00:00:00"); - MappedFieldType fieldType - = new NumberFieldMapper.NumberFieldType(NUMBER_FIELD_NAME, NumberFieldMapper.NumberType.INTEGER); + MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(NUMBER_FIELD_NAME, NumberFieldMapper.NumberType.INTEGER); - 
expectThrows(NumberFormatException.class, - () -> testCase(aggregationBuilder, new MatchAllDocsQuery(), iw -> { - iw.addDocument(singleton(new NumericDocValuesField(NUMBER_FIELD_NAME, 7))); - iw.addDocument(singleton(new NumericDocValuesField(NUMBER_FIELD_NAME, 1))); - }, range -> fail("Should have thrown exception"), fieldType)); + expectThrows(NumberFormatException.class, () -> testCase(aggregationBuilder, new MatchAllDocsQuery(), iw -> { + iw.addDocument(singleton(new NumericDocValuesField(NUMBER_FIELD_NAME, 7))); + iw.addDocument(singleton(new NumericDocValuesField(NUMBER_FIELD_NAME, 1))); + }, range -> fail("Should have thrown exception"), fieldType)); } public void testUnmappedWithMissingNumber() throws IOException { - RangeAggregationBuilder aggregationBuilder = new RangeAggregationBuilder("range") - .field("does_not_exist") + RangeAggregationBuilder aggregationBuilder = new RangeAggregationBuilder("range").field("does_not_exist") .addRange(-2d, 5d) .missing(0L); - MappedFieldType fieldType - = new NumberFieldMapper.NumberFieldType(NUMBER_FIELD_NAME, NumberFieldMapper.NumberType.INTEGER); + MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(NUMBER_FIELD_NAME, NumberFieldMapper.NumberType.INTEGER); testCase(aggregationBuilder, new MatchAllDocsQuery(), iw -> { iw.addDocument(singleton(new NumericDocValuesField(NUMBER_FIELD_NAME, 7))); @@ -303,70 +312,64 @@ public void testUnmappedWithMissingNumber() throws IOException { } public void testUnmappedWithMissingDate() throws IOException { - RangeAggregationBuilder aggregationBuilder = new RangeAggregationBuilder("range") - .field("does_not_exist") + RangeAggregationBuilder aggregationBuilder = new RangeAggregationBuilder("range").field("does_not_exist") .addRange(-2d, 5d) .missing("2020-02-13T10:11:12"); - MappedFieldType fieldType - = new NumberFieldMapper.NumberFieldType(NUMBER_FIELD_NAME, NumberFieldMapper.NumberType.INTEGER); + MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(NUMBER_FIELD_NAME, NumberFieldMapper.NumberType.INTEGER); - expectThrows(NumberFormatException.class, - () -> testCase(aggregationBuilder, new MatchAllDocsQuery(), iw -> { - iw.addDocument(singleton(new NumericDocValuesField(NUMBER_FIELD_NAME, 7))); - iw.addDocument(singleton(new NumericDocValuesField(NUMBER_FIELD_NAME, 1))); - }, range -> fail("Should have thrown exception"), fieldType)); + expectThrows(NumberFormatException.class, () -> testCase(aggregationBuilder, new MatchAllDocsQuery(), iw -> { + iw.addDocument(singleton(new NumericDocValuesField(NUMBER_FIELD_NAME, 7))); + iw.addDocument(singleton(new NumericDocValuesField(NUMBER_FIELD_NAME, 1))); + }, range -> fail("Should have thrown exception"), fieldType)); } public void testUnsupportedType() { - RangeAggregationBuilder aggregationBuilder = new RangeAggregationBuilder("range") - .field("not_a_number") - .addRange(-2d, 5d); + RangeAggregationBuilder aggregationBuilder = new RangeAggregationBuilder("range").field("not_a_number").addRange(-2d, 5d); MappedFieldType fieldType = new KeywordFieldMapper.KeywordFieldType("not_a_number"); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, - () -> testCase(aggregationBuilder, new MatchAllDocsQuery(), iw -> { - iw.addDocument(singleton(new SortedSetDocValuesField("string", new BytesRef("foo")))); - }, range -> fail("Should have thrown exception"), fieldType)); + IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, + () -> testCase( + aggregationBuilder, + new MatchAllDocsQuery(), + iw -> 
{ iw.addDocument(singleton(new SortedSetDocValuesField("string", new BytesRef("foo")))); }, + range -> fail("Should have thrown exception"), + fieldType + ) + ); assertEquals("Field [not_a_number] of type [keyword] is not supported for aggregation [range]", e.getMessage()); } public void testBadMissingField() { - RangeAggregationBuilder aggregationBuilder = new RangeAggregationBuilder("range") - .field(NUMBER_FIELD_NAME) + RangeAggregationBuilder aggregationBuilder = new RangeAggregationBuilder("range").field(NUMBER_FIELD_NAME) .addRange(-2d, 5d) .missing("bogus"); - MappedFieldType fieldType - = new NumberFieldMapper.NumberFieldType(NUMBER_FIELD_NAME, NumberFieldMapper.NumberType.INTEGER); + MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(NUMBER_FIELD_NAME, NumberFieldMapper.NumberType.INTEGER); - expectThrows(NumberFormatException.class, - () -> testCase(aggregationBuilder, new MatchAllDocsQuery(), iw -> { - iw.addDocument(singleton(new NumericDocValuesField(NUMBER_FIELD_NAME, 7))); - iw.addDocument(singleton(new NumericDocValuesField(NUMBER_FIELD_NAME, 1))); - }, range -> fail("Should have thrown exception"), fieldType)); + expectThrows(NumberFormatException.class, () -> testCase(aggregationBuilder, new MatchAllDocsQuery(), iw -> { + iw.addDocument(singleton(new NumericDocValuesField(NUMBER_FIELD_NAME, 7))); + iw.addDocument(singleton(new NumericDocValuesField(NUMBER_FIELD_NAME, 1))); + }, range -> fail("Should have thrown exception"), fieldType)); } public void testUnmappedWithBadMissingField() { - RangeAggregationBuilder aggregationBuilder = new RangeAggregationBuilder("range") - .field("does_not_exist") + RangeAggregationBuilder aggregationBuilder = new RangeAggregationBuilder("range").field("does_not_exist") .addRange(-2d, 5d) .missing("bogus"); - MappedFieldType fieldType - = new NumberFieldMapper.NumberFieldType(NUMBER_FIELD_NAME, NumberFieldMapper.NumberType.INTEGER); + MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(NUMBER_FIELD_NAME, NumberFieldMapper.NumberType.INTEGER); - expectThrows(NumberFormatException.class, - () -> testCase(aggregationBuilder, new MatchAllDocsQuery(), iw -> { - iw.addDocument(singleton(new NumericDocValuesField(NUMBER_FIELD_NAME, 7))); - iw.addDocument(singleton(new NumericDocValuesField(NUMBER_FIELD_NAME, 1))); - }, range -> fail("Should have thrown exception"), fieldType)); + expectThrows(NumberFormatException.class, () -> testCase(aggregationBuilder, new MatchAllDocsQuery(), iw -> { + iw.addDocument(singleton(new NumericDocValuesField(NUMBER_FIELD_NAME, 7))); + iw.addDocument(singleton(new NumericDocValuesField(NUMBER_FIELD_NAME, 1))); + }, range -> fail("Should have thrown exception"), fieldType)); } public void testSubAggCollectsFromSingleBucketIfOneRange() throws IOException { - RangeAggregationBuilder aggregationBuilder = new RangeAggregationBuilder("test") - .field(NUMBER_FIELD_NAME) + RangeAggregationBuilder aggregationBuilder = new RangeAggregationBuilder("test").field(NUMBER_FIELD_NAME) .addRange(0d, 10d) .subAggregation(aggCardinality("c")); @@ -378,8 +381,7 @@ public void testSubAggCollectsFromSingleBucketIfOneRange() throws IOException { } public void testSubAggCollectsFromManyBucketsIfManyRanges() throws IOException { - RangeAggregationBuilder aggregationBuilder = new RangeAggregationBuilder("test") - .field(NUMBER_FIELD_NAME) + RangeAggregationBuilder aggregationBuilder = new RangeAggregationBuilder("test").field(NUMBER_FIELD_NAME) .addRange(0d, 10d) .addRange(10d, 100d) 
.subAggregation(aggCardinality("c")); @@ -538,11 +540,13 @@ private void simpleTestCase( }, verify, fieldType); } - private void testCase(RangeAggregationBuilder aggregationBuilder, - Query query, - CheckedConsumer buildIndex, - Consumer>> verify, - MappedFieldType fieldType) throws IOException { + private void testCase( + RangeAggregationBuilder aggregationBuilder, + Query query, + CheckedConsumer buildIndex, + Consumer>> verify, + MappedFieldType fieldType + ) throws IOException { try (Directory directory = newDirectory()) { RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory); buildIndex.accept(indexWriter); @@ -551,8 +555,12 @@ private void testCase(RangeAggregationBuilder aggregationBuilder, try (IndexReader indexReader = DirectoryReader.open(directory)) { IndexSearcher indexSearcher = newSearcher(indexReader, true, true); - InternalRange> agg = searchAndReduce(indexSearcher, - query, aggregationBuilder, fieldType); + InternalRange> agg = searchAndReduce( + indexSearcher, + query, + aggregationBuilder, + fieldType + ); verify.accept(agg); } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/sampler/BestDocsDeferringCollectorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/sampler/BestDocsDeferringCollectorTests.java index eca76fa6ee752..7f4d135e34423 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/sampler/BestDocsDeferringCollectorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/sampler/BestDocsDeferringCollectorTests.java @@ -57,8 +57,11 @@ public void testReplay() throws Exception { final AtomicLong bytes = new AtomicLong(0); - BestDocsDeferringCollector collector = new BestDocsDeferringCollector(numDocs, - new MockBigArrays(new MockPageCacheRecycler(Settings.EMPTY), new NoneCircuitBreakerService()), bytes::addAndGet); + BestDocsDeferringCollector collector = new BestDocsDeferringCollector( + numDocs, + new MockBigArrays(new MockPageCacheRecycler(Settings.EMPTY), new NoneCircuitBreakerService()), + bytes::addAndGet + ); Set deferredCollectedDocIds = new HashSet<>(); collector.setDeferredCollector(Collections.singleton(testCollector(deferredCollectedDocIds))); collector.preCollection(); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedSamplerTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedSamplerTests.java index c8b33a2064d53..645048e84f3a2 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedSamplerTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedSamplerTests.java @@ -143,25 +143,36 @@ public void testRidiculousSize() throws Exception { } } - private void testCase(IndexSearcher indexSearcher, MappedFieldType genreFieldType, String executionHint, - Consumer verify) throws IOException { + private void testCase( + IndexSearcher indexSearcher, + MappedFieldType genreFieldType, + String executionHint, + Consumer verify + ) throws IOException { testCase(indexSearcher, genreFieldType, executionHint, verify, 100, 1); } - private void testCase(IndexSearcher indexSearcher, MappedFieldType genreFieldType, String executionHint, - Consumer verify, int shardSize, int maxDocsPerValue) throws IOException { + private void testCase( + IndexSearcher indexSearcher, + MappedFieldType genreFieldType, + String executionHint, + Consumer verify, + int shardSize, + int 
maxDocsPerValue + ) throws IOException { MappedFieldType idFieldType = new KeywordFieldMapper.KeywordFieldType("id"); SortedNumericIndexFieldData fieldData = new SortedNumericIndexFieldData("price", IndexNumericFieldData.NumericType.DOUBLE); - FunctionScoreQuery query = new FunctionScoreQuery(new MatchAllDocsQuery(), - new FieldValueFactorFunction("price", 1, FieldValueFactorFunction.Modifier.RECIPROCAL, null, fieldData)); + FunctionScoreQuery query = new FunctionScoreQuery( + new MatchAllDocsQuery(), + new FieldValueFactorFunction("price", 1, FieldValueFactorFunction.Modifier.RECIPROCAL, null, fieldData) + ); - DiversifiedAggregationBuilder builder = new DiversifiedAggregationBuilder("_name") - .field(genreFieldType.name()) - .executionHint(executionHint) - .maxDocsPerValue(maxDocsPerValue) - .shardSize(shardSize) - .subAggregation(new TermsAggregationBuilder("terms").field("id")); + DiversifiedAggregationBuilder builder = new DiversifiedAggregationBuilder("_name").field(genreFieldType.name()) + .executionHint(executionHint) + .maxDocsPerValue(maxDocsPerValue) + .shardSize(shardSize) + .subAggregation(new TermsAggregationBuilder("terms").field("id")); InternalSampler result = searchAndReduce(indexSearcher, query, builder, genreFieldType, idFieldType); verify.accept(result); @@ -178,9 +189,8 @@ public void testDiversifiedSampler_noDocs() throws Exception { MappedFieldType genreFieldType = new KeywordFieldMapper.KeywordFieldType("genre"); - DiversifiedAggregationBuilder builder = new DiversifiedAggregationBuilder("_name") - .field(genreFieldType.name()) - .subAggregation(new TermsAggregationBuilder("terms").field("id")); + DiversifiedAggregationBuilder builder = new DiversifiedAggregationBuilder("_name").field(genreFieldType.name()) + .subAggregation(new TermsAggregationBuilder("terms").field("id")); InternalSampler result = searchAndReduce(indexSearcher, new MatchAllDocsQuery(), builder, genreFieldType, idFieldType); Terms terms = result.getAggregations().get("terms"); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/sampler/InternalSamplerTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/sampler/InternalSamplerTests.java index da9597b4b8931..e94ccf26d756a 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/sampler/InternalSamplerTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/sampler/InternalSamplerTests.java @@ -16,8 +16,12 @@ public class InternalSamplerTests extends InternalSingleBucketAggregationTestCase { @Override - protected InternalSampler createTestInstance(String name, long docCount, InternalAggregations aggregations, - Map metadata) { + protected InternalSampler createTestInstance( + String name, + long docCount, + InternalAggregations aggregations, + Map metadata + ) { return new InternalSampler(name, docCount, aggregations, metadata); } @@ -26,7 +30,6 @@ protected void extraAssertReduced(InternalSampler reduced, List // Nothing extra to assert } - @Override protected Class implementationClass() { return ParsedSampler.class; diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/sampler/SamplerAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/sampler/SamplerAggregatorTests.java index b168576eae216..8b91bd09c3d40 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/sampler/SamplerAggregatorTests.java +++ 
b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/sampler/SamplerAggregatorTests.java @@ -48,9 +48,8 @@ public void testSampler() throws IOException { IndexWriterConfig indexWriterConfig = newIndexWriterConfig(); indexWriterConfig.setMaxBufferedDocs(100); indexWriterConfig.setRAMBufferSizeMB(100); // flush on open to have a single segment with predictable docIds - try (Directory dir = newDirectory(); - IndexWriter w = new IndexWriter(dir, indexWriterConfig)) { - for (long value : new long[] {7, 3, -10, -6, 5, 50}) { + try (Directory dir = newDirectory(); IndexWriter w = new IndexWriter(dir, indexWriterConfig)) { + for (long value : new long[] { 7, 3, -10, -6, 5, 50 }) { Document doc = new Document(); StringBuilder text = new StringBuilder(); for (int i = 0; i < value; i++) { @@ -61,15 +60,18 @@ public void testSampler() throws IOException { w.addDocument(doc); } - SamplerAggregationBuilder aggBuilder = new SamplerAggregationBuilder("sampler") - .shardSize(3) - .subAggregation(new MinAggregationBuilder("min") - .field("int")); + SamplerAggregationBuilder aggBuilder = new SamplerAggregationBuilder("sampler").shardSize(3) + .subAggregation(new MinAggregationBuilder("min").field("int")); try (IndexReader reader = DirectoryReader.open(w)) { assertEquals("test expects a single segment", 1, reader.leaves().size()); IndexSearcher searcher = new IndexSearcher(reader); - InternalSampler sampler = searchAndReduce(searcher, new TermQuery(new Term("text", "good")), aggBuilder, textFieldType, - numericFieldType); + InternalSampler sampler = searchAndReduce( + searcher, + new TermQuery(new Term("text", "good")), + aggBuilder, + textFieldType, + numericFieldType + ); Min min = sampler.getAggregations().get("min"); assertEquals(5.0, min.getValue(), 0); assertTrue(AggregationInspectionHelper.hasValue(sampler)); @@ -84,9 +86,8 @@ public void testRidiculousSize() throws IOException { IndexWriterConfig indexWriterConfig = newIndexWriterConfig(); indexWriterConfig.setMaxBufferedDocs(100); indexWriterConfig.setRAMBufferSizeMB(100); // flush on open to have a single segment with predictable docIds - try (Directory dir = newDirectory(); - IndexWriter w = new IndexWriter(dir, indexWriterConfig)) { - for (long value : new long[] {7, 3, -10, -6, 5, 50}) { + try (Directory dir = newDirectory(); IndexWriter w = new IndexWriter(dir, indexWriterConfig)) { + for (long value : new long[] { 7, 3, -10, -6, 5, 50 }) { Document doc = new Document(); StringBuilder text = new StringBuilder(); for (int i = 0; i < value; i++) { @@ -98,15 +99,18 @@ public void testRidiculousSize() throws IOException { } // Test with an outrageously large size to ensure that the maxDoc protection works - SamplerAggregationBuilder aggBuilder = new SamplerAggregationBuilder("sampler") - .shardSize(Integer.MAX_VALUE) - .subAggregation(new MinAggregationBuilder("min") - .field("int")); + SamplerAggregationBuilder aggBuilder = new SamplerAggregationBuilder("sampler").shardSize(Integer.MAX_VALUE) + .subAggregation(new MinAggregationBuilder("min").field("int")); try (IndexReader reader = DirectoryReader.open(w)) { assertEquals("test expects a single segment", 1, reader.leaves().size()); IndexSearcher searcher = new IndexSearcher(reader); - InternalSampler sampler = searchAndReduce(searcher, new TermQuery(new Term("text", "good")), aggBuilder, textFieldType, - numericFieldType); + InternalSampler sampler = searchAndReduce( + searcher, + new TermQuery(new Term("text", "good")), + aggBuilder, + textFieldType, + numericFieldType + ); Min min 
= sampler.getAggregations().get("min"); assertEquals(3.0, min.getValue(), 0); assertTrue(AggregationInspectionHelper.hasValue(sampler)); @@ -119,22 +123,17 @@ public void testRidiculousSize() throws IOException { */ public void testEmptyParentBucket() throws Exception { IndexWriterConfig indexWriterConfig = newIndexWriterConfig(); - try (Directory dir = newDirectory(); - IndexWriter writer = new IndexWriter(dir, indexWriterConfig)) { + try (Directory dir = newDirectory(); IndexWriter writer = new IndexWriter(dir, indexWriterConfig)) { writer.addDocument(new Document()); try (IndexReader reader = DirectoryReader.open(writer)) { IndexSearcher searcher = new IndexSearcher(reader); - QueryBuilder[] filters = new QueryBuilder[]{ - new MatchAllQueryBuilder(), - new MatchNoneQueryBuilder() - }; + QueryBuilder[] filters = new QueryBuilder[] { new MatchAllQueryBuilder(), new MatchNoneQueryBuilder() }; FiltersAggregationBuilder samplerParent = new FiltersAggregationBuilder("filters", filters); TermsAggregationBuilder samplerChild = new TermsAggregationBuilder("child").field("field"); - SamplerAggregationBuilder sampler = new SamplerAggregationBuilder("sampler") - .subAggregation(samplerChild); + SamplerAggregationBuilder sampler = new SamplerAggregationBuilder("sampler").subAggregation(samplerChild); samplerParent.subAggregation(sampler); InternalFilters response = searchAndReduce(searcher, new MatchAllDocsQuery(), samplerParent); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/BinaryTermsAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/BinaryTermsAggregatorTests.java index 3eb1654d39709..80c8909cd2129 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/BinaryTermsAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/BinaryTermsAggregatorTests.java @@ -44,65 +44,96 @@ public class BinaryTermsAggregatorTests extends AggregatorTestCase { d.add((long) i); } } - dataset = d; + dataset = d; } public void testMatchNoDocs() throws IOException { - testSearchCase(new MatchNoDocsQuery(), dataset, + testSearchCase( + new MatchNoDocsQuery(), + dataset, aggregation -> aggregation.field(BINARY_FIELD), - agg -> assertEquals(0, agg.getBuckets().size()), ValueType.STRING + agg -> assertEquals(0, agg.getBuckets().size()), + ValueType.STRING ); } public void testMatchAllDocs() throws IOException { Query query = new MatchAllDocsQuery(); - testSearchCase(query, dataset, - aggregation -> aggregation.field(BINARY_FIELD), - agg -> { - assertEquals(9, agg.getBuckets().size()); - for (int i = 0; i < 9; i++) { - StringTerms.Bucket bucket = (StringTerms.Bucket) agg.getBuckets().get(i); - byte[] bytes = Numbers.longToBytes(9L - i); - String bytesAsString = (String) DocValueFormat.BINARY.format(new BytesRef(bytes)); - assertThat(bucket.getKey(), equalTo(bytesAsString)); - assertThat(bucket.getDocCount(), equalTo(9L - i)); - } - }, null); + testSearchCase(query, dataset, aggregation -> aggregation.field(BINARY_FIELD), agg -> { + assertEquals(9, agg.getBuckets().size()); + for (int i = 0; i < 9; i++) { + StringTerms.Bucket bucket = (StringTerms.Bucket) agg.getBuckets().get(i); + byte[] bytes = Numbers.longToBytes(9L - i); + String bytesAsString = (String) DocValueFormat.BINARY.format(new BytesRef(bytes)); + assertThat(bucket.getKey(), equalTo(bytesAsString)); + assertThat(bucket.getDocCount(), equalTo(9L - i)); + } + }, null); } public void testBadIncludeExclude() throws 
IOException { IncludeExclude includeExclude = new IncludeExclude(new RegExp("foo"), null); // Make sure the include/exclude fails regardless of how the user tries to type hint the agg - AggregationExecutionException e = expectThrows(AggregationExecutionException.class, - () -> testSearchCase(new MatchNoDocsQuery(), dataset, + AggregationExecutionException e = expectThrows( + AggregationExecutionException.class, + () -> testSearchCase( + new MatchNoDocsQuery(), + dataset, aggregation -> aggregation.field(BINARY_FIELD).includeExclude(includeExclude).format("yyyy-MM-dd"), - agg -> fail("test should have failed with exception"), null // default, no hint - )); - assertThat(e.getMessage(), equalTo("Aggregation [_name] cannot support regular expression style include/exclude settings as " + - "they can only be applied to string fields. Use an array of values for include/exclude clauses")); + agg -> fail("test should have failed with exception"), + null // default, no hint + ) + ); + assertThat( + e.getMessage(), + equalTo( + "Aggregation [_name] cannot support regular expression style include/exclude settings as " + + "they can only be applied to string fields. Use an array of values for include/exclude clauses" + ) + ); - e = expectThrows(AggregationExecutionException.class, - () -> testSearchCase(new MatchNoDocsQuery(), dataset, + e = expectThrows( + AggregationExecutionException.class, + () -> testSearchCase( + new MatchNoDocsQuery(), + dataset, aggregation -> aggregation.field(BINARY_FIELD).includeExclude(includeExclude).format("yyyy-MM-dd"), - agg -> fail("test should have failed with exception"), ValueType.STRING // string type hint - )); - assertThat(e.getMessage(), equalTo("Aggregation [_name] cannot support regular expression style include/exclude settings as " + - "they can only be applied to string fields. Use an array of values for include/exclude clauses")); + agg -> fail("test should have failed with exception"), + ValueType.STRING // string type hint + ) + ); + assertThat( + e.getMessage(), + equalTo( + "Aggregation [_name] cannot support regular expression style include/exclude settings as " + + "they can only be applied to string fields. 
Use an array of values for include/exclude clauses" + ) + ); } public void testBadUserValueTypeHint() throws IOException { - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> testSearchCase(new MatchNoDocsQuery(), dataset, - aggregation -> aggregation.field(BINARY_FIELD), - agg -> fail("test should have failed with exception"), ValueType.NUMERIC // numeric type hint - )); + IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, + () -> testSearchCase( + new MatchNoDocsQuery(), + dataset, + aggregation -> aggregation.field(BINARY_FIELD), + agg -> fail("test should have failed with exception"), + ValueType.NUMERIC // numeric type hint + ) + ); assertThat(e.getMessage(), equalTo("Expected numeric type on field [binary], but got [binary]")); } - private void testSearchCase(Query query, List dataset, - Consumer configure, - Consumer> verify, ValueType valueType) throws IOException { + private void testSearchCase( + Query query, + List dataset, + Consumer configure, + Consumer> verify, + ValueType valueType + ) throws IOException { try (Directory directory = newDirectory()) { try (RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory)) { Document document = new Document(); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/BytesKeyedBucketOrdsTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/BytesKeyedBucketOrdsTests.java index 693ad86af0e35..8a0364b63a1b5 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/BytesKeyedBucketOrdsTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/BytesKeyedBucketOrdsTests.java @@ -111,8 +111,10 @@ public void testCollectsFromManyBuckets() { OwningBucketOrdAndValue[] values = new OwningBucketOrdAndValue[scaledRandomIntBetween(1, 10000)]; long maxOwningBucketOrd = scaledRandomIntBetween(0, values.length); for (int i = 0; i < values.length; i++) { - values[i] = randomValueOtherThanMany(seen::contains, () -> - new OwningBucketOrdAndValue(randomLongBetween(0, maxOwningBucketOrd), new BytesRef(Long.toString(randomLong())))); + values[i] = randomValueOtherThanMany( + seen::contains, + () -> new OwningBucketOrdAndValue(randomLongBetween(0, maxOwningBucketOrd), new BytesRef(Long.toString(randomLong()))) + ); seen.add(values[i]); } for (int i = 0; i < values.length; i++) { diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/DoubleTermsTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/DoubleTermsTests.java index df0b71db90511..899e02e423a6d 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/DoubleTermsTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/DoubleTermsTests.java @@ -23,11 +23,13 @@ public class DoubleTermsTests extends InternalTermsTestCase { @Override - protected InternalTerms createTestInstance(String name, - Map metadata, - InternalAggregations aggregations, - boolean showTermDocCountError, - long docCountError) { + protected InternalTerms createTestInstance( + String name, + Map metadata, + InternalAggregations aggregations, + boolean showTermDocCountError, + long docCountError + ) { BucketOrder order = BucketOrder.count(false); long minDocCount = 1; int requiredSize = 3; @@ -44,8 +46,20 @@ public class DoubleTermsTests extends InternalTermsTestCase { } BucketOrder reduceOrder = rarely() ? 
order : BucketOrder.key(true); Collections.sort(buckets, reduceOrder.comparator()); - return new DoubleTerms(name, reduceOrder, order, requiredSize, minDocCount, - metadata, format, shardSize, showTermDocCountError, otherDocCount, buckets, docCountError); + return new DoubleTerms( + name, + reduceOrder, + order, + requiredSize, + minDocCount, + metadata, + format, + shardSize, + showTermDocCountError, + otherDocCount, + buckets, + docCountError + ); } @Override @@ -69,46 +83,66 @@ protected Class implementationClass() { long docCountError = doubleTerms.getDocCountError(); Map metadata = doubleTerms.getMetadata(); switch (between(0, 8)) { - case 0: - name += randomAlphaOfLength(5); - break; - case 1: - requiredSize += between(1, 100); - break; - case 2: - minDocCount += between(1, 100); - break; - case 3: - shardSize += between(1, 100); - break; - case 4: - showTermDocCountError = showTermDocCountError == false; - break; - case 5: - otherDocCount += between(1, 100); - break; - case 6: - docCountError += between(1, 100); - break; - case 7: - buckets = new ArrayList<>(buckets); - buckets.add(new DoubleTerms.Bucket(randomDouble(), randomNonNegativeLong(), InternalAggregations.EMPTY, - showTermDocCountError, docCountError, format)); - break; - case 8: - if (metadata == null) { - metadata = new HashMap<>(1); - } else { - metadata = new HashMap<>(instance.getMetadata()); - } - metadata.put(randomAlphaOfLength(15), randomInt()); - break; - default: - throw new AssertionError("Illegal randomisation branch"); + case 0: + name += randomAlphaOfLength(5); + break; + case 1: + requiredSize += between(1, 100); + break; + case 2: + minDocCount += between(1, 100); + break; + case 3: + shardSize += between(1, 100); + break; + case 4: + showTermDocCountError = showTermDocCountError == false; + break; + case 5: + otherDocCount += between(1, 100); + break; + case 6: + docCountError += between(1, 100); + break; + case 7: + buckets = new ArrayList<>(buckets); + buckets.add( + new DoubleTerms.Bucket( + randomDouble(), + randomNonNegativeLong(), + InternalAggregations.EMPTY, + showTermDocCountError, + docCountError, + format + ) + ); + break; + case 8: + if (metadata == null) { + metadata = new HashMap<>(1); + } else { + metadata = new HashMap<>(instance.getMetadata()); + } + metadata.put(randomAlphaOfLength(15), randomInt()); + break; + default: + throw new AssertionError("Illegal randomisation branch"); } Collections.sort(buckets, doubleTerms.reduceOrder.comparator()); - return new DoubleTerms(name, doubleTerms.reduceOrder, order, requiredSize, minDocCount, metadata, format, shardSize, - showTermDocCountError, otherDocCount, buckets, docCountError); + return new DoubleTerms( + name, + doubleTerms.reduceOrder, + order, + requiredSize, + minDocCount, + metadata, + format, + shardSize, + showTermDocCountError, + otherDocCount, + buckets, + docCountError + ); } else { String name = instance.getName(); BucketOrder order = instance.order; @@ -116,25 +150,25 @@ protected Class implementationClass() { long minDocCount = instance.minDocCount; Map metadata = instance.getMetadata(); switch (between(0, 3)) { - case 0: - name += randomAlphaOfLength(5); - break; - case 1: - requiredSize += between(1, 100); - break; - case 2: - minDocCount += between(1, 100); - break; - case 3: - if (metadata == null) { - metadata = new HashMap<>(1); - } else { - metadata = new HashMap<>(instance.getMetadata()); - } - metadata.put(randomAlphaOfLength(15), randomInt()); - break; - default: - throw new AssertionError("Illegal randomisation 
branch"); + case 0: + name += randomAlphaOfLength(5); + break; + case 1: + requiredSize += between(1, 100); + break; + case 2: + minDocCount += between(1, 100); + break; + case 3: + if (metadata == null) { + metadata = new HashMap<>(1); + } else { + metadata = new HashMap<>(instance.getMetadata()); + } + metadata.put(randomAlphaOfLength(15), randomInt()); + break; + default: + throw new AssertionError("Illegal randomisation branch"); } return new UnmappedTerms(name, order, requiredSize, minDocCount, metadata); } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/InternalRareTermsTestCase.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/InternalRareTermsTestCase.java index 287d7ac122a7b..88e35dfa4f2ec 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/InternalRareTermsTestCase.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/InternalRareTermsTestCase.java @@ -28,16 +28,20 @@ public void init() { } @Override - protected final InternalRareTerms createTestInstance(String name, - Map metadata, - InternalAggregations aggregations) { + protected final InternalRareTerms createTestInstance( + String name, + Map metadata, + InternalAggregations aggregations + ) { return createTestInstance(name, metadata, aggregations, maxDocCount); } - protected abstract InternalRareTerms createTestInstance(String name, - Map metadata, - InternalAggregations aggregations, - long maxDocCount); + protected abstract InternalRareTerms createTestInstance( + String name, + Map metadata, + InternalAggregations aggregations, + long maxDocCount + ); @Override protected InternalRareTerms createUnmappedInstance(String name, Map metadata) { @@ -55,9 +59,6 @@ protected void assertReduced(InternalRareTerms reduced, List toCounts(Stream buckets) { - return buckets.collect(Collectors.toMap( - RareTerms.Bucket::getKey, - RareTerms.Bucket::getDocCount, - Long::sum)); + return buckets.collect(Collectors.toMap(RareTerms.Bucket::getKey, RareTerms.Bucket::getDocCount, Long::sum)); } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/InternalSignificantTermsTestCase.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/InternalSignificantTermsTestCase.java index f2431e9c05f1b..618308c33a8a9 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/InternalSignificantTermsTestCase.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/InternalSignificantTermsTestCase.java @@ -36,9 +36,11 @@ public void setUp() throws Exception { } @Override - protected final InternalSignificantTerms createTestInstance(String name, - Map metadata, - InternalAggregations aggregations) { + protected final InternalSignificantTerms createTestInstance( + String name, + Map metadata, + InternalAggregations aggregations + ) { final int requiredSize = randomIntBetween(1, 5); final int numBuckets = randomNumberOfBuckets(); @@ -58,21 +60,35 @@ public void setUp() throws Exception { subsetSize += subsetDf; supersetSize += supersetDf; } - return createTestInstance(name, metadata, aggregations, requiredSize, numBuckets, subsetSize, subsetDfs, - supersetSize, supersetDfs, significanceHeuristic); + return createTestInstance( + name, + metadata, + aggregations, + requiredSize, + numBuckets, + subsetSize, + subsetDfs, + supersetSize, + supersetDfs, + significanceHeuristic + ); } - protected abstract InternalSignificantTerms 
createTestInstance(String name, - Map metadata, - InternalAggregations aggregations, - int requiredSize, int numBuckets, - long subsetSize, int[] subsetDfs, - long supersetSize, int[] supersetDfs, - SignificanceHeuristic significanceHeuristic); + protected abstract InternalSignificantTerms createTestInstance( + String name, + Map metadata, + InternalAggregations aggregations, + int requiredSize, + int numBuckets, + long subsetSize, + int[] subsetDfs, + long supersetSize, + int[] supersetDfs, + SignificanceHeuristic significanceHeuristic + ); @Override - protected InternalSignificantTerms createUnmappedInstance(String name, - Map metadata) { + protected InternalSignificantTerms createUnmappedInstance(String name, Map metadata) { InternalSignificantTerms testInstance = createTestInstance(name, metadata); return new UnmappedSignificantTerms(name, testInstance.requiredSize, testInstance.minDocCount, metadata); } @@ -83,9 +99,9 @@ protected void assertReduced(InternalSignificantTerms reduced, List> counts = Arrays.asList( - SignificantTerms.Bucket::getSubsetDf, - SignificantTerms.Bucket::getSupersetDf, - SignificantTerms.Bucket::getDocCount + SignificantTerms.Bucket::getSubsetDf, + SignificantTerms.Bucket::getSupersetDf, + SignificantTerms.Bucket::getDocCount ); for (Function count : counts) { @@ -134,16 +150,19 @@ protected void assertBucket(MultiBucketsAggregation.Bucket expected, MultiBucket assertEquals(expectedSigTerm.getSupersetSize(), actualSigTerm.getSupersetSize()); } - private static Map toCounts(Stream buckets, - Function fn) { + private static Map toCounts( + Stream buckets, + Function fn + ) { return buckets.collect(Collectors.toMap(SignificantTerms.Bucket::getKey, fn, Long::sum)); } private static SignificanceHeuristic randomSignificanceHeuristic() { return randomFrom( - new JLHScore(), - new MutualInformation(randomBoolean(), randomBoolean()), - new GND(randomBoolean()), - new ChiSquare(randomBoolean(), randomBoolean())); + new JLHScore(), + new MutualInformation(randomBoolean(), randomBoolean()), + new GND(randomBoolean()), + new ChiSquare(randomBoolean(), randomBoolean()) + ); } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/InternalTermsTestCase.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/InternalTermsTestCase.java index 19b1b1a6f683c..cce943733ea86 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/InternalTermsTestCase.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/InternalTermsTestCase.java @@ -38,11 +38,13 @@ public void init() { return createTestInstance(name, metadata, aggregations, showDocCount, docCountError); } - protected abstract InternalTerms createTestInstance(String name, - Map metadata, - InternalAggregations aggregations, - boolean showTermDocCountError, - long docCountError); + protected abstract InternalTerms createTestInstance( + String name, + Map metadata, + InternalAggregations aggregations, + boolean showTermDocCountError, + long docCountError + ); @Override protected InternalTerms createUnmappedInstance(String name, Map metadata) { @@ -56,28 +58,31 @@ protected void assertReduced(InternalTerms reduced, List reducedCounts = toCounts(reduced.getBuckets().stream()); Map totalCounts = toCounts(inputs.stream().map(Terms::getBuckets).flatMap(List::stream)); - assertEquals(reducedCounts.size() == requiredSize, - totalCounts.size() >= requiredSize); + assertEquals(reducedCounts.size() == requiredSize, 
totalCounts.size() >= requiredSize); Map expectedReducedCounts = new HashMap<>(totalCounts); expectedReducedCounts.keySet().retainAll(reducedCounts.keySet()); assertEquals(expectedReducedCounts, reducedCounts); final long minFinalcount = reduced.getBuckets().isEmpty() - ? -1 - : reduced.getBuckets().get(reduced.getBuckets().size() - 1).getDocCount(); + ? -1 + : reduced.getBuckets().get(reduced.getBuckets().size() - 1).getDocCount(); Map evictedTerms = new HashMap<>(totalCounts); evictedTerms.keySet().removeAll(reducedCounts.keySet()); - Optional> missingTerm = evictedTerms.entrySet().stream() - .filter(e -> e.getValue() > minFinalcount).findAny(); + Optional> missingTerm = evictedTerms.entrySet().stream().filter(e -> e.getValue() > minFinalcount).findAny(); if (missingTerm.isPresent()) { fail("Missed term: " + missingTerm + " from " + reducedCounts); } - final long reducedTotalDocCount = reduced.getSumOfOtherDocCounts() - + reduced.getBuckets().stream().mapToLong(Terms.Bucket::getDocCount).sum(); - final long expectedTotalDocCount = inputs.stream().map(Terms::getBuckets) - .flatMap(List::stream).mapToLong(Terms.Bucket::getDocCount).sum(); + final long reducedTotalDocCount = reduced.getSumOfOtherDocCounts() + reduced.getBuckets() + .stream() + .mapToLong(Terms.Bucket::getDocCount) + .sum(); + final long expectedTotalDocCount = inputs.stream() + .map(Terms::getBuckets) + .flatMap(List::stream) + .mapToLong(Terms.Bucket::getDocCount) + .sum(); assertEquals(expectedTotalDocCount, reducedTotalDocCount); for (InternalTerms terms : inputs) { assertThat(reduced.reduceOrder, equalTo(terms.order)); @@ -86,9 +91,6 @@ protected void assertReduced(InternalTerms reduced, List toCounts(Stream buckets) { - return buckets.collect(Collectors.toMap( - Terms.Bucket::getKey, - Terms.Bucket::getDocCount, - Long::sum)); + return buckets.collect(Collectors.toMap(Terms.Bucket::getKey, Terms.Bucket::getDocCount, Long::sum)); } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/KeywordTermsAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/KeywordTermsAggregatorTests.java index 74a998af782b7..2565cd272a9e3 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/KeywordTermsAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/KeywordTermsAggregatorTests.java @@ -43,54 +43,66 @@ public class KeywordTermsAggregatorTests extends AggregatorTestCase { d.add(String.valueOf(i)); } } - dataset = d; + dataset = d; } public void testMatchNoDocs() throws IOException { - testSearchCase(new MatchNoDocsQuery(), dataset, + testSearchCase( + new MatchNoDocsQuery(), + dataset, aggregation -> aggregation.field(KEYWORD_FIELD), - agg -> assertEquals(0, agg.getBuckets().size()), null // without type hint + agg -> assertEquals(0, agg.getBuckets().size()), + null // without type hint ); - testSearchCase(new MatchNoDocsQuery(), dataset, + testSearchCase( + new MatchNoDocsQuery(), + dataset, aggregation -> aggregation.field(KEYWORD_FIELD), - agg -> assertEquals(0, agg.getBuckets().size()), ValueType.STRING // with type hint + agg -> assertEquals(0, agg.getBuckets().size()), + ValueType.STRING // with type hint ); } public void testMatchAllDocs() throws IOException { Query query = new MatchAllDocsQuery(); - testSearchCase(query, dataset, - aggregation -> aggregation.field(KEYWORD_FIELD), - agg -> { - assertEquals(9, agg.getBuckets().size()); - for (int i = 0; i < 9; i++) { - 
StringTerms.Bucket bucket = (StringTerms.Bucket) agg.getBuckets().get(i); - assertThat(bucket.getKey(), equalTo(String.valueOf(9L - i))); - assertThat(bucket.getDocCount(), equalTo(9L - i)); - } - }, null // without type hint + testSearchCase(query, dataset, aggregation -> aggregation.field(KEYWORD_FIELD), agg -> { + assertEquals(9, agg.getBuckets().size()); + for (int i = 0; i < 9; i++) { + StringTerms.Bucket bucket = (StringTerms.Bucket) agg.getBuckets().get(i); + assertThat(bucket.getKey(), equalTo(String.valueOf(9L - i))); + assertThat(bucket.getDocCount(), equalTo(9L - i)); + } + }, + null // without type hint ); - testSearchCase(query, dataset, - aggregation -> aggregation.field(KEYWORD_FIELD), - agg -> { - assertEquals(9, agg.getBuckets().size()); - for (int i = 0; i < 9; i++) { - StringTerms.Bucket bucket = (StringTerms.Bucket) agg.getBuckets().get(i); - assertThat(bucket.getKey(), equalTo(String.valueOf(9L - i))); - assertThat(bucket.getDocCount(), equalTo(9L - i)); - } - }, ValueType.STRING // with type hint + testSearchCase(query, dataset, aggregation -> aggregation.field(KEYWORD_FIELD), agg -> { + assertEquals(9, agg.getBuckets().size()); + for (int i = 0; i < 9; i++) { + StringTerms.Bucket bucket = (StringTerms.Bucket) agg.getBuckets().get(i); + assertThat(bucket.getKey(), equalTo(String.valueOf(9L - i))); + assertThat(bucket.getDocCount(), equalTo(9L - i)); + } + }, + ValueType.STRING // with type hint ); } - private void testSearchCase(Query query, List dataset, - Consumer configure, - Consumer> verify, ValueType valueType) throws IOException { - MappedFieldType keywordFieldType - = new KeywordFieldMapper.KeywordFieldType(KEYWORD_FIELD, randomBoolean(), true, Collections.emptyMap()); + private void testSearchCase( + Query query, + List dataset, + Consumer configure, + Consumer> verify, + ValueType valueType + ) throws IOException { + MappedFieldType keywordFieldType = new KeywordFieldMapper.KeywordFieldType( + KEYWORD_FIELD, + randomBoolean(), + true, + Collections.emptyMap() + ); try (Directory directory = newDirectory()) { try (RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory)) { Document document = new Document(); @@ -115,7 +127,6 @@ private void testSearchCase(Query query, List dataset, configure.accept(aggregationBuilder); } - InternalMappedTerms rareTerms = searchAndReduce(indexSearcher, query, aggregationBuilder, keywordFieldType); verify.accept(rareTerms); } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/LongKeyedBucketOrdsTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/LongKeyedBucketOrdsTests.java index 621d281b485dc..c4aa5eeaaffc1 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/LongKeyedBucketOrdsTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/LongKeyedBucketOrdsTests.java @@ -125,8 +125,10 @@ private void assertCollectsFromManyBuckets(LongKeyedBucketOrds ords, int maxAllo OwningBucketOrdAndValue[] values = new OwningBucketOrdAndValue[scaledRandomIntBetween(1, 10000)]; long maxOwningBucketOrd = Long.MIN_VALUE; for (int i = 0; i < values.length; i++) { - values[i] = randomValueOtherThanMany(seen::contains, () -> - new OwningBucketOrdAndValue(randomLongBetween(0, maxAllowedOwningBucketOrd), randomLongBetween(minValue, maxValue))); + values[i] = randomValueOtherThanMany( + seen::contains, + () -> new OwningBucketOrdAndValue(randomLongBetween(0, maxAllowedOwningBucketOrd), 
randomLongBetween(minValue, maxValue)) + ); seen.add(values[i]); maxOwningBucketOrd = Math.max(maxOwningBucketOrd, values[i].owningBucketOrd); } @@ -147,7 +149,6 @@ private void assertCollectsFromManyBuckets(LongKeyedBucketOrds ords, int maxAllo assertThat(ords.add(0, 0), equalTo(-1L)); assertThat(ords.add(1, 0), equalTo(-2L)); - for (long owningBucketOrd = 0; owningBucketOrd <= maxAllowedOwningBucketOrd; owningBucketOrd++) { long expectedCount = 0; LongKeyedBucketOrds.BucketOrdsEnum ordsEnum = ords.ordsEnum(owningBucketOrd); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/LongRareTermsTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/LongRareTermsTests.java index 6249c6d14bba5..134cd822d7d4a 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/LongRareTermsTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/LongRareTermsTests.java @@ -22,10 +22,12 @@ public class LongRareTermsTests extends InternalRareTermsTestCase { @Override - protected InternalRareTerms createTestInstance(String name, - Map metadata, - InternalAggregations aggregations, - long maxDocCount) { + protected InternalRareTerms createTestInstance( + String name, + Map metadata, + InternalAggregations aggregations, + long maxDocCount + ) { BucketOrder order = BucketOrder.count(false); DocValueFormat format = randomNumericDocValueFormat(); List buckets = new ArrayList<>(); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/LongTermsTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/LongTermsTests.java index 7cc57fb18f646..9531fba31371f 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/LongTermsTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/LongTermsTests.java @@ -23,11 +23,13 @@ public class LongTermsTests extends InternalTermsTestCase { @Override - protected InternalTerms createTestInstance(String name, - Map metadata, - InternalAggregations aggregations, - boolean showTermDocCountError, - long docCountError) { + protected InternalTerms createTestInstance( + String name, + Map metadata, + InternalAggregations aggregations, + boolean showTermDocCountError, + long docCountError + ) { BucketOrder order = BucketOrder.count(false); long minDocCount = 1; int requiredSize = 3; @@ -44,8 +46,20 @@ public class LongTermsTests extends InternalTermsTestCase { } BucketOrder reduceOrder = rarely() ? 
order : BucketOrder.key(true); Collections.sort(buckets, reduceOrder.comparator()); - return new LongTerms(name, reduceOrder, order, requiredSize, minDocCount, - metadata, format, shardSize, showTermDocCountError, otherDocCount, buckets, docCountError); + return new LongTerms( + name, + reduceOrder, + order, + requiredSize, + minDocCount, + metadata, + format, + shardSize, + showTermDocCountError, + otherDocCount, + buckets, + docCountError + ); } @Override @@ -69,46 +83,66 @@ protected Class implementationClass() { long docCountError = longTerms.getDocCountError(); Map metadata = longTerms.getMetadata(); switch (between(0, 8)) { - case 0: - name += randomAlphaOfLength(5); - break; - case 1: - requiredSize += between(1, 100); - break; - case 2: - minDocCount += between(1, 100); - break; - case 3: - shardSize += between(1, 100); - break; - case 4: - showTermDocCountError = showTermDocCountError == false; - break; - case 5: - otherDocCount += between(1, 100); - break; - case 6: - docCountError += between(1, 100); - break; - case 7: - buckets = new ArrayList<>(buckets); - buckets.add(new LongTerms.Bucket(randomLong(), randomNonNegativeLong(), InternalAggregations.EMPTY, showTermDocCountError, - docCountError, format)); - break; - case 8: - if (metadata == null) { - metadata = new HashMap<>(1); - } else { - metadata = new HashMap<>(instance.getMetadata()); - } - metadata.put(randomAlphaOfLength(15), randomInt()); - break; - default: - throw new AssertionError("Illegal randomisation branch"); + case 0: + name += randomAlphaOfLength(5); + break; + case 1: + requiredSize += between(1, 100); + break; + case 2: + minDocCount += between(1, 100); + break; + case 3: + shardSize += between(1, 100); + break; + case 4: + showTermDocCountError = showTermDocCountError == false; + break; + case 5: + otherDocCount += between(1, 100); + break; + case 6: + docCountError += between(1, 100); + break; + case 7: + buckets = new ArrayList<>(buckets); + buckets.add( + new LongTerms.Bucket( + randomLong(), + randomNonNegativeLong(), + InternalAggregations.EMPTY, + showTermDocCountError, + docCountError, + format + ) + ); + break; + case 8: + if (metadata == null) { + metadata = new HashMap<>(1); + } else { + metadata = new HashMap<>(instance.getMetadata()); + } + metadata.put(randomAlphaOfLength(15), randomInt()); + break; + default: + throw new AssertionError("Illegal randomisation branch"); } Collections.sort(buckets, longTerms.reduceOrder.comparator()); - return new LongTerms(name, longTerms.reduceOrder, order, requiredSize, minDocCount, metadata, format, shardSize, - showTermDocCountError, otherDocCount, buckets, docCountError); + return new LongTerms( + name, + longTerms.reduceOrder, + order, + requiredSize, + minDocCount, + metadata, + format, + shardSize, + showTermDocCountError, + otherDocCount, + buckets, + docCountError + ); } else { String name = instance.getName(); BucketOrder order = instance.order; @@ -116,25 +150,25 @@ protected Class implementationClass() { long minDocCount = instance.minDocCount; Map metadata = instance.getMetadata(); switch (between(0, 3)) { - case 0: - name += randomAlphaOfLength(5); - break; - case 1: - requiredSize += between(1, 100); - break; - case 2: - minDocCount += between(1, 100); - break; - case 3: - if (metadata == null) { - metadata = new HashMap<>(1); - } else { - metadata = new HashMap<>(instance.getMetadata()); - } - metadata.put(randomAlphaOfLength(15), randomInt()); - break; - default: - throw new AssertionError("Illegal randomisation branch"); + case 0: + name += 
randomAlphaOfLength(5); + break; + case 1: + requiredSize += between(1, 100); + break; + case 2: + minDocCount += between(1, 100); + break; + case 3: + if (metadata == null) { + metadata = new HashMap<>(1); + } else { + metadata = new HashMap<>(instance.getMetadata()); + } + metadata.put(randomAlphaOfLength(15), randomInt()); + break; + default: + throw new AssertionError("Illegal randomisation branch"); } return new UnmappedTerms(name, order, requiredSize, minDocCount, metadata); } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/NumericTermsAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/NumericTermsAggregatorTests.java index e4ca8de2f865f..97fc495adc82d 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/NumericTermsAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/NumericTermsAggregatorTests.java @@ -43,47 +43,51 @@ public class NumericTermsAggregatorTests extends AggregatorTestCase { d.add((long) i); } } - dataset = d; + dataset = d; } public void testMatchNoDocs() throws IOException { - testSearchCase(new MatchNoDocsQuery(), dataset, + testSearchCase( + new MatchNoDocsQuery(), + dataset, aggregation -> aggregation.field(LONG_FIELD), - agg -> assertEquals(0, agg.getBuckets().size()), null // without type hint + agg -> assertEquals(0, agg.getBuckets().size()), + null // without type hint ); - testSearchCase(new MatchNoDocsQuery(), dataset, + testSearchCase( + new MatchNoDocsQuery(), + dataset, aggregation -> aggregation.field(LONG_FIELD), - agg -> assertEquals(0, agg.getBuckets().size()), ValueType.NUMERIC // with type hint + agg -> assertEquals(0, agg.getBuckets().size()), + ValueType.NUMERIC // with type hint ); } public void testMatchAllDocs() throws IOException { Query query = new MatchAllDocsQuery(); - testSearchCase(query, dataset, - aggregation -> aggregation.field(LONG_FIELD), - agg -> { - assertEquals(9, agg.getBuckets().size()); - for (int i = 0; i < 9; i++) { - LongTerms.Bucket bucket = (LongTerms.Bucket) agg.getBuckets().get(i); - assertThat(bucket.getKey(), equalTo(9L - i)); - assertThat(bucket.getDocCount(), equalTo(9L - i)); - } - }, null //without type hint + testSearchCase(query, dataset, aggregation -> aggregation.field(LONG_FIELD), agg -> { + assertEquals(9, agg.getBuckets().size()); + for (int i = 0; i < 9; i++) { + LongTerms.Bucket bucket = (LongTerms.Bucket) agg.getBuckets().get(i); + assertThat(bucket.getKey(), equalTo(9L - i)); + assertThat(bucket.getDocCount(), equalTo(9L - i)); + } + }, + null // without type hint ); - testSearchCase(query, dataset, - aggregation -> aggregation.field(LONG_FIELD), - agg -> { - assertEquals(9, agg.getBuckets().size()); - for (int i = 0; i < 9; i++) { - LongTerms.Bucket bucket = (LongTerms.Bucket) agg.getBuckets().get(i); - assertThat(bucket.getKey(), equalTo(9L - i)); - assertThat(bucket.getDocCount(), equalTo(9L - i)); - } - }, ValueType.NUMERIC //with type hint + testSearchCase(query, dataset, aggregation -> aggregation.field(LONG_FIELD), agg -> { + assertEquals(9, agg.getBuckets().size()); + for (int i = 0; i < 9; i++) { + LongTerms.Bucket bucket = (LongTerms.Bucket) agg.getBuckets().get(i); + assertThat(bucket.getKey(), equalTo(9L - i)); + assertThat(bucket.getDocCount(), equalTo(9L - i)); + } + }, + ValueType.NUMERIC // with type hint ); } @@ -92,29 +96,53 @@ public void testBadIncludeExclude() throws IOException { // Numerics don't support any regex 
include/exclude, so should fail no matter what we do - AggregationExecutionException e = expectThrows(AggregationExecutionException.class, - () -> testSearchCase(new MatchNoDocsQuery(), dataset, + AggregationExecutionException e = expectThrows( + AggregationExecutionException.class, + () -> testSearchCase( + new MatchNoDocsQuery(), + dataset, aggregation -> aggregation.field(LONG_FIELD).includeExclude(includeExclude).format("yyyy-MM-dd"), - agg -> fail("test should have failed with exception"), null - )); - assertThat(e.getMessage(), equalTo("Aggregation [_name] cannot support regular expression style " + - "include/exclude settings as they can only be applied to string fields. Use an array of numeric " + - "values for include/exclude clauses used to filter numeric fields")); - - e = expectThrows(AggregationExecutionException.class, - () -> testSearchCase(new MatchNoDocsQuery(), dataset, + agg -> fail("test should have failed with exception"), + null + ) + ); + assertThat( + e.getMessage(), + equalTo( + "Aggregation [_name] cannot support regular expression style " + + "include/exclude settings as they can only be applied to string fields. Use an array of numeric " + + "values for include/exclude clauses used to filter numeric fields" + ) + ); + + e = expectThrows( + AggregationExecutionException.class, + () -> testSearchCase( + new MatchNoDocsQuery(), + dataset, aggregation -> aggregation.field(LONG_FIELD).includeExclude(includeExclude).format("yyyy-MM-dd"), - agg -> fail("test should have failed with exception"), ValueType.NUMERIC // with type hint - )); - assertThat(e.getMessage(), equalTo("Aggregation [_name] cannot support regular expression style " + - "include/exclude settings as they can only be applied to string fields. Use an array of numeric " + - "values for include/exclude clauses used to filter numeric fields")); + agg -> fail("test should have failed with exception"), + ValueType.NUMERIC // with type hint + ) + ); + assertThat( + e.getMessage(), + equalTo( + "Aggregation [_name] cannot support regular expression style " + + "include/exclude settings as they can only be applied to string fields. 
Use an array of numeric " + + "values for include/exclude clauses used to filter numeric fields" + ) + ); } - private void testSearchCase(Query query, List dataset, - Consumer configure, - Consumer> verify, ValueType valueType) throws IOException { + private void testSearchCase( + Query query, + List dataset, + Consumer configure, + Consumer> verify, + ValueType valueType + ) throws IOException { try (Directory directory = newDirectory()) { try (RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory)) { Document document = new Document(); @@ -137,8 +165,7 @@ private void testSearchCase(Query query, List dataset, configure.accept(aggregationBuilder); } - MappedFieldType longFieldType - = new NumberFieldMapper.NumberFieldType(LONG_FIELD, NumberFieldMapper.NumberType.LONG); + MappedFieldType longFieldType = new NumberFieldMapper.NumberFieldType(LONG_FIELD, NumberFieldMapper.NumberType.LONG); InternalMappedTerms rareTerms = searchAndReduce(indexSearcher, query, aggregationBuilder, longFieldType); verify.accept(rareTerms); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/RareTermsAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/RareTermsAggregatorTests.java index 26dc0e10b1320..94c182ba2d8e1 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/RareTermsAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/RareTermsAggregatorTests.java @@ -86,15 +86,19 @@ public class RareTermsAggregatorTests extends AggregatorTestCase { d.add(i); } } - dataset = d; + dataset = d; } public void testMatchNoDocs() throws IOException { - testSearchCase(new MatchNoDocsQuery(), dataset, + testSearchCase( + new MatchNoDocsQuery(), + dataset, aggregation -> aggregation.field(KEYWORD_FIELD).maxDocCount(1), agg -> assertEquals(0, agg.getBuckets().size()) ); - testSearchCase(new MatchNoDocsQuery(), dataset, + testSearchCase( + new MatchNoDocsQuery(), + dataset, aggregation -> aggregation.field(LONG_FIELD).maxDocCount(1), agg -> assertEquals(0, agg.getBuckets().size()) ); @@ -103,24 +107,18 @@ public void testMatchNoDocs() throws IOException { public void testMatchAllDocs() throws IOException { Query query = new MatchAllDocsQuery(); - testSearchCase(query, dataset, - aggregation -> aggregation.field(LONG_FIELD).maxDocCount(1), - agg -> { - assertEquals(1, agg.getBuckets().size()); - LongRareTerms.Bucket bucket = (LongRareTerms.Bucket) agg.getBuckets().get(0); - assertThat(bucket.getKey(), equalTo(1L)); - assertThat(bucket.getDocCount(), equalTo(1L)); - } - ); - testSearchCase(query, dataset, - aggregation -> aggregation.field(KEYWORD_FIELD).maxDocCount(1), - agg -> { - assertEquals(1, agg.getBuckets().size()); - StringRareTerms.Bucket bucket = (StringRareTerms.Bucket) agg.getBuckets().get(0); - assertThat(bucket.getKeyAsString(), equalTo("1")); - assertThat(bucket.getDocCount(), equalTo(1L)); - } - ); + testSearchCase(query, dataset, aggregation -> aggregation.field(LONG_FIELD).maxDocCount(1), agg -> { + assertEquals(1, agg.getBuckets().size()); + LongRareTerms.Bucket bucket = (LongRareTerms.Bucket) agg.getBuckets().get(0); + assertThat(bucket.getKey(), equalTo(1L)); + assertThat(bucket.getDocCount(), equalTo(1L)); + }); + testSearchCase(query, dataset, aggregation -> aggregation.field(KEYWORD_FIELD).maxDocCount(1), agg -> { + assertEquals(1, agg.getBuckets().size()); + StringRareTerms.Bucket bucket = (StringRareTerms.Bucket) 
agg.getBuckets().get(0); + assertThat(bucket.getKeyAsString(), equalTo("1")); + assertThat(bucket.getDocCount(), equalTo(1L)); + }); } public void testManyDocsOneRare() throws IOException { @@ -135,33 +133,29 @@ public void testManyDocsOneRare() throws IOException { // The one rare term d.add(0L); - testSearchCase(query, d, - aggregation -> aggregation.field(LONG_FIELD).maxDocCount(1), - agg -> { - assertEquals(1, agg.getBuckets().size()); - LongRareTerms.Bucket bucket = (LongRareTerms.Bucket) agg.getBuckets().get(0); - assertThat(bucket.getKey(), equalTo(0L)); - assertThat(bucket.getDocCount(), equalTo(1L)); - } - ); - testSearchCase(query, d, - aggregation -> aggregation.field(KEYWORD_FIELD).maxDocCount(1), - agg -> { - assertEquals(1, agg.getBuckets().size()); - StringRareTerms.Bucket bucket = (StringRareTerms.Bucket) agg.getBuckets().get(0); - assertThat(bucket.getKeyAsString(), equalTo("0")); - assertThat(bucket.getDocCount(), equalTo(1L)); - } - ); + testSearchCase(query, d, aggregation -> aggregation.field(LONG_FIELD).maxDocCount(1), agg -> { + assertEquals(1, agg.getBuckets().size()); + LongRareTerms.Bucket bucket = (LongRareTerms.Bucket) agg.getBuckets().get(0); + assertThat(bucket.getKey(), equalTo(0L)); + assertThat(bucket.getDocCount(), equalTo(1L)); + }); + testSearchCase(query, d, aggregation -> aggregation.field(KEYWORD_FIELD).maxDocCount(1), agg -> { + assertEquals(1, agg.getBuckets().size()); + StringRareTerms.Bucket bucket = (StringRareTerms.Bucket) agg.getBuckets().get(0); + assertThat(bucket.getKeyAsString(), equalTo("0")); + assertThat(bucket.getDocCount(), equalTo(1L)); + }); } public void testIncludeExclude() throws IOException { Query query = new MatchAllDocsQuery(); - testSearchCase(query, dataset, + testSearchCase( + query, + dataset, aggregation -> aggregation.field(LONG_FIELD) .maxDocCount(2) // bump to 2 since we're only including "2" - .includeExclude(new IncludeExclude(new long[]{2}, new long[]{})), + .includeExclude(new IncludeExclude(new long[] { 2 }, new long[] {})), agg -> { assertEquals(1, agg.getBuckets().size()); LongRareTerms.Bucket bucket = (LongRareTerms.Bucket) agg.getBuckets().get(0); @@ -169,10 +163,12 @@ public void testIncludeExclude() throws IOException { assertThat(bucket.getDocCount(), equalTo(2L)); } ); - testSearchCase(query, dataset, + testSearchCase( + query, + dataset, aggregation -> aggregation.field(KEYWORD_FIELD) .maxDocCount(2) // bump to 2 since we're only including "2" - .includeExclude(new IncludeExclude(new String[]{"2"}, new String[]{})), + .includeExclude(new IncludeExclude(new String[] { "2" }, new String[] {})), agg -> { assertEquals(1, agg.getBuckets().size()); StringRareTerms.Bucket bucket = (StringRareTerms.Bucket) agg.getBuckets().get(0); @@ -186,56 +182,60 @@ public void testEmbeddedMaxAgg() throws IOException { Query query = new MatchAllDocsQuery(); testSearchCase(query, dataset, aggregation -> { - MaxAggregationBuilder max = new MaxAggregationBuilder("the_max").field(LONG_FIELD); - aggregation.field(LONG_FIELD).maxDocCount(1).subAggregation(max); - }, - agg -> { - assertEquals(1, agg.getBuckets().size()); - LongRareTerms.Bucket bucket = (LongRareTerms.Bucket) agg.getBuckets().get(0); - assertThat(bucket.getKey(), equalTo(1L)); - assertThat(bucket.getDocCount(), equalTo(1L)); - - Aggregations children = bucket.getAggregations(); - assertThat(children.asList().size(), equalTo(1)); - assertThat(children.asList().get(0).getName(), equalTo("the_max")); - assertThat(((Max)(children.asList().get(0))).getValue(), 
equalTo(1.0)); - } - ); + MaxAggregationBuilder max = new MaxAggregationBuilder("the_max").field(LONG_FIELD); + aggregation.field(LONG_FIELD).maxDocCount(1).subAggregation(max); + }, agg -> { + assertEquals(1, agg.getBuckets().size()); + LongRareTerms.Bucket bucket = (LongRareTerms.Bucket) agg.getBuckets().get(0); + assertThat(bucket.getKey(), equalTo(1L)); + assertThat(bucket.getDocCount(), equalTo(1L)); + + Aggregations children = bucket.getAggregations(); + assertThat(children.asList().size(), equalTo(1)); + assertThat(children.asList().get(0).getName(), equalTo("the_max")); + assertThat(((Max) (children.asList().get(0))).getValue(), equalTo(1.0)); + }); testSearchCase(query, dataset, aggregation -> { - MaxAggregationBuilder max = new MaxAggregationBuilder("the_max").field(LONG_FIELD); - aggregation.field(KEYWORD_FIELD).maxDocCount(1).subAggregation(max); - }, - agg -> { - assertEquals(1, agg.getBuckets().size()); - StringRareTerms.Bucket bucket = (StringRareTerms.Bucket) agg.getBuckets().get(0); - assertThat(bucket.getKey(), equalTo("1")); - assertThat(bucket.getDocCount(), equalTo(1L)); - - Aggregations children = bucket.getAggregations(); - assertThat(children.asList().size(), equalTo(1)); - assertThat(children.asList().get(0).getName(), equalTo("the_max")); - assertThat(((Max)(children.asList().get(0))).getValue(), equalTo(1.0)); - } - ); + MaxAggregationBuilder max = new MaxAggregationBuilder("the_max").field(LONG_FIELD); + aggregation.field(KEYWORD_FIELD).maxDocCount(1).subAggregation(max); + }, agg -> { + assertEquals(1, agg.getBuckets().size()); + StringRareTerms.Bucket bucket = (StringRareTerms.Bucket) agg.getBuckets().get(0); + assertThat(bucket.getKey(), equalTo("1")); + assertThat(bucket.getDocCount(), equalTo(1L)); + + Aggregations children = bucket.getAggregations(); + assertThat(children.asList().size(), equalTo(1)); + assertThat(children.asList().get(0).getName(), equalTo("the_max")); + assertThat(((Max) (children.asList().get(0))).getValue(), equalTo(1.0)); + }); } public void testEmpty() throws IOException { Query query = new MatchAllDocsQuery(); - testSearchCase(query, Collections.emptyList(), + testSearchCase( + query, + Collections.emptyList(), aggregation -> aggregation.field(LONG_FIELD).maxDocCount(1), agg -> assertEquals(0, agg.getBuckets().size()) ); - testSearchCase(query, Collections.emptyList(), + testSearchCase( + query, + Collections.emptyList(), aggregation -> aggregation.field(KEYWORD_FIELD).maxDocCount(1), agg -> assertEquals(0, agg.getBuckets().size()) ); - testSearchCase(query, Collections.emptyList(), + testSearchCase( + query, + Collections.emptyList(), aggregation -> aggregation.field(LONG_FIELD).maxDocCount(1), agg -> assertEquals(0, agg.getBuckets().size()) ); - testSearchCase(query, Collections.emptyList(), + testSearchCase( + query, + Collections.emptyList(), aggregation -> aggregation.field(KEYWORD_FIELD).maxDocCount(1), agg -> assertEquals(0, agg.getBuckets().size()) ); @@ -249,15 +249,13 @@ public void testUnmapped() throws Exception { document.add(new NumericDocValuesField("long", 0L)); indexWriter.addDocument(document); MappedFieldType fieldType1 = new KeywordFieldMapper.KeywordFieldType("another_string"); - MappedFieldType fieldType2 - = new NumberFieldMapper.NumberFieldType("another_long", NumberFieldMapper.NumberType.LONG); + MappedFieldType fieldType2 = new NumberFieldMapper.NumberFieldType("another_long", NumberFieldMapper.NumberType.LONG); try (IndexReader indexReader = maybeWrapReaderEs(indexWriter.getReader())) { IndexSearcher 
indexSearcher = newIndexSearcher(indexReader); - String[] fieldNames = new String[]{"string", "long"}; + String[] fieldNames = new String[] { "string", "long" }; for (int i = 0; i < fieldNames.length; i++) { - RareTermsAggregationBuilder aggregationBuilder = new RareTermsAggregationBuilder("_name") - .field(fieldNames[i]); + RareTermsAggregationBuilder aggregationBuilder = new RareTermsAggregationBuilder("_name").field(fieldNames[i]); Aggregator aggregator = createAggregator(aggregationBuilder, indexSearcher, fieldType1, fieldType2); aggregator.preCollection(); indexSearcher.search(new MatchAllDocsQuery(), aggregator); @@ -290,62 +288,53 @@ public void testRangeField() throws Exception { try (IndexReader indexReader = maybeWrapReaderEs(indexWriter.getReader())) { IndexSearcher indexSearcher = newIndexSearcher(indexReader); - RareTermsAggregationBuilder aggregationBuilder = new RareTermsAggregationBuilder("_name") - .field("field"); - expectThrows(IllegalArgumentException.class, - () -> createAggregator(aggregationBuilder, indexSearcher, fieldType)); + RareTermsAggregationBuilder aggregationBuilder = new RareTermsAggregationBuilder("_name").field("field"); + expectThrows(IllegalArgumentException.class, () -> createAggregator(aggregationBuilder, indexSearcher, fieldType)); } } } } - public void testNestedTerms() throws IOException { Query query = new MatchAllDocsQuery(); testSearchCase(query, dataset, aggregation -> { - TermsAggregationBuilder terms = new TermsAggregationBuilder("the_terms") - .field(KEYWORD_FIELD); - aggregation.field(LONG_FIELD).maxDocCount(1).subAggregation(terms); - }, - agg -> { - assertEquals(1, agg.getBuckets().size()); - LongRareTerms.Bucket bucket = (LongRareTerms.Bucket) agg.getBuckets().get(0); - assertThat(bucket.getKey(), equalTo(1L)); - assertThat(bucket.getDocCount(), equalTo(1L)); - - Aggregations children = bucket.getAggregations(); - assertThat(children.asList().size(), equalTo(1)); - assertThat(children.asList().get(0).getName(), equalTo("the_terms")); - assertThat(((Terms)(children.asList().get(0))).getBuckets().size(), equalTo(1)); - assertThat(((Terms)(children.asList().get(0))).getBuckets().get(0).getKeyAsString(), equalTo("1")); - } - ); + TermsAggregationBuilder terms = new TermsAggregationBuilder("the_terms").field(KEYWORD_FIELD); + aggregation.field(LONG_FIELD).maxDocCount(1).subAggregation(terms); + }, agg -> { + assertEquals(1, agg.getBuckets().size()); + LongRareTerms.Bucket bucket = (LongRareTerms.Bucket) agg.getBuckets().get(0); + assertThat(bucket.getKey(), equalTo(1L)); + assertThat(bucket.getDocCount(), equalTo(1L)); + + Aggregations children = bucket.getAggregations(); + assertThat(children.asList().size(), equalTo(1)); + assertThat(children.asList().get(0).getName(), equalTo("the_terms")); + assertThat(((Terms) (children.asList().get(0))).getBuckets().size(), equalTo(1)); + assertThat(((Terms) (children.asList().get(0))).getBuckets().get(0).getKeyAsString(), equalTo("1")); + }); testSearchCase(query, dataset, aggregation -> { - TermsAggregationBuilder terms = new TermsAggregationBuilder("the_terms") - .field(KEYWORD_FIELD); - aggregation.field(KEYWORD_FIELD).maxDocCount(1).subAggregation(terms); - }, - agg -> { - assertEquals(1, agg.getBuckets().size()); - StringRareTerms.Bucket bucket = (StringRareTerms.Bucket) agg.getBuckets().get(0); - assertThat(bucket.getKey(), equalTo("1")); - assertThat(bucket.getDocCount(), equalTo(1L)); - - Aggregations children = bucket.getAggregations(); - assertThat(children.asList().size(), equalTo(1)); - 
assertThat(children.asList().get(0).getName(), equalTo("the_terms")); - assertThat(((Terms)(children.asList().get(0))).getBuckets().size(), equalTo(1)); - assertThat(((Terms)(children.asList().get(0))).getBuckets().get(0).getKeyAsString(), equalTo("1")); - } - ); + TermsAggregationBuilder terms = new TermsAggregationBuilder("the_terms").field(KEYWORD_FIELD); + aggregation.field(KEYWORD_FIELD).maxDocCount(1).subAggregation(terms); + }, agg -> { + assertEquals(1, agg.getBuckets().size()); + StringRareTerms.Bucket bucket = (StringRareTerms.Bucket) agg.getBuckets().get(0); + assertThat(bucket.getKey(), equalTo("1")); + assertThat(bucket.getDocCount(), equalTo(1L)); + + Aggregations children = bucket.getAggregations(); + assertThat(children.asList().size(), equalTo(1)); + assertThat(children.asList().get(0).getName(), equalTo("the_terms")); + assertThat(((Terms) (children.asList().get(0))).getBuckets().size(), equalTo(1)); + assertThat(((Terms) (children.asList().get(0))).getBuckets().get(0).getKeyAsString(), equalTo("1")); + }); } public void testInsideTerms() throws IOException { - for (String field : new String[] {KEYWORD_FIELD, LONG_FIELD}) { - AggregationBuilder builder = new TermsAggregationBuilder("terms").field("even_odd").subAggregation( - new RareTermsAggregationBuilder("rare").field(field).maxDocCount(2)); + for (String field : new String[] { KEYWORD_FIELD, LONG_FIELD }) { + AggregationBuilder builder = new TermsAggregationBuilder("terms").field("even_odd") + .subAggregation(new RareTermsAggregationBuilder("rare").field(field).maxDocCount(2)); StringTerms terms = executeTestCase(new MatchAllDocsQuery(), dataset, builder); StringTerms.Bucket even = terms.getBucketByKey("even"); @@ -374,19 +363,13 @@ public void testGlobalAggregationWithScore() throws IOException { indexWriter.addDocument(document); try (IndexReader indexReader = maybeWrapReaderEs(indexWriter.getReader())) { IndexSearcher indexSearcher = newIndexSearcher(indexReader); - GlobalAggregationBuilder globalBuilder = new GlobalAggregationBuilder("global") - .subAggregation( - new RareTermsAggregationBuilder("terms") - .field("keyword") - .subAggregation( - new RareTermsAggregationBuilder("sub_terms") - .field("keyword") - .subAggregation( - new TopHitsAggregationBuilder("top_hits") - .storedField("_none_") - ) - ) - ); + GlobalAggregationBuilder globalBuilder = new GlobalAggregationBuilder("global").subAggregation( + new RareTermsAggregationBuilder("terms").field("keyword") + .subAggregation( + new RareTermsAggregationBuilder("sub_terms").field("keyword") + .subAggregation(new TopHitsAggregationBuilder("top_hits").storedField("_none_")) + ) + ); MappedFieldType fieldType = new KeywordFieldMapper.KeywordFieldType("keyword"); @@ -396,7 +379,7 @@ public void testGlobalAggregationWithScore() throws IOException { for (MultiBucketsAggregation.Bucket bucket : terms.getBuckets()) { InternalMultiBucketAggregation subTerms = bucket.getAggregations().get("sub_terms"); assertThat(subTerms.getBuckets().size(), equalTo(1)); - MultiBucketsAggregation.Bucket subBucket = subTerms.getBuckets().get(0); + MultiBucketsAggregation.Bucket subBucket = subTerms.getBuckets().get(0); InternalTopHits topHits = subBucket.getAggregations().get("top_hits"); assertThat(topHits.getHits().getHits().length, equalTo(1)); for (SearchHit hit : topHits.getHits()) { @@ -420,17 +403,18 @@ public void testWithNestedAggregations() throws IOException { } indexWriter.commit(); - NestedAggregationBuilder nested = new NestedAggregationBuilder("nested", "nested_object") - 
.subAggregation(new RareTermsAggregationBuilder("terms") - .field("nested_value") - .maxDocCount(1) - ); - MappedFieldType fieldType - = new NumberFieldMapper.NumberFieldType("nested_value", NumberFieldMapper.NumberType.LONG); + NestedAggregationBuilder nested = new NestedAggregationBuilder("nested", "nested_object").subAggregation( + new RareTermsAggregationBuilder("terms").field("nested_value").maxDocCount(1) + ); + MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("nested_value", NumberFieldMapper.NumberType.LONG); try (IndexReader indexReader = wrapInMockESDirectoryReader(DirectoryReader.open(directory))) { - InternalNested result = searchAndReduce(newIndexSearcher(indexReader), + InternalNested result = searchAndReduce( + newIndexSearcher(indexReader), // match root document only - new DocValuesFieldExistsQuery(PRIMARY_TERM_NAME), nested, fieldType); + new DocValuesFieldExistsQuery(PRIMARY_TERM_NAME), + nested, + fieldType + ); InternalMultiBucketAggregation terms = result.getAggregations().get("terms"); assertThat(terms.getBuckets().size(), equalTo(1)); assertThat(terms.getBuckets().get(0).getKeyAsString(), equalTo("8")); @@ -451,27 +435,31 @@ public void testWithNestedScoringAggregations() throws IOException { indexWriter.addDocuments(generateDocsWithNested(Integer.toString(i), i, nestedValues)); } indexWriter.commit(); - for (boolean withScore : new boolean[]{true, false}) { - NestedAggregationBuilder nested = new NestedAggregationBuilder("nested", "nested_object") - .subAggregation(new RareTermsAggregationBuilder("terms") - .field("nested_value") + for (boolean withScore : new boolean[] { true, false }) { + NestedAggregationBuilder nested = new NestedAggregationBuilder("nested", "nested_object").subAggregation( + new RareTermsAggregationBuilder("terms").field("nested_value") .maxDocCount(2) .subAggregation( - new TopHitsAggregationBuilder("top_hits") - .sort(withScore ? new ScoreSortBuilder() : new FieldSortBuilder("_doc")) - .storedField("_none_") + new TopHitsAggregationBuilder("top_hits").sort( + withScore ? 
new ScoreSortBuilder() : new FieldSortBuilder("_doc") + ).storedField("_none_") ) - ); - MappedFieldType fieldType - = new NumberFieldMapper.NumberFieldType("nested_value", NumberFieldMapper.NumberType.LONG); + ); + MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("nested_value", NumberFieldMapper.NumberType.LONG); try (IndexReader indexReader = wrapInMockESDirectoryReader(DirectoryReader.open(directory))) { if (withScore) { - IllegalStateException e = expectThrows(IllegalStateException.class, - () -> searchAndReduce(newIndexSearcher(indexReader), - // match root document only - new DocValuesFieldExistsQuery(PRIMARY_TERM_NAME), nested, fieldType)); + IllegalStateException e = expectThrows( + IllegalStateException.class, + () -> searchAndReduce( + newIndexSearcher(indexReader), + // match root document only + new DocValuesFieldExistsQuery(PRIMARY_TERM_NAME), + nested, + fieldType + ) + ); assertThat( e.getMessage(), equalTo( @@ -481,9 +469,13 @@ public void testWithNestedScoringAggregations() throws IOException { ) ); } else { - InternalNested result = searchAndReduce(newIndexSearcher(indexReader), + InternalNested result = searchAndReduce( + newIndexSearcher(indexReader), // match root document only - new DocValuesFieldExistsQuery(PRIMARY_TERM_NAME), nested, fieldType); + new DocValuesFieldExistsQuery(PRIMARY_TERM_NAME), + nested, + fieldType + ); InternalMultiBucketAggregation terms = result.getAggregations().get("terms"); assertThat(terms.getBuckets().size(), equalTo(2)); long counter = 1; @@ -503,6 +495,7 @@ public void testWithNestedScoringAggregations() throws IOException { } private final SeqNoFieldMapper.SequenceIDFields sequenceIDFields = SeqNoFieldMapper.SequenceIDFields.emptySeqID(); + private List generateDocsWithNested(String id, int value, int[] nestedValues) { List documents = new ArrayList<>(); @@ -526,10 +519,10 @@ private List generateDocsWithNested(String id, int value, int[] nested @Override protected IndexSettings createIndexSettings() { - Settings nodeSettings = Settings.builder() - .put("search.max_buckets", 100000).build(); + Settings nodeSettings = Settings.builder().put("search.max_buckets", 100000).build(); return new IndexSettings( - IndexMetadata.builder("_index").settings(Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT)) + IndexMetadata.builder("_index") + .settings(Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT)) .numberOfShards(1) .numberOfReplicas(0) .creationDate(System.currentTimeMillis()) @@ -538,10 +531,12 @@ protected IndexSettings createIndexSettings() { ); } - private void testSearchCase(Query query, - List dataset, - Consumer configure, - Consumer> verify) throws IOException { + private void testSearchCase( + Query query, + List dataset, + Consumer configure, + Consumer> verify + ) throws IOException { RareTermsAggregationBuilder aggregationBuilder = new RareTermsAggregationBuilder("_name"); if (configure != null) { configure.accept(aggregationBuilder); @@ -550,9 +545,8 @@ private void testSearchCase(Query query, } - private A executeTestCase(Query query, - List dataset, - AggregationBuilder aggregationBuilder) throws IOException { + private A executeTestCase(Query query, List dataset, AggregationBuilder aggregationBuilder) + throws IOException { try (Directory directory = newDirectory()) { try (RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory)) { Document document = new Document(); @@ -578,7 +572,7 @@ private A executeTestCase(Query query, 
MappedFieldType[] types = new MappedFieldType[] { keywordField(KEYWORD_FIELD), longField(LONG_FIELD), - keywordField("even_odd")}; + keywordField("even_odd") }; return searchAndReduce(indexSearcher, query, aggregationBuilder, types); } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/SignificantLongTermsTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/SignificantLongTermsTests.java index e2b4cb8974f1c..c051ee3811756 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/SignificantLongTermsTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/SignificantLongTermsTests.java @@ -30,25 +30,37 @@ public void setUp() throws Exception { } @Override - protected InternalSignificantTerms createTestInstance(String name, - Map metadata, - InternalAggregations aggs, - int requiredSize, int numBuckets, - long subsetSize, int[] subsetDfs, - long supersetSize, int[] supersetDfs, - SignificanceHeuristic significanceHeuristic) { + protected InternalSignificantTerms createTestInstance( + String name, + Map metadata, + InternalAggregations aggs, + int requiredSize, + int numBuckets, + long subsetSize, + int[] subsetDfs, + long supersetSize, + int[] supersetDfs, + SignificanceHeuristic significanceHeuristic + ) { List buckets = new ArrayList<>(numBuckets); Set terms = new HashSet<>(); for (int i = 0; i < numBuckets; ++i) { long term = randomValueOtherThanMany(l -> terms.add(l) == false, random()::nextLong); - SignificantLongTerms.Bucket bucket = new SignificantLongTerms.Bucket(subsetDfs[i], subsetSize, - supersetDfs[i], supersetSize, term, aggs, format, 0); + SignificantLongTerms.Bucket bucket = new SignificantLongTerms.Bucket( + subsetDfs[i], + subsetSize, + supersetDfs[i], + supersetSize, + term, + aggs, + format, + 0 + ); bucket.updateScore(significanceHeuristic); buckets.add(bucket); } - return new SignificantLongTerms(name, requiredSize, 1L, metadata, format, subsetSize, - supersetSize, significanceHeuristic, buckets); + return new SignificantLongTerms(name, requiredSize, 1L, metadata, format, subsetSize, supersetSize, significanceHeuristic, buckets); } @Override @@ -70,64 +82,83 @@ protected Class implementationClass() { SignificanceHeuristic significanceHeuristic = longTerms.significanceHeuristic; Map metadata = longTerms.getMetadata(); switch (between(0, 5)) { - case 0: - name += randomAlphaOfLength(5); - break; - case 1: - requiredSize += between(1, 100); - break; - case 2: - minDocCount += between(1, 100); - break; - case 3: - subsetSize += between(1, 100); - break; - case 4: - supersetSize += between(1, 100); - break; - case 5: - buckets = new ArrayList<>(buckets); - buckets.add(new SignificantLongTerms.Bucket(randomLong(), randomNonNegativeLong(), randomNonNegativeLong(), - randomNonNegativeLong(), randomNonNegativeLong(), InternalAggregations.EMPTY, format, 0)); - break; - case 8: - if (metadata == null) { - metadata = new HashMap<>(1); - } else { - metadata = new HashMap<>(instance.getMetadata()); - } - metadata.put(randomAlphaOfLength(15), randomInt()); - break; - default: - throw new AssertionError("Illegal randomisation branch"); + case 0: + name += randomAlphaOfLength(5); + break; + case 1: + requiredSize += between(1, 100); + break; + case 2: + minDocCount += between(1, 100); + break; + case 3: + subsetSize += between(1, 100); + break; + case 4: + supersetSize += between(1, 100); + break; + case 5: + buckets = new ArrayList<>(buckets); + buckets.add( + 
new SignificantLongTerms.Bucket( + randomLong(), + randomNonNegativeLong(), + randomNonNegativeLong(), + randomNonNegativeLong(), + randomNonNegativeLong(), + InternalAggregations.EMPTY, + format, + 0 + ) + ); + break; + case 8: + if (metadata == null) { + metadata = new HashMap<>(1); + } else { + metadata = new HashMap<>(instance.getMetadata()); + } + metadata.put(randomAlphaOfLength(15), randomInt()); + break; + default: + throw new AssertionError("Illegal randomisation branch"); } - return new SignificantLongTerms(name, requiredSize, minDocCount, metadata, format, subsetSize, - supersetSize, significanceHeuristic, buckets); + return new SignificantLongTerms( + name, + requiredSize, + minDocCount, + metadata, + format, + subsetSize, + supersetSize, + significanceHeuristic, + buckets + ); } else { String name = instance.getName(); int requiredSize = instance.requiredSize; long minDocCount = instance.minDocCount; Map metadata = instance.getMetadata(); switch (between(0, 3)) { - case 0: - name += randomAlphaOfLength(5); - break; - case 1: - requiredSize += between(1, 100); - break; - case 2: - minDocCount += between(1, 100); - break; - case 3: - if (metadata == null) { - metadata = new HashMap<>(1); - } else { - metadata = new HashMap<>(instance.getMetadata()); - } - metadata.put(randomAlphaOfLength(15), randomInt()); - break; - default: - throw new AssertionError("Illegal randomisation branch"); + case 0: + name += randomAlphaOfLength(5); + break; + case 1: + requiredSize += between(1, 100); + break; + case 2: + minDocCount += between(1, 100); + break; + case 3: + if (metadata == null) { + metadata = new HashMap<>(1); + } else { + metadata = new HashMap<>(instance.getMetadata()); + } + metadata.put(randomAlphaOfLength(15), randomInt()); + break; + default: + throw new AssertionError("Illegal randomisation branch"); } return new UnmappedSignificantTerms(name, requiredSize, minDocCount, metadata); } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/SignificantStringTermsTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/SignificantStringTermsTests.java index 31d28e078feb1..a112c5a46ac4c 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/SignificantStringTermsTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/SignificantStringTermsTests.java @@ -23,25 +23,47 @@ public class SignificantStringTermsTests extends InternalSignificantTermsTestCase { @Override - protected InternalSignificantTerms createTestInstance(String name, - Map metadata, - InternalAggregations aggs, - int requiredSize, int numBuckets, - long subsetSize, int[] subsetDfs, - long supersetSize, int[] supersetDfs, - SignificanceHeuristic significanceHeuristic) { + protected InternalSignificantTerms createTestInstance( + String name, + Map metadata, + InternalAggregations aggs, + int requiredSize, + int numBuckets, + long subsetSize, + int[] subsetDfs, + long supersetSize, + int[] supersetDfs, + SignificanceHeuristic significanceHeuristic + ) { DocValueFormat format = DocValueFormat.RAW; List buckets = new ArrayList<>(numBuckets); Set terms = new HashSet<>(); for (int i = 0; i < numBuckets; ++i) { BytesRef term = randomValueOtherThanMany(b -> terms.add(b) == false, () -> new BytesRef(randomAlphaOfLength(10))); - SignificantStringTerms.Bucket bucket = new SignificantStringTerms.Bucket(term, subsetDfs[i], subsetSize, - supersetDfs[i], supersetSize, aggs, format, 0); + 
SignificantStringTerms.Bucket bucket = new SignificantStringTerms.Bucket( + term, + subsetDfs[i], + subsetSize, + supersetDfs[i], + supersetSize, + aggs, + format, + 0 + ); bucket.updateScore(significanceHeuristic); buckets.add(bucket); } - return new SignificantStringTerms(name, requiredSize, 1L, metadata, format, subsetSize, - supersetSize, significanceHeuristic, buckets); + return new SignificantStringTerms( + name, + requiredSize, + 1L, + metadata, + format, + subsetSize, + supersetSize, + significanceHeuristic, + buckets + ); } @Override @@ -63,65 +85,83 @@ protected Class implementationClass() { SignificanceHeuristic significanceHeuristic = stringTerms.significanceHeuristic; Map metadata = stringTerms.getMetadata(); switch (between(0, 5)) { - case 0: - name += randomAlphaOfLength(5); - break; - case 1: - requiredSize += between(1, 100); - break; - case 2: - minDocCount += between(1, 100); - break; - case 3: - subsetSize += between(1, 100); - break; - case 4: - supersetSize += between(1, 100); - break; - case 5: - buckets = new ArrayList<>(buckets); - buckets.add(new SignificantStringTerms.Bucket(new BytesRef(randomAlphaOfLengthBetween(1, 10)), - randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(), - InternalAggregations.EMPTY, format, 0)); - break; - case 8: - if (metadata == null) { - metadata = new HashMap<>(1); - } else { - metadata = new HashMap<>(instance.getMetadata()); - } - metadata.put(randomAlphaOfLength(15), randomInt()); - break; - default: - throw new AssertionError("Illegal randomisation branch"); + case 0: + name += randomAlphaOfLength(5); + break; + case 1: + requiredSize += between(1, 100); + break; + case 2: + minDocCount += between(1, 100); + break; + case 3: + subsetSize += between(1, 100); + break; + case 4: + supersetSize += between(1, 100); + break; + case 5: + buckets = new ArrayList<>(buckets); + buckets.add( + new SignificantStringTerms.Bucket( + new BytesRef(randomAlphaOfLengthBetween(1, 10)), + randomNonNegativeLong(), + randomNonNegativeLong(), + randomNonNegativeLong(), + randomNonNegativeLong(), + InternalAggregations.EMPTY, + format, + 0 + ) + ); + break; + case 8: + if (metadata == null) { + metadata = new HashMap<>(1); + } else { + metadata = new HashMap<>(instance.getMetadata()); + } + metadata.put(randomAlphaOfLength(15), randomInt()); + break; + default: + throw new AssertionError("Illegal randomisation branch"); } - return new SignificantStringTerms(name, requiredSize, minDocCount, metadata, format, subsetSize, - supersetSize, significanceHeuristic, buckets); + return new SignificantStringTerms( + name, + requiredSize, + minDocCount, + metadata, + format, + subsetSize, + supersetSize, + significanceHeuristic, + buckets + ); } else { String name = instance.getName(); int requiredSize = instance.requiredSize; long minDocCount = instance.minDocCount; Map metadata = instance.getMetadata(); switch (between(0, 3)) { - case 0: - name += randomAlphaOfLength(5); - break; - case 1: - requiredSize += between(1, 100); - break; - case 2: - minDocCount += between(1, 100); - break; - case 3: - if (metadata == null) { - metadata = new HashMap<>(1); - } else { - metadata = new HashMap<>(instance.getMetadata()); - } - metadata.put(randomAlphaOfLength(15), randomInt()); - break; - default: - throw new AssertionError("Illegal randomisation branch"); + case 0: + name += randomAlphaOfLength(5); + break; + case 1: + requiredSize += between(1, 100); + break; + case 2: + minDocCount += between(1, 100); + break; + case 3: + 
if (metadata == null) { + metadata = new HashMap<>(1); + } else { + metadata = new HashMap<>(instance.getMetadata()); + } + metadata.put(randomAlphaOfLength(15), randomInt()); + break; + default: + throw new AssertionError("Illegal randomisation branch"); } return new UnmappedSignificantTerms(name, requiredSize, minDocCount, metadata); } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/SignificantTermsAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/SignificantTermsAggregatorTests.java index 1098776800e63..a6c1adcc6f9e9 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/SignificantTermsAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/SignificantTermsAggregatorTests.java @@ -76,11 +76,13 @@ protected AggregationBuilder createAggBuilderForTypeTest(MappedFieldType fieldTy @Override protected List getSupportedValuesSourceTypes() { - return List.of(CoreValuesSourceType.NUMERIC, + return List.of( + CoreValuesSourceType.NUMERIC, CoreValuesSourceType.KEYWORD, CoreValuesSourceType.BOOLEAN, CoreValuesSourceType.DATE, - CoreValuesSourceType.IP); + CoreValuesSourceType.IP + ); } @Override @@ -93,7 +95,7 @@ protected List unsupportedMappedFieldTypes() { ); } - public void testSignificance(SignificanceHeuristic heuristic) throws IOException { + public void testSignificance(SignificanceHeuristic heuristic) throws IOException { TextFieldType textFieldType = new TextFieldType("text"); textFieldType.setFielddata(true); @@ -108,7 +110,7 @@ public void testSignificance(SignificanceHeuristic heuristic) throws IOException sigAgg.executionHint(randomExecutionHint()); if (randomBoolean()) { // Use a background filter which just happens to be same scope as whole-index. 
- sigAgg.backgroundFilter(QueryBuilders.termsQuery("text", "common")); + sigAgg.backgroundFilter(QueryBuilders.termsQuery("text", "common")); } SignificantTermsAggregationBuilder sigNumAgg = new SignificantTermsAggregationBuilder("sig_number").field("long_field"); @@ -146,8 +148,8 @@ public void testSignificance(SignificanceHeuristic heuristic) throws IOException assertNull(terms.getBucketByKey("even")); // Search with string-based includeexcludes - String oddStrings[] = new String[] {"odd", "weird"}; - String evenStrings[] = new String[] {"even", "regular"}; + String oddStrings[] = new String[] { "odd", "weird" }; + String evenStrings[] = new String[] { "even", "regular" }; sigAgg.includeExclude(new IncludeExclude(oddStrings, evenStrings)); sigAgg.significanceHeuristic(heuristic); @@ -186,8 +188,7 @@ public void testSignificance() throws IOException { * fields */ public void testNumericSignificance() throws IOException { - NumberFieldType longFieldType - = new NumberFieldMapper.NumberFieldType("long_field", NumberFieldMapper.NumberType.LONG); + NumberFieldType longFieldType = new NumberFieldMapper.NumberFieldType("long_field", NumberFieldMapper.NumberType.LONG); IndexWriterConfig indexWriterConfig = newIndexWriterConfig(); indexWriterConfig.setMaxBufferedDocs(100); @@ -288,8 +289,7 @@ public void testRangeField() throws IOException { new RangeFieldMapper.Range(rangeType, 1L, 5L, true, true), new RangeFieldMapper.Range(rangeType, -3L, 4L, true, true), new RangeFieldMapper.Range(rangeType, 4L, 13L, true, true), - new RangeFieldMapper.Range(rangeType, 42L, 49L, true, true), - }) { + new RangeFieldMapper.Range(rangeType, 42L, 49L, true, true), }) { Document doc = new Document(); BytesRef encodedRange = rangeType.encodeRanges(Collections.singleton(range)); doc.add(new BinaryDocValuesField("field", encodedRange)); @@ -336,18 +336,19 @@ public void testFieldAlias() throws IOException { assertEquals("test expects a single segment", 1, reader.leaves().size()); IndexSearcher searcher = new IndexSearcher(reader); - SignificantTerms evenTerms = searchAndReduce(searcher, new TermQuery(new Term("text", "even")), - agg, textFieldType); - SignificantTerms aliasEvenTerms = searchAndReduce(searcher, new TermQuery(new Term("text", "even")), - aliasAgg, textFieldType); + SignificantTerms evenTerms = searchAndReduce(searcher, new TermQuery(new Term("text", "even")), agg, textFieldType); + SignificantTerms aliasEvenTerms = searchAndReduce( + searcher, + new TermQuery(new Term("text", "even")), + aliasAgg, + textFieldType + ); assertFalse(evenTerms.getBuckets().isEmpty()); assertEquals(evenTerms, aliasEvenTerms); - SignificantTerms oddTerms = searchAndReduce(searcher, new TermQuery(new Term("text", "odd")), - agg, textFieldType); - SignificantTerms aliasOddTerms = searchAndReduce(searcher, new TermQuery(new Term("text", "odd")), - aliasAgg, textFieldType); + SignificantTerms oddTerms = searchAndReduce(searcher, new TermQuery(new Term("text", "odd")), agg, textFieldType); + SignificantTerms aliasOddTerms = searchAndReduce(searcher, new TermQuery(new Term("text", "odd")), aliasAgg, textFieldType); assertFalse(oddTerms.getBuckets().isEmpty()); assertEquals(oddTerms, aliasOddTerms); @@ -540,11 +541,21 @@ public void testThreeLayerLong() throws IOException { } try (IndexReader reader = maybeWrapReaderEs(writer.getReader())) { IndexSearcher searcher = newIndexSearcher(reader); - SignificantTermsAggregationBuilder request = new SignificantTermsAggregationBuilder("i").field("i").minDocCount(0) - .subAggregation(new 
SignificantTermsAggregationBuilder("j").field("j").minDocCount(0) - .subAggregation(new SignificantTermsAggregationBuilder("k").field("k").minDocCount(0))); - SignificantLongTerms result = searchAndReduce(searcher, new MatchAllDocsQuery(), request, - longField("i"), longField("j"), longField("k")); + SignificantTermsAggregationBuilder request = new SignificantTermsAggregationBuilder("i").field("i") + .minDocCount(0) + .subAggregation( + new SignificantTermsAggregationBuilder("j").field("j") + .minDocCount(0) + .subAggregation(new SignificantTermsAggregationBuilder("k").field("k").minDocCount(0)) + ); + SignificantLongTerms result = searchAndReduce( + searcher, + new MatchAllDocsQuery(), + request, + longField("i"), + longField("j"), + longField("k") + ); assertThat(result.getSubsetSize(), equalTo(1000L)); for (int i = 0; i < 10; i++) { SignificantLongTerms.Bucket iBucket = result.getBucketByKey(Integer.toString(i)); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/SignificantTextAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/SignificantTextAggregatorTests.java index 4e3ae03638372..dcdd6436c4cad 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/SignificantTextAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/SignificantTextAggregatorTests.java @@ -54,10 +54,7 @@ protected AggregationBuilder createAggBuilderForTypeTest(MappedFieldType fieldTy @Override protected List getSupportedValuesSourceTypes() { - return List.of( - CoreValuesSourceType.BOOLEAN, - CoreValuesSourceType.KEYWORD - ); + return List.of(CoreValuesSourceType.BOOLEAN, CoreValuesSourceType.KEYWORD); } @Override @@ -80,11 +77,10 @@ public void testSignificance() throws IOException { indexDocuments(w); SignificantTextAggregationBuilder sigAgg = new SignificantTextAggregationBuilder("sig_text", "text").filterDuplicateText(true); - if(randomBoolean()){ - sigAgg.sourceFieldNames(Arrays.asList(new String [] {"json_only_field"})); + if (randomBoolean()) { + sigAgg.sourceFieldNames(Arrays.asList(new String[] { "json_only_field" })); } - SamplerAggregationBuilder aggBuilder = new SamplerAggregationBuilder("sampler") - .subAggregation(sigAgg); + SamplerAggregationBuilder aggBuilder = new SamplerAggregationBuilder("sampler").subAggregation(sigAgg); try (IndexReader reader = DirectoryReader.open(w)) { assertEquals("test expects a single segment", 1, reader.leaves().size()); @@ -129,7 +125,7 @@ public void testIncludeExcludes() throws IOException { try (Directory dir = newDirectory(); IndexWriter w = new IndexWriter(dir, indexWriterConfig)) { indexDocuments(w); - String [] incExcValues = {"duplicate"}; + String[] incExcValues = { "duplicate" }; try (IndexReader reader = DirectoryReader.open(w)) { assertEquals("test expects a single segment", 1, reader.leaves().size()); @@ -137,12 +133,12 @@ public void testIncludeExcludes() throws IOException { // Inclusive of values { - SignificantTextAggregationBuilder sigAgg = new SignificantTextAggregationBuilder("sig_text", "text"). 
- includeExclude(new IncludeExclude(incExcValues, null)); - SamplerAggregationBuilder aggBuilder = new SamplerAggregationBuilder("sampler") - .subAggregation(sigAgg); - if(randomBoolean()){ - sigAgg.sourceFieldNames(Arrays.asList(new String [] {"json_only_field"})); + SignificantTextAggregationBuilder sigAgg = new SignificantTextAggregationBuilder("sig_text", "text").includeExclude( + new IncludeExclude(incExcValues, null) + ); + SamplerAggregationBuilder aggBuilder = new SamplerAggregationBuilder("sampler").subAggregation(sigAgg); + if (randomBoolean()) { + sigAgg.sourceFieldNames(Arrays.asList(new String[] { "json_only_field" })); } // Search "even" which should have duplication InternalSampler sampler = searchAndReduce(searcher, new TermQuery(new Term("text", "even")), aggBuilder, textFieldType); @@ -155,12 +151,12 @@ public void testIncludeExcludes() throws IOException { } // Exclusive of values { - SignificantTextAggregationBuilder sigAgg = new SignificantTextAggregationBuilder("sig_text", "text"). - includeExclude(new IncludeExclude(null, incExcValues)); - SamplerAggregationBuilder aggBuilder = new SamplerAggregationBuilder("sampler") - .subAggregation(sigAgg); - if(randomBoolean()){ - sigAgg.sourceFieldNames(Arrays.asList(new String [] {"json_only_field"})); + SignificantTextAggregationBuilder sigAgg = new SignificantTextAggregationBuilder("sig_text", "text").includeExclude( + new IncludeExclude(null, incExcValues) + ); + SamplerAggregationBuilder aggBuilder = new SamplerAggregationBuilder("sampler").subAggregation(sigAgg); + if (randomBoolean()) { + sigAgg.sourceFieldNames(Arrays.asList(new String[] { "json_only_field" })); } // Search "even" which should have duplication InternalSampler sampler = searchAndReduce(searcher, new TermQuery(new Term("text", "even")), aggBuilder, textFieldType); @@ -186,17 +182,16 @@ public void testMissingField() throws IOException { SignificantTextAggregationBuilder sigAgg = new SignificantTextAggregationBuilder("sig_text", "this_field_does_not_exist") .filterDuplicateText(true); - if(randomBoolean()){ - sigAgg.sourceFieldNames(Arrays.asList(new String [] {"json_only_field"})); + if (randomBoolean()) { + sigAgg.sourceFieldNames(Arrays.asList(new String[] { "json_only_field" })); } - SamplerAggregationBuilder aggBuilder = new SamplerAggregationBuilder("sampler") - .subAggregation(sigAgg); + SamplerAggregationBuilder aggBuilder = new SamplerAggregationBuilder("sampler").subAggregation(sigAgg); try (IndexReader reader = DirectoryReader.open(w)) { IndexSearcher searcher = new IndexSearcher(reader); InternalSampler sampler = searchAndReduce(searcher, new TermQuery(new Term("text", "odd")), aggBuilder, textFieldType); SignificantTerms terms = sampler.getAggregations().get("sig_text"); - assertTrue(terms.getBuckets().isEmpty()); + assertTrue(terms.getBuckets().isEmpty()); } } } @@ -210,13 +205,11 @@ public void testFieldAlias() throws IOException { try (Directory dir = newDirectory(); IndexWriter w = new IndexWriter(dir, indexWriterConfig)) { indexDocuments(w); - SignificantTextAggregationBuilder agg = significantText("sig_text", "text") - .filterDuplicateText(true); - SignificantTextAggregationBuilder aliasAgg = significantText("sig_text", "text-alias") - .filterDuplicateText(true); + SignificantTextAggregationBuilder agg = significantText("sig_text", "text").filterDuplicateText(true); + SignificantTextAggregationBuilder aliasAgg = significantText("sig_text", "text-alias").filterDuplicateText(true); if (randomBoolean()) { - List sourceFieldNames = 
Arrays.asList(new String [] {"json_only_field"}); + List sourceFieldNames = Arrays.asList(new String[] { "json_only_field" }); agg.sourceFieldNames(sourceFieldNames); aliasAgg.sourceFieldNames(sourceFieldNames); } @@ -229,8 +222,12 @@ public void testFieldAlias() throws IOException { SamplerAggregationBuilder aliasSamplerAgg = sampler("sampler").subAggregation(aliasAgg); InternalSampler sampler = searchAndReduce(searcher, new TermQuery(new Term("text", "odd")), samplerAgg, textFieldType); - InternalSampler aliasSampler = searchAndReduce(searcher, new TermQuery(new Term("text", "odd")), - aliasSamplerAgg, textFieldType); + InternalSampler aliasSampler = searchAndReduce( + searcher, + new TermQuery(new Term("text", "odd")), + aliasSamplerAgg, + textFieldType + ); SignificantTerms terms = sampler.getAggregations().get("sig_text"); SignificantTerms aliasTerms = aliasSampler.getAggregations().get("sig_text"); @@ -297,9 +294,7 @@ private void indexDocuments(IndexWriter writer) throws IOException { } doc.add(new Field("text", text.toString(), TextFieldMapper.Defaults.FIELD_TYPE)); - String json ="{ \"text\" : \"" + text.toString() + "\","+ - " \"json_only_field\" : \"" + text.toString() + "\"" + - " }"; + String json = "{ \"text\" : \"" + text.toString() + "\"," + " \"json_only_field\" : \"" + text.toString() + "\"" + " }"; doc.add(new StoredField("_source", new BytesRef(json))); doc.add(new SortedSetDocValuesField("kwd", i % 2 == 0 ? new BytesRef("even") : new BytesRef("odd"))); doc.add(new Field("kwd", i % 2 == 0 ? new BytesRef("even") : new BytesRef("odd"), KeywordFieldMapper.Defaults.FIELD_TYPE)); @@ -320,13 +315,13 @@ public void testSignificanceOnTextArrays() throws IOException { for (int i = 0; i < 10; i++) { Document doc = new Document(); doc.add(new Field("text", "foo", TextFieldMapper.Defaults.FIELD_TYPE)); - String json ="{ \"text\" : [\"foo\",\"foo\"], \"title\" : [\"foo\", \"foo\"]}"; + String json = "{ \"text\" : [\"foo\",\"foo\"], \"title\" : [\"foo\", \"foo\"]}"; doc.add(new StoredField("_source", new BytesRef(json))); w.addDocument(doc); } SignificantTextAggregationBuilder sigAgg = new SignificantTextAggregationBuilder("sig_text", "text"); - sigAgg.sourceFieldNames(Arrays.asList(new String [] {"title", "text"})); + sigAgg.sourceFieldNames(Arrays.asList(new String[] { "title", "text" })); try (IndexReader reader = DirectoryReader.open(w)) { assertEquals("test expects a single segment", 1, reader.leaves().size()); IndexSearcher searcher = new IndexSearcher(reader); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/StringRareTermsTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/StringRareTermsTests.java index db262aca4e5f2..96a39ee401087 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/StringRareTermsTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/StringRareTermsTests.java @@ -25,10 +25,12 @@ public class StringRareTermsTests extends InternalRareTermsTestCase { @Override - protected InternalRareTerms createTestInstance(String name, - Map metadata, - InternalAggregations aggregations, - long maxDocCount) { + protected InternalRareTerms createTestInstance( + String name, + Map metadata, + InternalAggregations aggregations, + long maxDocCount + ) { BucketOrder order = BucketOrder.count(false); DocValueFormat format = DocValueFormat.RAW; List buckets = new ArrayList<>(); @@ -67,8 +69,14 @@ protected Class implementationClass() { break; case 
2: buckets = new ArrayList<>(buckets); - buckets.add(new StringRareTerms.Bucket(new BytesRef(randomAlphaOfLengthBetween(1, 10)), randomNonNegativeLong(), - InternalAggregations.EMPTY, format)); + buckets.add( + new StringRareTerms.Bucket( + new BytesRef(randomAlphaOfLengthBetween(1, 10)), + randomNonNegativeLong(), + InternalAggregations.EMPTY, + format + ) + ); break; case 3: if (metadata == null) { diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/StringTermsTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/StringTermsTests.java index 2cb70f42a1d60..4a8bf55fd1258 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/StringTermsTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/StringTermsTests.java @@ -23,11 +23,13 @@ public class StringTermsTests extends InternalTermsTestCase { @Override - protected InternalTerms createTestInstance(String name, - Map metadata, - InternalAggregations aggregations, - boolean showTermDocCountError, - long docCountError) { + protected InternalTerms createTestInstance( + String name, + Map metadata, + InternalAggregations aggregations, + boolean showTermDocCountError, + long docCountError + ) { return createTestInstance(generateRandomDict(), name, metadata, aggregations, showTermDocCountError, docCountError); } @@ -86,8 +88,16 @@ protected Class implementationClass() { break; case 7: buckets = new ArrayList<>(buckets); - buckets.add(new StringTerms.Bucket(new BytesRef(randomAlphaOfLengthBetween(1, 10)), randomNonNegativeLong(), - InternalAggregations.EMPTY, showTermDocCountError, docCountError, format)); + buckets.add( + new StringTerms.Bucket( + new BytesRef(randomAlphaOfLengthBetween(1, 10)), + randomNonNegativeLong(), + InternalAggregations.EMPTY, + showTermDocCountError, + docCountError, + format + ) + ); break; case 8: if (metadata == null) { @@ -101,8 +111,20 @@ protected Class implementationClass() { throw new AssertionError("Illegal randomisation branch"); } Collections.sort(buckets, stringTerms.reduceOrder.comparator()); - return new StringTerms(name, stringTerms.reduceOrder, order, requiredSize, minDocCount, metadata, format, shardSize, - showTermDocCountError, otherDocCount, buckets, docCountError); + return new StringTerms( + name, + stringTerms.reduceOrder, + order, + requiredSize, + minDocCount, + metadata, + format, + shardSize, + showTermDocCountError, + otherDocCount, + buckets, + docCountError + ); } else { String name = instance.getName(); BucketOrder order = instance.order; @@ -147,12 +169,14 @@ private BytesRef[] generateRandomDict() { return createTestInstance(dict, name, createTestMetadata(), createSubAggregations(), showDocCount, docCountError); } - private InternalTerms createTestInstance(BytesRef[] dict, - String name, - Map metadata, - InternalAggregations aggregations, - boolean showTermDocCountError, - long docCountError) { + private InternalTerms createTestInstance( + BytesRef[] dict, + String name, + Map metadata, + InternalAggregations aggregations, + boolean showTermDocCountError, + long docCountError + ) { BucketOrder order = BucketOrder.count(false); long minDocCount = 1; int requiredSize = 3; @@ -163,16 +187,29 @@ private BytesRef[] generateRandomDict() { final int numBuckets = randomNumberOfBuckets(); Set terms = new HashSet<>(); for (int i = 0; i < numBuckets; ++i) { - BytesRef term = dict[randomIntBetween(0, dict.length-1)]; + BytesRef term = dict[randomIntBetween(0, dict.length - 
1)]; if (terms.add(term)) { int docCount = randomIntBetween(1, 100); buckets.add(new StringTerms.Bucket(term, docCount, aggregations, showTermDocCountError, docCountError, format)); } } - BucketOrder reduceOrder = randomBoolean() ? - BucketOrder.compound(BucketOrder.key(true), BucketOrder.count(false)) : BucketOrder.key(true); + BucketOrder reduceOrder = randomBoolean() + ? BucketOrder.compound(BucketOrder.key(true), BucketOrder.count(false)) + : BucketOrder.key(true); Collections.sort(buckets, reduceOrder.comparator()); - return new StringTerms(name, reduceOrder, order, requiredSize, minDocCount, - metadata, format, shardSize, showTermDocCountError, otherDocCount, buckets, docCountError); + return new StringTerms( + name, + reduceOrder, + order, + requiredSize, + minDocCount, + metadata, + format, + shardSize, + showTermDocCountError, + otherDocCount, + buckets, + docCountError + ); } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorFactoryTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorFactoryTests.java index f667d21c84d10..088af84dd6742 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorFactoryTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorFactoryTests.java @@ -20,26 +20,24 @@ public class TermsAggregatorFactoryTests extends ESTestCase { public void testPickEmpty() throws Exception { AggregatorFactories empty = mock(AggregatorFactories.class); when(empty.countAggregators()).thenReturn(0); - assertThat(TermsAggregatorFactory.pickSubAggColectMode(empty, randomInt(), randomInt()), - equalTo(Aggregator.SubAggCollectionMode.DEPTH_FIRST)); + assertThat( + TermsAggregatorFactory.pickSubAggColectMode(empty, randomInt(), randomInt()), + equalTo(Aggregator.SubAggCollectionMode.DEPTH_FIRST) + ); } public void testPickNonEempty() { AggregatorFactories nonEmpty = mock(AggregatorFactories.class); when(nonEmpty.countAggregators()).thenReturn(1); - assertThat(TermsAggregatorFactory.pickSubAggColectMode(nonEmpty, Integer.MAX_VALUE, -1), - equalTo(Aggregator.SubAggCollectionMode.DEPTH_FIRST)); - assertThat(TermsAggregatorFactory.pickSubAggColectMode(nonEmpty, 10, -1), - equalTo(Aggregator.SubAggCollectionMode.BREADTH_FIRST)); - assertThat(TermsAggregatorFactory.pickSubAggColectMode(nonEmpty, 10, 5), - equalTo(Aggregator.SubAggCollectionMode.DEPTH_FIRST)); - assertThat(TermsAggregatorFactory.pickSubAggColectMode(nonEmpty, 10, 10), - equalTo(Aggregator.SubAggCollectionMode.DEPTH_FIRST)); - assertThat(TermsAggregatorFactory.pickSubAggColectMode(nonEmpty, 10, 100), - equalTo(Aggregator.SubAggCollectionMode.BREADTH_FIRST)); - assertThat(TermsAggregatorFactory.pickSubAggColectMode(nonEmpty, 1, 2), - equalTo(Aggregator.SubAggCollectionMode.BREADTH_FIRST)); - assertThat(TermsAggregatorFactory.pickSubAggColectMode(nonEmpty, 1, 100), - equalTo(Aggregator.SubAggCollectionMode.BREADTH_FIRST)); + assertThat( + TermsAggregatorFactory.pickSubAggColectMode(nonEmpty, Integer.MAX_VALUE, -1), + equalTo(Aggregator.SubAggCollectionMode.DEPTH_FIRST) + ); + assertThat(TermsAggregatorFactory.pickSubAggColectMode(nonEmpty, 10, -1), equalTo(Aggregator.SubAggCollectionMode.BREADTH_FIRST)); + assertThat(TermsAggregatorFactory.pickSubAggColectMode(nonEmpty, 10, 5), equalTo(Aggregator.SubAggCollectionMode.DEPTH_FIRST)); + assertThat(TermsAggregatorFactory.pickSubAggColectMode(nonEmpty, 10, 10), 
equalTo(Aggregator.SubAggCollectionMode.DEPTH_FIRST)); + assertThat(TermsAggregatorFactory.pickSubAggColectMode(nonEmpty, 10, 100), equalTo(Aggregator.SubAggCollectionMode.BREADTH_FIRST)); + assertThat(TermsAggregatorFactory.pickSubAggColectMode(nonEmpty, 1, 2), equalTo(Aggregator.SubAggCollectionMode.BREADTH_FIRST)); + assertThat(TermsAggregatorFactory.pickSubAggColectMode(nonEmpty, 1, 100), equalTo(Aggregator.SubAggCollectionMode.BREADTH_FIRST)); } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorTests.java index d056c94997f57..8b6c8028c43bb 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorTests.java @@ -161,10 +161,12 @@ protected ScriptService getMockScriptService() { scripts.put(STRING_SCRIPT_NAME, value -> STRING_SCRIPT_OUTPUT); - MockScriptEngine scriptEngine = new MockScriptEngine(MockScriptEngine.NAME, + MockScriptEngine scriptEngine = new MockScriptEngine( + MockScriptEngine.NAME, scripts, nonDeterministicScripts, - Collections.emptyMap()); + Collections.emptyMap() + ); Map engines = Collections.singletonMap(scriptEngine.getType(), scriptEngine); return new ScriptService(Settings.EMPTY, engines, ScriptModule.CORE_CONTEXTS); @@ -191,11 +193,13 @@ protected AggregationBuilder createAggBuilderForTypeTest(MappedFieldType fieldTy @Override protected List getSupportedValuesSourceTypes() { - return List.of(CoreValuesSourceType.NUMERIC, + return List.of( + CoreValuesSourceType.NUMERIC, CoreValuesSourceType.KEYWORD, CoreValuesSourceType.IP, CoreValuesSourceType.DATE, - CoreValuesSourceType.BOOLEAN); + CoreValuesSourceType.BOOLEAN + ); } public void testUsesGlobalOrdinalsByDefault() throws Exception { @@ -218,32 +222,28 @@ public void testUsesGlobalOrdinalsByDefault() throws Exception { assertThat(globalAgg.descriptCollectionStrategy(), equalTo("dense")); // Infers depth_first because the maxOrd is 0 which is less than the size - aggregationBuilder - .subAggregation(AggregationBuilders.cardinality("card").field("string")); + aggregationBuilder.subAggregation(AggregationBuilders.cardinality("card").field("string")); aggregator = createAggregator(aggregationBuilder, indexSearcher, fieldType); assertThat(aggregator, instanceOf(GlobalOrdinalsStringTermsAggregator.class)); globalAgg = (GlobalOrdinalsStringTermsAggregator) aggregator; assertThat(globalAgg.collectMode, equalTo(Aggregator.SubAggCollectionMode.DEPTH_FIRST)); assertThat(globalAgg.descriptCollectionStrategy(), equalTo("remap using single bucket ords")); - aggregationBuilder - .collectMode(Aggregator.SubAggCollectionMode.DEPTH_FIRST); + aggregationBuilder.collectMode(Aggregator.SubAggCollectionMode.DEPTH_FIRST); aggregator = createAggregator(aggregationBuilder, indexSearcher, fieldType); assertThat(aggregator, instanceOf(GlobalOrdinalsStringTermsAggregator.class)); globalAgg = (GlobalOrdinalsStringTermsAggregator) aggregator; assertThat(globalAgg.collectMode, equalTo(Aggregator.SubAggCollectionMode.DEPTH_FIRST)); assertThat(globalAgg.descriptCollectionStrategy(), equalTo("remap using single bucket ords")); - aggregationBuilder - .collectMode(Aggregator.SubAggCollectionMode.BREADTH_FIRST); + aggregationBuilder.collectMode(Aggregator.SubAggCollectionMode.BREADTH_FIRST); aggregator = createAggregator(aggregationBuilder, indexSearcher, 
fieldType); assertThat(aggregator, instanceOf(GlobalOrdinalsStringTermsAggregator.class)); globalAgg = (GlobalOrdinalsStringTermsAggregator) aggregator; assertThat(globalAgg.collectMode, equalTo(Aggregator.SubAggCollectionMode.BREADTH_FIRST)); assertThat(globalAgg.descriptCollectionStrategy(), equalTo("dense")); - aggregationBuilder - .order(BucketOrder.aggregation("card", true)); + aggregationBuilder.order(BucketOrder.aggregation("card", true)); aggregator = createAggregator(aggregationBuilder, indexSearcher, fieldType); assertThat(aggregator, instanceOf(GlobalOrdinalsStringTermsAggregator.class)); globalAgg = (GlobalOrdinalsStringTermsAggregator) aggregator; @@ -255,10 +255,9 @@ public void testUsesGlobalOrdinalsByDefault() throws Exception { public void testSimple() throws Exception { MappedFieldType fieldType = new KeywordFieldMapper.KeywordFieldType("string", randomBoolean(), true, Collections.emptyMap()); - TermsAggregationBuilder aggregationBuilder = new TermsAggregationBuilder("_name") - .executionHint(randomFrom(TermsAggregatorFactory.ExecutionMode.values()).toString()) - .field("string") - .order(BucketOrder.key(true)); + TermsAggregationBuilder aggregationBuilder = new TermsAggregationBuilder("_name").executionHint( + randomFrom(TermsAggregatorFactory.ExecutionMode.values()).toString() + ).field("string").order(BucketOrder.key(true)); testCase(aggregationBuilder, new MatchAllDocsQuery(), iw -> { iw.addDocument(doc(fieldType, "a", "b")); iw.addDocument(doc(fieldType, "", "c", "a")); @@ -283,8 +282,7 @@ public void testSimple() throws Exception { public void testStringShardMinDocCount() throws IOException { MappedFieldType fieldType = new KeywordFieldMapper.KeywordFieldType("string", true, true, Collections.emptyMap()); for (TermsAggregatorFactory.ExecutionMode executionMode : TermsAggregatorFactory.ExecutionMode.values()) { - TermsAggregationBuilder aggregationBuilder = new TermsAggregationBuilder("_name") - .field("string") + TermsAggregationBuilder aggregationBuilder = new TermsAggregationBuilder("_name").field("string") .executionHint(executionMode.toString()) .size(2) .minDocCount(2) @@ -292,11 +290,9 @@ public void testStringShardMinDocCount() throws IOException { .order(BucketOrder.key(true)); testCase(aggregationBuilder, new MatchAllDocsQuery(), iw -> { // force single shard/segment - iw.addDocuments(Arrays.asList( - doc(fieldType, "a", "b"), - doc(fieldType, "", "c", "d"), - doc(fieldType, "b", "d"), - doc(fieldType, "b"))); + iw.addDocuments( + Arrays.asList(doc(fieldType, "a", "b"), doc(fieldType, "", "c", "d"), doc(fieldType, "b", "d"), doc(fieldType, "b")) + ); }, (InternalTerms result) -> { assertEquals(2, result.getBuckets().size()); assertEquals("b", result.getBuckets().get(0).getKeyAsString()); @@ -327,8 +323,10 @@ public void testManyTerms() throws Exception { } iw.addDocuments(docs); }, (StringTerms result) -> { - assertThat(result.getBuckets().stream().map(StringTerms.Bucket::getKey).collect(toList()), - equalTo(List.of("b007", "b107", "b207", "b307", "b407", "b507", "b607", "b707", "b000", "b001"))); + assertThat( + result.getBuckets().stream().map(StringTerms.Bucket::getKey).collect(toList()), + equalTo(List.of("b007", "b107", "b207", "b307", "b407", "b507", "b607", "b707", "b000", "b001")) + ); }, fieldType); } @@ -336,8 +334,7 @@ public void testManyTermsOrderBySubAgg() throws Exception { MappedFieldType kft = new KeywordFieldMapper.KeywordFieldType("string", randomBoolean(), true, Collections.emptyMap()); MappedFieldType lft = new NumberFieldType("long", 
NumberType.LONG); - TermsAggregationBuilder aggregationBuilder = new TermsAggregationBuilder("_name") - .executionHint(randomHint()) + TermsAggregationBuilder aggregationBuilder = new TermsAggregationBuilder("_name").executionHint(randomHint()) .order(BucketOrder.aggregation("max", false)) .field("string") .subAggregation(new MaxAggregationBuilder("max").field("long")); @@ -646,14 +643,12 @@ public void testNumericIncludeExclude() throws Exception { indexWriter.addDocument(document); try (IndexReader indexReader = maybeWrapReaderEs(indexWriter.getReader())) { IndexSearcher indexSearcher = newIndexSearcher(indexReader); - MappedFieldType fieldType - = new NumberFieldMapper.NumberFieldType("long_field", NumberFieldMapper.NumberType.LONG); + MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("long_field", NumberFieldMapper.NumberType.LONG); String executionHint = randomFrom(TermsAggregatorFactory.ExecutionMode.values()).toString(); - TermsAggregationBuilder aggregationBuilder = new TermsAggregationBuilder("_name") - .userValueTypeHint(ValueType.LONG) + TermsAggregationBuilder aggregationBuilder = new TermsAggregationBuilder("_name").userValueTypeHint(ValueType.LONG) .executionHint(executionHint) - .includeExclude(new IncludeExclude(new long[]{0, 5}, null)) + .includeExclude(new IncludeExclude(new long[] { 0, 5 }, null)) .field("long_field") .order(BucketOrder.key(true)); AggregationContext context = createAggregationContext(indexSearcher, null, fieldType); @@ -671,7 +666,7 @@ public void testNumericIncludeExclude() throws Exception { aggregationBuilder = new TermsAggregationBuilder("_name").userValueTypeHint(ValueType.LONG) .executionHint(executionHint) - .includeExclude(new IncludeExclude(null, new long[]{0, 5})) + .includeExclude(new IncludeExclude(null, new long[] { 0, 5 })) .field("long_field") .order(BucketOrder.key(true)); context = createAggregationContext(indexSearcher, null, fieldType); @@ -691,11 +686,10 @@ public void testNumericIncludeExclude() throws Exception { assertEquals(1L, result.getBuckets().get(3).getDocCount()); assertTrue(AggregationInspectionHelper.hasValue((InternalTerms) result)); - fieldType - = new NumberFieldMapper.NumberFieldType("double_field", NumberFieldMapper.NumberType.DOUBLE); + fieldType = new NumberFieldMapper.NumberFieldType("double_field", NumberFieldMapper.NumberType.DOUBLE); aggregationBuilder = new TermsAggregationBuilder("_name").userValueTypeHint(ValueType.DOUBLE) .executionHint(executionHint) - .includeExclude(new IncludeExclude(new double[]{0.0, 5.0}, null)) + .includeExclude(new IncludeExclude(new double[] { 0.0, 5.0 }, null)) .field("double_field") .order(BucketOrder.key(true)); context = createAggregationContext(indexSearcher, null, fieldType); @@ -713,7 +707,7 @@ public void testNumericIncludeExclude() throws Exception { aggregationBuilder = new TermsAggregationBuilder("_name").userValueTypeHint(ValueType.DOUBLE) .executionHint(executionHint) - .includeExclude(new IncludeExclude(null, new double[]{0.0, 5.0})) + .includeExclude(new IncludeExclude(null, new double[] { 0.0, 5.0 })) .field("double_field") .order(BucketOrder.key(true)); context = createAggregationContext(indexSearcher, null, fieldType); @@ -751,8 +745,7 @@ public void testStringTermsAggregator() throws Exception { } return result; }; - termsAggregator(ValueType.STRING, fieldType, i -> Integer.toString(i), - String::compareTo, luceneFieldFactory); + termsAggregator(ValueType.STRING, fieldType, i -> Integer.toString(i), String::compareTo, luceneFieldFactory); 
termsAggregatorWithNestedMaxAgg(ValueType.STRING, fieldType, i -> Integer.toString(i), val -> luceneFieldFactory.apply(val, false)); } @@ -764,8 +757,7 @@ public void testLongTermsAggregator() throws Exception { return List.of(new NumericDocValuesField("field", val)); } }; - MappedFieldType fieldType - = new NumberFieldMapper.NumberFieldType("field", NumberFieldMapper.NumberType.LONG); + MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("field", NumberFieldMapper.NumberType.LONG); termsAggregator(ValueType.LONG, fieldType, Integer::longValue, Long::compareTo, luceneFieldFactory); termsAggregatorWithNestedMaxAgg(ValueType.LONG, fieldType, Integer::longValue, val -> luceneFieldFactory.apply(val, false)); } @@ -778,11 +770,9 @@ public void testDoubleTermsAggregator() throws Exception { return List.of(new NumericDocValuesField("field", Double.doubleToRawLongBits(val))); } }; - MappedFieldType fieldType - = new NumberFieldMapper.NumberFieldType("field", NumberFieldMapper.NumberType.DOUBLE); + MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("field", NumberFieldMapper.NumberType.DOUBLE); termsAggregator(ValueType.DOUBLE, fieldType, Integer::doubleValue, Double::compareTo, luceneFieldFactory); - termsAggregatorWithNestedMaxAgg(ValueType.DOUBLE, fieldType, Integer::doubleValue, - val -> luceneFieldFactory.apply(val, false)); + termsAggregatorWithNestedMaxAgg(ValueType.DOUBLE, fieldType, Integer::doubleValue, val -> luceneFieldFactory.apply(val, false)); } public void testIpTermsAggregator() throws Exception { @@ -808,9 +798,13 @@ public void testIpTermsAggregator() throws Exception { termsAggregator(ValueType.IP, fieldType, i -> base[0] = InetAddressPoint.nextUp(base[0]), comparator, luceneFieldFactory); } - private void termsAggregator(ValueType valueType, MappedFieldType fieldType, - Function valueFactory, Comparator keyComparator, - BiFunction> luceneFieldFactory) throws Exception { + private void termsAggregator( + ValueType valueType, + MappedFieldType fieldType, + Function valueFactory, + Comparator keyComparator, + BiFunction> luceneFieldFactory + ) throws Exception { final Map counts = new HashMap<>(); final Map filteredCounts = new HashMap<>(); int numTerms = scaledRandomIntBetween(8, 128); @@ -884,8 +878,7 @@ private void termsAggregator(ValueType valueType, MappedFieldType fieldType, String executionHint = randomFrom(TermsAggregatorFactory.ExecutionMode.values()).toString(); logger.info("bucket_order={} size={} execution_hint={}", bucketOrder, size, executionHint); IndexSearcher indexSearcher = newIndexSearcher(indexReader); - AggregationBuilder aggregationBuilder = new TermsAggregationBuilder("_name") - .userValueTypeHint(valueType) + AggregationBuilder aggregationBuilder = new TermsAggregationBuilder("_name").userValueTypeHint(valueType) .executionHint(executionHint) .size(size) .shardSize(size) @@ -900,7 +893,7 @@ private void termsAggregator(ValueType valueType, MappedFieldType fieldType, Terms result = reduce(aggregator, context.bigArrays()); assertEquals(size, result.getBuckets().size()); for (int i = 0; i < size; i++) { - Map.Entry expected = expectedBuckets.get(i); + Map.Entry expected = expectedBuckets.get(i); Terms.Bucket actual = result.getBuckets().get(i); if (valueType == ValueType.IP) { assertEquals(String.valueOf(expected.getKey()).substring(1), actual.getKey()); @@ -913,12 +906,13 @@ private void termsAggregator(ValueType valueType, MappedFieldType fieldType, if (multiValued == false) { MappedFieldType filterFieldType = new 
KeywordFieldMapper.KeywordFieldType("include"); aggregationBuilder = new FilterAggregationBuilder("_name1", QueryBuilders.termQuery("include", "yes")); - aggregationBuilder.subAggregation(new TermsAggregationBuilder("_name2") - .userValueTypeHint(valueType) - .executionHint(executionHint) - .size(numTerms) - .collectMode(randomFrom(Aggregator.SubAggCollectionMode.values())) - .field("field")); + aggregationBuilder.subAggregation( + new TermsAggregationBuilder("_name2").userValueTypeHint(valueType) + .executionHint(executionHint) + .size(numTerms) + .collectMode(randomFrom(Aggregator.SubAggCollectionMode.values())) + .field("field") + ); context = createAggregationContext(indexSearcher, new MatchAllDocsQuery(), fieldType, filterFieldType); aggregator = createAggregator(aggregationBuilder, context); aggregator.preCollection(); @@ -935,7 +929,7 @@ private void termsAggregator(ValueType valueType, MappedFieldType fieldType, for (Terms.Bucket actual : result.getBuckets()) { Integer expectedCount; if (valueType == ValueType.IP) { - expectedCount = filteredCounts.get(InetAddresses.forString((String)actual.getKey())); + expectedCount = filteredCounts.get(InetAddresses.forString((String) actual.getKey())); } else { expectedCount = filteredCounts.get(actual.getKey()); } @@ -947,9 +941,12 @@ private void termsAggregator(ValueType valueType, MappedFieldType fieldType, } } - private void termsAggregatorWithNestedMaxAgg(ValueType valueType, MappedFieldType fieldType, - Function valueFactory, - Function> luceneFieldFactory) throws Exception { + private void termsAggregatorWithNestedMaxAgg( + ValueType valueType, + MappedFieldType fieldType, + Function valueFactory, + Function> luceneFieldFactory + ) throws Exception { final Map counts = new HashMap<>(); int numTerms = scaledRandomIntBetween(8, 128); for (int i = 0; i < numTerms; i++) { @@ -978,11 +975,15 @@ private void termsAggregatorWithNestedMaxAgg(ValueType valueType, MappedFiel String executionHint = randomFrom(TermsAggregatorFactory.ExecutionMode.values()).toString(); Aggregator.SubAggCollectionMode collectionMode = randomFrom(Aggregator.SubAggCollectionMode.values()); - logger.info("bucket_order={} size={} execution_hint={}, collect_mode={}", - bucketOrder, size, executionHint, collectionMode); + logger.info( + "bucket_order={} size={} execution_hint={}, collect_mode={}", + bucketOrder, + size, + executionHint, + collectionMode + ); IndexSearcher indexSearcher = newIndexSearcher(indexReader); - AggregationBuilder aggregationBuilder = new TermsAggregationBuilder("_name") - .userValueTypeHint(valueType) + AggregationBuilder aggregationBuilder = new TermsAggregationBuilder("_name").userValueTypeHint(valueType) .executionHint(executionHint) .collectMode(collectionMode) .size(size) @@ -991,8 +992,7 @@ private void termsAggregatorWithNestedMaxAgg(ValueType valueType, MappedFiel .order(bucketOrder) .subAggregation(AggregationBuilders.max("_max").field("value")); - MappedFieldType fieldType2 - = new NumberFieldMapper.NumberFieldType("value", NumberFieldMapper.NumberType.LONG); + MappedFieldType fieldType2 = new NumberFieldMapper.NumberFieldType("value", NumberFieldMapper.NumberType.LONG); AggregationContext context = createAggregationContext(indexSearcher, new MatchAllDocsQuery(), fieldType, fieldType2); Aggregator aggregator = createAggregator(aggregationBuilder, context); aggregator.preCollection(); @@ -1001,7 +1001,7 @@ private void termsAggregatorWithNestedMaxAgg(ValueType valueType, MappedFiel Terms result = reduce(aggregator, context.bigArrays()); 
assertEquals(size, result.getBuckets().size()); for (int i = 0; i < size; i++) { - Map.Entry expected = expectedBuckets.get(i); + Map.Entry expected = expectedBuckets.get(i); Terms.Bucket actual = result.getBuckets().get(i); assertEquals(expected.getKey(), actual.getKey()); } @@ -1014,14 +1014,11 @@ public void testEmpty() throws Exception { try (Directory directory = newDirectory()) { try (RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory)) { MappedFieldType fieldType1 = new KeywordFieldMapper.KeywordFieldType("string"); - MappedFieldType fieldType2 - = new NumberFieldMapper.NumberFieldType("long", NumberFieldMapper.NumberType.LONG); - MappedFieldType fieldType3 - = new NumberFieldMapper.NumberFieldType("double", NumberFieldMapper.NumberType.DOUBLE); + MappedFieldType fieldType2 = new NumberFieldMapper.NumberFieldType("long", NumberFieldMapper.NumberType.LONG); + MappedFieldType fieldType3 = new NumberFieldMapper.NumberFieldType("double", NumberFieldMapper.NumberType.DOUBLE); try (IndexReader indexReader = maybeWrapReaderEs(indexWriter.getReader())) { IndexSearcher indexSearcher = newIndexSearcher(indexReader); - TermsAggregationBuilder aggregationBuilder = new TermsAggregationBuilder("_name") - .userValueTypeHint(ValueType.STRING) + TermsAggregationBuilder aggregationBuilder = new TermsAggregationBuilder("_name").userValueTypeHint(ValueType.STRING) .field("string"); AggregationContext context = createAggregationContext(indexSearcher, null, fieldType1); Aggregator aggregator = createAggregator(aggregationBuilder, context); @@ -1061,11 +1058,10 @@ public void testUnmapped() throws Exception { try (RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory)) { try (IndexReader indexReader = maybeWrapReaderEs(indexWriter.getReader())) { IndexSearcher indexSearcher = newIndexSearcher(indexReader); - ValueType[] valueTypes = new ValueType[]{ValueType.STRING, ValueType.LONG, ValueType.DOUBLE}; - String[] fieldNames = new String[]{"string", "long", "double"}; + ValueType[] valueTypes = new ValueType[] { ValueType.STRING, ValueType.LONG, ValueType.DOUBLE }; + String[] fieldNames = new String[] { "string", "long", "double" }; for (int i = 0; i < fieldNames.length; i++) { - TermsAggregationBuilder aggregationBuilder = new TermsAggregationBuilder("_name") - .userValueTypeHint(valueTypes[i]) + TermsAggregationBuilder aggregationBuilder = new TermsAggregationBuilder("_name").userValueTypeHint(valueTypes[i]) .field(fieldNames[i]); AggregationContext context = createAggregationContext(indexSearcher, null); Aggregator aggregator = createAggregator(aggregationBuilder, context); @@ -1095,15 +1091,14 @@ public void testUnmappedWithMissing() throws Exception { MappedFieldType fieldType1 = new KeywordFieldMapper.KeywordFieldType("unrelated_value"); IndexSearcher indexSearcher = newIndexSearcher(indexReader); - ValueType[] valueTypes = new ValueType[]{ValueType.STRING, ValueType.LONG, ValueType.DOUBLE}; - String[] fieldNames = new String[]{"string", "long", "double"}; - Object[] missingValues = new Object[]{"abc", 19L, 19.2}; - + ValueType[] valueTypes = new ValueType[] { ValueType.STRING, ValueType.LONG, ValueType.DOUBLE }; + String[] fieldNames = new String[] { "string", "long", "double" }; + Object[] missingValues = new Object[] { "abc", 19L, 19.2 }; for (int i = 0; i < fieldNames.length; i++) { - TermsAggregationBuilder aggregationBuilder = new TermsAggregationBuilder("_name") - .userValueTypeHint(valueTypes[i]) - .field(fieldNames[i]).missing(missingValues[i]); + 
TermsAggregationBuilder aggregationBuilder = new TermsAggregationBuilder("_name").userValueTypeHint(valueTypes[i]) + .field(fieldNames[i]) + .missing(missingValues[i]); AggregationContext context = createAggregationContext(indexSearcher, new MatchAllDocsQuery(), fieldType1); Aggregator aggregator = createAggregator(aggregationBuilder, context); aggregator.preCollection(); @@ -1135,10 +1130,8 @@ public void testRangeField() throws Exception { try (IndexReader indexReader = maybeWrapReaderEs(indexWriter.getReader())) { MappedFieldType fieldType = new RangeFieldMapper.RangeFieldType(fieldName, rangeType); IndexSearcher indexSearcher = newIndexSearcher(indexReader); - TermsAggregationBuilder aggregationBuilder = new TermsAggregationBuilder("_name") .field(fieldName); - expectThrows(IllegalArgumentException.class, () -> { - createAggregator(aggregationBuilder, indexSearcher, fieldType); - }); + TermsAggregationBuilder aggregationBuilder = new TermsAggregationBuilder("_name").field(fieldName); + expectThrows(IllegalArgumentException.class, () -> { createAggregator(aggregationBuilder, indexSearcher, fieldType); }); } } } @@ -1155,18 +1148,24 @@ public void testGeoPointField() throws Exception { try (IndexReader indexReader = maybeWrapReaderEs(indexWriter.getReader())) { MappedFieldType fieldType = new GeoPointFieldMapper.GeoPointFieldType("field"); IndexSearcher indexSearcher = newIndexSearcher(indexReader); - TermsAggregationBuilder aggregationBuilder = new TermsAggregationBuilder("_name") .field(field); - expectThrows(IllegalArgumentException.class, () -> { - createAggregator(aggregationBuilder, indexSearcher, fieldType); - }); + TermsAggregationBuilder aggregationBuilder = new TermsAggregationBuilder("_name").field(field); + expectThrows(IllegalArgumentException.class, () -> { createAggregator(aggregationBuilder, indexSearcher, fieldType); }); } } } } public void testIpField() throws Exception { - MappedFieldType fieldType - = new IpFieldMapper.IpFieldType("field", randomBoolean(), false, true, null, null, Collections.emptyMap(), false); + MappedFieldType fieldType = new IpFieldMapper.IpFieldType( + "field", + randomBoolean(), + false, + true, + null, + null, + Collections.emptyMap(), + false + ); testCase(new TermsAggregationBuilder("_name").field("field"), new MatchAllDocsQuery(), iw -> { Document document = new Document(); InetAddress point = InetAddresses.forString("192.168.100.42"); @@ -1204,17 +1203,17 @@ public void testNestedTermsAgg() throws Exception { IndexSearcher indexSearcher = newIndexSearcher(indexReader); String executionHint = randomFrom(TermsAggregatorFactory.ExecutionMode.values()).toString(); Aggregator.SubAggCollectionMode collectionMode = randomFrom(Aggregator.SubAggCollectionMode.values()); - TermsAggregationBuilder aggregationBuilder = new TermsAggregationBuilder("_name1") - .userValueTypeHint(ValueType.STRING) + TermsAggregationBuilder aggregationBuilder = new TermsAggregationBuilder("_name1").userValueTypeHint(ValueType.STRING) .executionHint(executionHint) .collectMode(collectionMode) .field("field1") .order(BucketOrder.key(true)) - .subAggregation(new TermsAggregationBuilder("_name2").userValueTypeHint(ValueType.STRING) - .executionHint(executionHint) - .collectMode(collectionMode) - .field("field2") - .order(BucketOrder.key(true)) + .subAggregation( + new TermsAggregationBuilder("_name2").userValueTypeHint(ValueType.STRING) + .executionHint(executionHint) + .collectMode(collectionMode) + .field("field2") + .order(BucketOrder.key(true)) ); AggregationContext 
context = createAggregationContext(indexSearcher, new MatchAllDocsQuery(), fieldType1, fieldType2); Aggregator aggregator = createAggregator(aggregationBuilder, context); @@ -1246,15 +1245,14 @@ public void testMixLongAndDouble() throws Exception { .executionHint(executionMode.toString()) .field("number") .order(BucketOrder.key(true)); - List aggs = new ArrayList<> (); + List aggs = new ArrayList<>(); int numLongs = randomIntBetween(1, 3); for (int i = 0; i < numLongs; i++) { final Directory dir; try (IndexReader reader = createIndexWithLongs()) { dir = ((DirectoryReader) reader).directory(); IndexSearcher searcher = new IndexSearcher(reader); - MappedFieldType fieldType = - new NumberFieldMapper.NumberFieldType("number", NumberFieldMapper.NumberType.LONG); + MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("number", NumberFieldMapper.NumberType.LONG); aggs.add(buildInternalAggregation(aggregationBuilder, fieldType, searcher)); } dir.close(); @@ -1265,15 +1263,17 @@ public void testMixLongAndDouble() throws Exception { try (IndexReader reader = createIndexWithDoubles()) { dir = ((DirectoryReader) reader).directory(); IndexSearcher searcher = new IndexSearcher(reader); - MappedFieldType fieldType = - new NumberFieldMapper.NumberFieldType("number", NumberFieldMapper.NumberType.DOUBLE); + MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("number", NumberFieldMapper.NumberType.DOUBLE); aggs.add(buildInternalAggregation(aggregationBuilder, fieldType, searcher)); } dir.close(); } InternalAggregation.ReduceContext ctx = InternalAggregation.ReduceContext.forFinalReduction( - new MockBigArrays(new MockPageCacheRecycler(Settings.EMPTY), new NoneCircuitBreakerService()), - null, b -> {}, PipelineTree.EMPTY); + new MockBigArrays(new MockPageCacheRecycler(Settings.EMPTY), new NoneCircuitBreakerService()), + null, + b -> {}, + PipelineTree.EMPTY + ); for (InternalAggregation internalAgg : aggs) { InternalAggregation mergedAggs = internalAgg.reduce(aggs, ctx); assertTrue(mergedAggs instanceof DoubleTerms); @@ -1308,24 +1308,21 @@ public void testGlobalAggregationWithScore() throws IOException { IndexSearcher indexSearcher = newIndexSearcher(indexReader); String executionHint = randomFrom(TermsAggregatorFactory.ExecutionMode.values()).toString(); Aggregator.SubAggCollectionMode collectionMode = randomFrom(Aggregator.SubAggCollectionMode.values()); - GlobalAggregationBuilder globalBuilder = new GlobalAggregationBuilder("global") - .subAggregation( - new TermsAggregationBuilder("terms").userValueTypeHint(ValueType.STRING) - .executionHint(executionHint) - .collectMode(collectionMode) - .field("keyword") - .order(BucketOrder.key(true)) - .subAggregation( - new TermsAggregationBuilder("sub_terms").userValueTypeHint(ValueType.STRING) - .executionHint(executionHint) - .collectMode(collectionMode) - .field("keyword").order(BucketOrder.key(true)) - .subAggregation( - new TopHitsAggregationBuilder("top_hits") - .storedField("_none_") - ) - ) - ); + GlobalAggregationBuilder globalBuilder = new GlobalAggregationBuilder("global").subAggregation( + new TermsAggregationBuilder("terms").userValueTypeHint(ValueType.STRING) + .executionHint(executionHint) + .collectMode(collectionMode) + .field("keyword") + .order(BucketOrder.key(true)) + .subAggregation( + new TermsAggregationBuilder("sub_terms").userValueTypeHint(ValueType.STRING) + .executionHint(executionHint) + .collectMode(collectionMode) + .field("keyword") + .order(BucketOrder.key(true)) + .subAggregation(new 
TopHitsAggregationBuilder("top_hits").storedField("_none_")) + ) + ); MappedFieldType fieldType = new KeywordFieldMapper.KeywordFieldType("keyword"); @@ -1335,7 +1332,7 @@ public void testGlobalAggregationWithScore() throws IOException { for (MultiBucketsAggregation.Bucket bucket : terms.getBuckets()) { InternalMultiBucketAggregation subTerms = bucket.getAggregations().get("sub_terms"); assertThat(subTerms.getBuckets().size(), equalTo(1)); - MultiBucketsAggregation.Bucket subBucket = subTerms.getBuckets().get(0); + MultiBucketsAggregation.Bucket subBucket = subTerms.getBuckets().get(0); InternalTopHits topHits = subBucket.getAggregations().get("top_hits"); assertThat(topHits.getHits().getHits().length, equalTo(1)); for (SearchHit hit : topHits.getHits()) { @@ -1359,26 +1356,32 @@ public void testWithNestedAggregations() throws IOException { } indexWriter.commit(); for (Aggregator.SubAggCollectionMode mode : Aggregator.SubAggCollectionMode.values()) { - for (boolean withScore : new boolean[]{true, false}) { - NestedAggregationBuilder nested = new NestedAggregationBuilder("nested", "nested_object") - .subAggregation(new TermsAggregationBuilder("terms").userValueTypeHint(ValueType.LONG) + for (boolean withScore : new boolean[] { true, false }) { + NestedAggregationBuilder nested = new NestedAggregationBuilder("nested", "nested_object").subAggregation( + new TermsAggregationBuilder("terms").userValueTypeHint(ValueType.LONG) .field("nested_value") // force the breadth_first mode .collectMode(mode) .order(BucketOrder.key(true)) .subAggregation( - new TopHitsAggregationBuilder("top_hits") - .sort(withScore ? new ScoreSortBuilder() : new FieldSortBuilder("_doc")) - .storedField("_none_") + new TopHitsAggregationBuilder("top_hits").sort( + withScore ? new ScoreSortBuilder() : new FieldSortBuilder("_doc") + ).storedField("_none_") ) - ); - MappedFieldType fieldType - = new NumberFieldMapper.NumberFieldType("nested_value", NumberFieldMapper.NumberType.LONG); + ); + MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType( + "nested_value", + NumberFieldMapper.NumberType.LONG + ); try (IndexReader indexReader = wrapInMockESDirectoryReader(DirectoryReader.open(directory))) { { - InternalNested result = searchAndReduce(newSearcher(indexReader, false, true), + InternalNested result = searchAndReduce( + newSearcher(indexReader, false, true), // match root document only - new DocValuesFieldExistsQuery(PRIMARY_TERM_NAME), nested, fieldType); + new DocValuesFieldExistsQuery(PRIMARY_TERM_NAME), + nested, + fieldType + ); InternalMultiBucketAggregation terms = result.getAggregations().get("terms"); assertNestedTopHitsScore(terms, withScore); } @@ -1386,9 +1389,13 @@ public void testWithNestedAggregations() throws IOException { { FilterAggregationBuilder filter = new FilterAggregationBuilder("filter", new MatchAllQueryBuilder()) .subAggregation(nested); - InternalFilter result = searchAndReduce(newSearcher(indexReader, false, true), + InternalFilter result = searchAndReduce( + newSearcher(indexReader, false, true), // match root document only - new DocValuesFieldExistsQuery(PRIMARY_TERM_NAME), filter, fieldType); + new DocValuesFieldExistsQuery(PRIMARY_TERM_NAME), + filter, + fieldType + ); InternalNested nestedResult = result.getAggregations().get("nested"); InternalMultiBucketAggregation terms = nestedResult.getAggregations().get("terms"); assertNestedTopHitsScore(terms, withScore); @@ -1405,29 +1412,35 @@ public void testHeisenpig() throws IOException { KeywordFieldType animalFieldType = new 
KeywordFieldType("str", randomBoolean(), true, Collections.emptyMap()); try (Directory directory = newDirectory()) { try (RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory)) { - String[] tags = new String[] {"danger", "fluffiness"}; - indexWriter.addDocuments(generateAnimalDocsWithNested("1", animalFieldType, "sheep", tags, new int[] {1, 10})); - indexWriter.addDocuments(generateAnimalDocsWithNested("2", animalFieldType, "cow", tags, new int[] {3, 1})); - indexWriter.addDocuments(generateAnimalDocsWithNested("3", animalFieldType, "pig", tags, new int[] {100, 1})); + String[] tags = new String[] { "danger", "fluffiness" }; + indexWriter.addDocuments(generateAnimalDocsWithNested("1", animalFieldType, "sheep", tags, new int[] { 1, 10 })); + indexWriter.addDocuments(generateAnimalDocsWithNested("2", animalFieldType, "cow", tags, new int[] { 3, 1 })); + indexWriter.addDocuments(generateAnimalDocsWithNested("3", animalFieldType, "pig", tags, new int[] { 100, 1 })); indexWriter.commit(); - NestedAggregationBuilder nested = new NestedAggregationBuilder("nested", "nested_object") - .subAggregation( - new MaxAggregationBuilder("max_number").field("number") - ); - TermsAggregationBuilder terms = new TermsAggregationBuilder("str_terms") - .field("str") + NestedAggregationBuilder nested = new NestedAggregationBuilder("nested", "nested_object").subAggregation( + new MaxAggregationBuilder("max_number").field("number") + ); + TermsAggregationBuilder terms = new TermsAggregationBuilder("str_terms").field("str") .subAggregation(nested) .shardSize(10) .size(10) .order(BucketOrder.aggregation("nested>max_number", false)); try (IndexReader indexReader = wrapInMockESDirectoryReader(DirectoryReader.open(directory))) { - StringTerms result = searchAndReduce(newSearcher(indexReader, false, true), + StringTerms result = searchAndReduce( + newSearcher(indexReader, false, true), // match root document only - Queries.newNonNestedFilter(), terms, animalFieldType, nestedFieldType); + Queries.newNonNestedFilter(), + terms, + animalFieldType, + nestedFieldType + ); assertThat(result.getBuckets().get(0).getKeyAsString(), equalTo("pig")); assertThat(result.getBuckets().get(0).docCount, equalTo(1L)); - assertThat(((InternalMax) (((InternalNested)result.getBuckets().get(0).getAggregations().get("nested")) - .getAggregations().get("max_number"))).getValue(), closeTo(100.0, 0.00001)); + assertThat( + ((InternalMax) (((InternalNested) result.getBuckets().get(0).getAggregations().get("nested")).getAggregations() + .get("max_number"))).getValue(), + closeTo(100.0, 0.00001) + ); } } } @@ -1444,12 +1457,10 @@ public void testSortingWithNestedAggregations() throws IOException { indexWriter.addDocuments(generateDocsWithNested(Integer.toString(i), i % 4, nestedValues)); } indexWriter.commit(); - NestedAggregationBuilder nested = new NestedAggregationBuilder("nested", "nested_object") - .subAggregation( - new MaxAggregationBuilder("max_val").field("nested_value") - ); - TermsAggregationBuilder terms = new TermsAggregationBuilder("terms") - .field("value") + NestedAggregationBuilder nested = new NestedAggregationBuilder("nested", "nested_object").subAggregation( + new MaxAggregationBuilder("max_val").field("nested_value") + ); + TermsAggregationBuilder terms = new TermsAggregationBuilder("terms").field("value") .subAggregation(nested) .shardSize(1) .size(1) @@ -1457,12 +1468,20 @@ public void testSortingWithNestedAggregations() throws IOException { MappedFieldType nestedFieldType = new 
NumberFieldMapper.NumberFieldType("nested_value", NumberFieldMapper.NumberType.LONG); MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("value", NumberFieldMapper.NumberType.LONG); try (IndexReader indexReader = wrapInMockESDirectoryReader(DirectoryReader.open(directory))) { - LongTerms result = searchAndReduce(newSearcher(indexReader, false, true), + LongTerms result = searchAndReduce( + newSearcher(indexReader, false, true), // match root document only - new DocValuesFieldExistsQuery(PRIMARY_TERM_NAME), terms, fieldType, nestedFieldType); + new DocValuesFieldExistsQuery(PRIMARY_TERM_NAME), + terms, + fieldType, + nestedFieldType + ); assertThat(result.getBuckets().get(0).term, equalTo(3L)); - assertThat(((InternalMax) (((InternalNested)result.getBuckets().get(0).getAggregations().get("nested")) - .getAggregations().get("max_val"))).getValue(), closeTo(10.0, 0.00001)); + assertThat( + ((InternalMax) (((InternalNested) result.getBuckets().get(0).getAggregations().get("nested")).getAggregations() + .get("max_val"))).getValue(), + closeTo(10.0, 0.00001) + ); } } } @@ -1491,18 +1510,20 @@ public void testManySegmentsStillSingleton() throws IOException { new Field("str", new BytesRef("sheep"), KeywordFieldMapper.Defaults.FIELD_TYPE) ) ); - }, searcher -> debugTestCase( - builder, - new MatchAllDocsQuery(), - searcher, - (LongTerms result, Class impl, Map> debug) -> { - Map subDebug = debug.get("n.str"); - assertThat(subDebug, hasEntry("segments_with_single_valued_ords", 2)); - assertThat(subDebug, hasEntry("segments_with_multi_valued_ords", 0)); - }, - nFt, - strFt - )); + }, + searcher -> debugTestCase( + builder, + new MatchAllDocsQuery(), + searcher, + (LongTerms result, Class impl, Map> debug) -> { + Map subDebug = debug.get("n.str"); + assertThat(subDebug, hasEntry("segments_with_single_valued_ords", 2)); + assertThat(subDebug, hasEntry("segments_with_multi_valued_ords", 0)); + }, + nFt, + strFt + ) + ); } public void topLevelProfileTestCase( @@ -1535,8 +1556,7 @@ public void topLevelProfileTestCase( matchesMap().entry( "str", extraMatcher.apply( - matchesMap() - .entry("result_strategy", "terms") + matchesMap().entry("result_strategy", "terms") .entry("total_buckets", (long) count) .entry("segments_with_single_valued_ords", greaterThan(0)) .entry("segments_with_multi_valued_ords", 0) @@ -1583,11 +1603,9 @@ public void testLowCardinalityProfile() throws IOException { } public void testNumberToStringValueScript() throws IOException { - MappedFieldType fieldType - = new NumberFieldMapper.NumberFieldType("number", NumberFieldMapper.NumberType.INTEGER); + MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("number", NumberFieldMapper.NumberType.INTEGER); - TermsAggregationBuilder aggregationBuilder = new TermsAggregationBuilder("name") - .userValueTypeHint(ValueType.STRING) + TermsAggregationBuilder aggregationBuilder = new TermsAggregationBuilder("name").userValueTypeHint(ValueType.STRING) .field("number") .script(new Script(ScriptType.INLINE, MockScriptEngine.NAME, STRING_SCRIPT_NAME, Collections.emptyMap())); @@ -1596,9 +1614,7 @@ public void testNumberToStringValueScript() throws IOException { for (int i = 0; i < numDocs; i++) { iw.addDocument(singleton(new NumericDocValuesField("number", i + 1))); } - }, (Consumer>) terms -> { - assertTrue(AggregationInspectionHelper.hasValue(terms)); - }, fieldType); + }, (Consumer>) terms -> { assertTrue(AggregationInspectionHelper.hasValue(terms)); }, fieldType); } public void testThreeLayerStringViaGlobalOrds() throws 
IOException { @@ -1629,9 +1645,13 @@ private void threeLayerStringTestCase(String executionHint) throws IOException { } try (IndexReader reader = maybeWrapReaderEs(writer.getReader())) { IndexSearcher searcher = newIndexSearcher(reader); - TermsAggregationBuilder request = new TermsAggregationBuilder("i").field("i").executionHint(executionHint) - .subAggregation(new TermsAggregationBuilder("j").field("j").executionHint(executionHint) - .subAggregation(new TermsAggregationBuilder("k").field("k").executionHint(executionHint))); + TermsAggregationBuilder request = new TermsAggregationBuilder("i").field("i") + .executionHint(executionHint) + .subAggregation( + new TermsAggregationBuilder("j").field("j") + .executionHint(executionHint) + .subAggregation(new TermsAggregationBuilder("k").field("k").executionHint(executionHint)) + ); StringTerms result = searchAndReduce(searcher, new MatchAllDocsQuery(), request, ift, jft, kft); for (int i = 0; i < 10; i++) { StringTerms.Bucket iBucket = result.getBucketByKey(Integer.toString(i)); @@ -1669,10 +1689,17 @@ public void testThreeLayerLong() throws IOException { try (IndexReader reader = maybeWrapReaderEs(writer.getReader())) { IndexSearcher searcher = newIndexSearcher(reader); TermsAggregationBuilder request = new TermsAggregationBuilder("i").field("i") - .subAggregation(new TermsAggregationBuilder("j").field("j") - .subAggregation(new TermsAggregationBuilder("k").field("k"))); - LongTerms result = searchAndReduce(searcher, new MatchAllDocsQuery(), request, - longField("i"), longField("j"), longField("k")); + .subAggregation( + new TermsAggregationBuilder("j").field("j").subAggregation(new TermsAggregationBuilder("k").field("k")) + ); + LongTerms result = searchAndReduce( + searcher, + new MatchAllDocsQuery(), + request, + longField("i"), + longField("j"), + longField("k") + ); for (int i = 0; i < 10; i++) { LongTerms.Bucket iBucket = result.getBucketByKey(Integer.toString(i)); assertThat(iBucket.getDocCount(), equalTo(100L)); @@ -1714,32 +1741,32 @@ public void testOrderByPipelineAggregation() throws Exception { try (IndexReader indexReader = maybeWrapReaderEs(indexWriter.getReader())) { IndexSearcher indexSearcher = newIndexSearcher(indexReader); - BucketScriptPipelineAggregationBuilder bucketScriptAgg = bucketScript( - "script", new Script("2.718")); - TermsAggregationBuilder termsAgg = terms("terms") - .field("field") + BucketScriptPipelineAggregationBuilder bucketScriptAgg = bucketScript("script", new Script("2.718")); + TermsAggregationBuilder termsAgg = terms("terms").field("field") .userValueTypeHint(ValueType.STRING) .order(BucketOrder.aggregation("script", true)) .subAggregation(bucketScriptAgg); MappedFieldType fieldType = new KeywordFieldMapper.KeywordFieldType("field"); - AggregationExecutionException e = expectThrows(AggregationExecutionException.class, - () -> createAggregator(termsAgg, indexSearcher, fieldType)); - assertEquals("Invalid aggregation order path [script]. The provided aggregation [script] " + - "either does not exist, or is a pipeline aggregation and cannot be used to sort the buckets.", - e.getMessage()); + AggregationExecutionException e = expectThrows( + AggregationExecutionException.class, + () -> createAggregator(termsAgg, indexSearcher, fieldType) + ); + assertEquals( + "Invalid aggregation order path [script]. 
The provided aggregation [script] " + + "either does not exist, or is a pipeline aggregation and cannot be used to sort the buckets.", + e.getMessage() + ); } } } } public void testFormatWithMissing() throws IOException { - MappedFieldType fieldType - = new NumberFieldMapper.NumberFieldType("number", NumberFieldMapper.NumberType.INTEGER); + MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("number", NumberFieldMapper.NumberType.INTEGER); - TermsAggregationBuilder aggregationBuilder = new TermsAggregationBuilder("name") - .field("number") + TermsAggregationBuilder aggregationBuilder = new TermsAggregationBuilder("name").field("number") .format("$###.00") .missing(randomFrom(42, "$42", 42.0)); @@ -1805,8 +1832,7 @@ public void testOrderByCardinality() throws IOException { } iw.close(); - try (DirectoryReader unwrapped = DirectoryReader.open(directory); - IndexReader indexReader = wrapDirectoryReader(unwrapped)) { + try (DirectoryReader unwrapped = DirectoryReader.open(directory); IndexReader indexReader = wrapDirectoryReader(unwrapped)) { IndexSearcher indexSearcher = newIndexSearcher(indexReader); LongTerms terms = searchAndReduce( @@ -1925,11 +1951,13 @@ public void testWithFilterAndPreciseSize() throws IOException { * would trigger that bug. */ builder.size(2).order(BucketOrder.key(true)); - Query topLevel = new BooleanQuery.Builder() - .add(new TermQuery(new Term("k", "b")), Occur.SHOULD) + Query topLevel = new BooleanQuery.Builder().add(new TermQuery(new Term("k", "b")), Occur.SHOULD) .add(new TermQuery(new Term("k", "c")), Occur.SHOULD) .build(); - debugTestCase(builder, topLevel, buildIndex, + debugTestCase( + builder, + topLevel, + buildIndex, (StringTerms terms, Class impl, Map> debug) -> { assertThat(terms.getBuckets().stream().map(StringTerms.Bucket::getKey).collect(toList()), equalTo(List.of("b", "c"))); assertThat(terms.getBuckets().stream().map(StringTerms.Bucket::getDocCount).collect(toList()), equalTo(List.of(1L, 1L))); @@ -1959,7 +1987,8 @@ public void testWithFilterAndPreciseSize() throws IOException { ) ); }, - kft); + kft + ); } /** @@ -1978,9 +2007,7 @@ public void execute() { emit("cat"); } }; - BytesRef[] values = new BytesRef[] { - new BytesRef("stuff"), new BytesRef("more_stuff"), new BytesRef("other_stuff"), - }; + BytesRef[] values = new BytesRef[] { new BytesRef("stuff"), new BytesRef("more_stuff"), new BytesRef("other_stuff"), }; Query query = new StringScriptFieldTermQuery(new Script("dummy"), scriptFactory, "dummy", "cat", false); debugTestCase(new TermsAggregationBuilder("t").field("k"), query, iw -> { for (int d = 0; d < totalDocs; d++) { @@ -2034,9 +2061,7 @@ public void execute() { emit((String) getDoc().get("k").get(0)); } }; - BytesRef[] values = new BytesRef[] { - new BytesRef("stuff"), new BytesRef("more_stuff"), new BytesRef("other_stuff"), - }; + BytesRef[] values = new BytesRef[] { new BytesRef("stuff"), new BytesRef("more_stuff"), new BytesRef("other_stuff"), }; MappedFieldType keywordFt = new KeywordFieldType("k", true, true, Collections.emptyMap()); MappedFieldType dummyFt = new KeywordScriptFieldType("dummy", scriptFactory, new Script("test"), Map.of()); debugTestCase(new TermsAggregationBuilder("t").field("dummy"), new MatchAllDocsQuery(), iw -> { @@ -2067,6 +2092,7 @@ public void execute() { } private final SeqNoFieldMapper.SequenceIDFields sequenceIDFields = SeqNoFieldMapper.SequenceIDFields.emptySeqID(); + private List generateDocsWithNested(String id, int value, int[] nestedValues) { List documents = new ArrayList<>(); @@ 
-2155,8 +2181,8 @@ private IndexReader createIndexWithDoubles() throws IOException { return DirectoryReader.open(directory); } - private InternalAggregation buildInternalAggregation(TermsAggregationBuilder builder, MappedFieldType fieldType, - IndexSearcher searcher) throws IOException { + private InternalAggregation buildInternalAggregation(TermsAggregationBuilder builder, MappedFieldType fieldType, IndexSearcher searcher) + throws IOException { TermsAggregator aggregator = createAggregator(builder, searcher, fieldType); aggregator.preCollection(); searcher.search(new MatchAllDocsQuery(), aggregator); @@ -2166,14 +2192,19 @@ private InternalAggregation buildInternalAggregation(TermsAggregationBuilder bui private T reduce(Aggregator agg, BigArrays bigArrays) throws IOException { // now do the final reduce - MultiBucketConsumerService.MultiBucketConsumer reduceBucketConsumer = - new MultiBucketConsumerService.MultiBucketConsumer(Integer.MAX_VALUE, - new NoneCircuitBreakerService().getBreaker(CircuitBreaker.REQUEST)); + MultiBucketConsumerService.MultiBucketConsumer reduceBucketConsumer = new MultiBucketConsumerService.MultiBucketConsumer( + Integer.MAX_VALUE, + new NoneCircuitBreakerService().getBreaker(CircuitBreaker.REQUEST) + ); InternalAggregation.ReduceContext context = InternalAggregation.ReduceContext.forFinalReduction( - bigArrays, getMockScriptService(), reduceBucketConsumer, PipelineTree.EMPTY); + bigArrays, + getMockScriptService(), + reduceBucketConsumer, + PipelineTree.EMPTY + ); @SuppressWarnings("unchecked") - T topLevel = (T) agg.buildTopLevel(); + T topLevel = (T) agg.buildTopLevel(); @SuppressWarnings("unchecked") T result = (T) topLevel.reduce(Collections.singletonList(topLevel), context); doAssertReducedMultiBucketConsumer(result, reduceBucketConsumer); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/heuristic/GNDTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/heuristic/GNDTests.java index 3cde741ab5ce6..640733345234c 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/heuristic/GNDTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/heuristic/GNDTests.java @@ -30,11 +30,11 @@ public void testAssertions() { */ public void testGNDCornerCases() { GND gnd = new GND(true); - assertThat(gnd.getScore(0, randomIntBetween(1, 2), 0, randomIntBetween(2,3)), equalTo(0.0)); + assertThat(gnd.getScore(0, randomIntBetween(1, 2), 0, randomIntBetween(2, 3)), equalTo(0.0)); // the terms do not co-occur at all - should be 0 - assertThat(gnd.getScore(0, randomIntBetween(1, 2), randomIntBetween(2, 3), randomIntBetween(5,6)), equalTo(0.0)); + assertThat(gnd.getScore(0, randomIntBetween(1, 2), randomIntBetween(2, 3), randomIntBetween(5, 6)), equalTo(0.0)); // comparison between two terms that do not exist - probably not relevant - assertThat(gnd.getScore(0, 0, 0, randomIntBetween(1,2)), equalTo(0.0)); + assertThat(gnd.getScore(0, 0, 0, randomIntBetween(1, 2)), equalTo(0.0)); // terms co-occur perfectly - should be 1 assertThat(gnd.getScore(1, 1, 1, 1), equalTo(1.0)); gnd = new GND(false); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/heuristic/MutualInformationTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/heuristic/MutualInformationTests.java index 655d283c10aed..4c6b04c3e283d 100644 --- 
a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/heuristic/MutualInformationTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/heuristic/MutualInformationTests.java @@ -44,8 +44,7 @@ public void testScoreMutual() { long c = randomLong(); long d = randomLong(); score = heuristic.getScore(a, b, c, d); - } catch (IllegalArgumentException e) { - } + } catch (IllegalArgumentException e) {} assertThat(score, lessThanOrEqualTo(1.0)); assertThat(score, greaterThanOrEqualTo(0.0)); heuristic = new MutualInformation(false, true); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/AbstractGeoTestCase.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/AbstractGeoTestCase.java index 695d7ee463fa9..23fe564a26cb0 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/AbstractGeoTestCase.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/AbstractGeoTestCase.java @@ -53,8 +53,8 @@ public abstract class AbstractGeoTestCase extends ESIntegTestCase { protected static int numDocs; protected static int numUniqueGeoPoints; protected static GeoPoint[] singleValues, multiValues; - protected static GeoPoint singleTopLeft, singleBottomRight, multiTopLeft, multiBottomRight, - singleCentroid, multiCentroid, unmappedCentroid; + protected static GeoPoint singleTopLeft, singleBottomRight, multiTopLeft, multiBottomRight, singleCentroid, multiCentroid, + unmappedCentroid; protected static ObjectIntMap expectedDocCountsForGeoHash = null; protected static ObjectObjectMap expectedCentroidsForGeoHash = null; protected static final double GEOHASH_TOLERANCE = 1E-5D; @@ -62,9 +62,18 @@ public abstract class AbstractGeoTestCase extends ESIntegTestCase { @Override public void setupSuiteScopeCluster() throws Exception { createIndex(UNMAPPED_IDX_NAME); - assertAcked(prepareCreate(IDX_NAME) - .setMapping(SINGLE_VALUED_FIELD_NAME, "type=geo_point", - MULTI_VALUED_FIELD_NAME, "type=geo_point", NUMBER_FIELD_NAME, "type=long", "tag", "type=keyword")); + assertAcked( + prepareCreate(IDX_NAME).setMapping( + SINGLE_VALUED_FIELD_NAME, + "type=geo_point", + MULTI_VALUED_FIELD_NAME, + "type=geo_point", + NUMBER_FIELD_NAME, + "type=long", + "tag", + "type=keyword" + ) + ); singleTopLeft = new GeoPoint(Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY); singleBottomRight = new GeoPoint(Double.POSITIVE_INFINITY, Double.NEGATIVE_INFINITY); @@ -80,16 +89,14 @@ public void setupSuiteScopeCluster() throws Exception { expectedCentroidsForGeoHash = new ObjectObjectHashMap<>(numDocs * 2); singleValues = new GeoPoint[numUniqueGeoPoints]; - for (int i = 0 ; i < singleValues.length; i++) - { + for (int i = 0; i < singleValues.length; i++) { singleValues[i] = RandomGeoGenerator.randomPoint(random()); updateBoundsTopLeft(singleValues[i], singleTopLeft); updateBoundsBottomRight(singleValues[i], singleBottomRight); } multiValues = new GeoPoint[numUniqueGeoPoints]; - for (int i = 0 ; i < multiValues.length; i++) - { + for (int i = 0; i < multiValues.length; i++) { multiValues[i] = RandomGeoGenerator.randomPoint(random()); updateBoundsTopLeft(multiValues[i], multiTopLeft); updateBoundsBottomRight(multiValues[i], multiBottomRight); @@ -103,32 +110,53 @@ public void setupSuiteScopeCluster() throws Exception { for (int i = 0; i < numDocs; i++) { singleVal = singleValues[i % numUniqueGeoPoints]; multiVal[0] = multiValues[i % numUniqueGeoPoints]; - multiVal[1] = multiValues[(i+1) % numUniqueGeoPoints]; - 
builders.add(client().prepareIndex(IDX_NAME).setSource(jsonBuilder() - .startObject() - .array(SINGLE_VALUED_FIELD_NAME, singleVal.lon(), singleVal.lat()) - .startArray(MULTI_VALUED_FIELD_NAME) - .startArray().value(multiVal[0].lon()).value(multiVal[0].lat()).endArray() - .startArray().value(multiVal[1].lon()).value(multiVal[1].lat()).endArray() - .endArray() - .field(NUMBER_FIELD_NAME, i) - .field("tag", "tag" + i) - .endObject())); - singleCentroid = singleCentroid.reset(singleCentroid.lat() + (singleVal.lat() - singleCentroid.lat()) / (i+1), - singleCentroid.lon() + (singleVal.lon() - singleCentroid.lon()) / (i+1)); - newMVLat = (multiVal[0].lat() + multiVal[1].lat())/2d; - newMVLon = (multiVal[0].lon() + multiVal[1].lon())/2d; - multiCentroid = multiCentroid.reset(multiCentroid.lat() + (newMVLat - multiCentroid.lat()) / (i+1), - multiCentroid.lon() + (newMVLon - multiCentroid.lon()) / (i+1)); + multiVal[1] = multiValues[(i + 1) % numUniqueGeoPoints]; + builders.add( + client().prepareIndex(IDX_NAME) + .setSource( + jsonBuilder().startObject() + .array(SINGLE_VALUED_FIELD_NAME, singleVal.lon(), singleVal.lat()) + .startArray(MULTI_VALUED_FIELD_NAME) + .startArray() + .value(multiVal[0].lon()) + .value(multiVal[0].lat()) + .endArray() + .startArray() + .value(multiVal[1].lon()) + .value(multiVal[1].lat()) + .endArray() + .endArray() + .field(NUMBER_FIELD_NAME, i) + .field("tag", "tag" + i) + .endObject() + ) + ); + singleCentroid = singleCentroid.reset( + singleCentroid.lat() + (singleVal.lat() - singleCentroid.lat()) / (i + 1), + singleCentroid.lon() + (singleVal.lon() - singleCentroid.lon()) / (i + 1) + ); + newMVLat = (multiVal[0].lat() + multiVal[1].lat()) / 2d; + newMVLon = (multiVal[0].lon() + multiVal[1].lon()) / 2d; + multiCentroid = multiCentroid.reset( + multiCentroid.lat() + (newMVLat - multiCentroid.lat()) / (i + 1), + multiCentroid.lon() + (newMVLon - multiCentroid.lon()) / (i + 1) + ); } assertAcked(prepareCreate(EMPTY_IDX_NAME).setMapping(SINGLE_VALUED_FIELD_NAME, "type=geo_point")); - assertAcked(prepareCreate(DATELINE_IDX_NAME) - .setMapping(SINGLE_VALUED_FIELD_NAME, - "type=geo_point", MULTI_VALUED_FIELD_NAME, - "type=geo_point", NUMBER_FIELD_NAME, - "type=long", "tag", "type=keyword")); + assertAcked( + prepareCreate(DATELINE_IDX_NAME).setMapping( + SINGLE_VALUED_FIELD_NAME, + "type=geo_point", + MULTI_VALUED_FIELD_NAME, + "type=geo_point", + NUMBER_FIELD_NAME, + "type=long", + "tag", + "type=keyword" + ) + ); GeoPoint[] geoValues = new GeoPoint[5]; geoValues[0] = new GeoPoint(38, 178); @@ -138,43 +166,60 @@ public void setupSuiteScopeCluster() throws Exception { geoValues[4] = new GeoPoint(-11, 178); for (int i = 0; i < 5; i++) { - builders.add(client().prepareIndex(DATELINE_IDX_NAME).setSource(jsonBuilder() - .startObject() - .array(SINGLE_VALUED_FIELD_NAME, geoValues[i].lon(), geoValues[i].lat()) - .field(NUMBER_FIELD_NAME, i) - .field("tag", "tag" + i) - .endObject())); + builders.add( + client().prepareIndex(DATELINE_IDX_NAME) + .setSource( + jsonBuilder().startObject() + .array(SINGLE_VALUED_FIELD_NAME, geoValues[i].lon(), geoValues[i].lat()) + .field(NUMBER_FIELD_NAME, i) + .field("tag", "tag" + i) + .endObject() + ) + ); } - assertAcked(prepareCreate(HIGH_CARD_IDX_NAME).setSettings(Settings.builder().put("number_of_shards", 2)) - .setMapping(SINGLE_VALUED_FIELD_NAME, - "type=geo_point", MULTI_VALUED_FIELD_NAME, - "type=geo_point", NUMBER_FIELD_NAME, + assertAcked( + prepareCreate(HIGH_CARD_IDX_NAME).setSettings(Settings.builder().put("number_of_shards", 2)) + 
.setMapping( + SINGLE_VALUED_FIELD_NAME, + "type=geo_point", + MULTI_VALUED_FIELD_NAME, + "type=geo_point", + NUMBER_FIELD_NAME, "type=long,store=true", - "tag", "type=keyword")); + "tag", + "type=keyword" + ) + ); for (int i = 0; i < 2000; i++) { singleVal = singleValues[i % numUniqueGeoPoints]; - builders.add(client().prepareIndex(HIGH_CARD_IDX_NAME).setSource(jsonBuilder() - .startObject() - .array(SINGLE_VALUED_FIELD_NAME, singleVal.lon(), singleVal.lat()) - .startArray(MULTI_VALUED_FIELD_NAME) - .startArray() - .value(multiValues[i % numUniqueGeoPoints].lon()) - .value(multiValues[i % numUniqueGeoPoints].lat()) - .endArray() - .startArray() - .value(multiValues[(i + 1) % numUniqueGeoPoints].lon()) - .value(multiValues[(i + 1) % numUniqueGeoPoints].lat()) - .endArray() - .endArray() - .field(NUMBER_FIELD_NAME, i) - .field("tag", "tag" + i) - .endObject())); - updateGeohashBucketsCentroid(singleVal); + builders.add( + client().prepareIndex(HIGH_CARD_IDX_NAME) + .setSource( + jsonBuilder().startObject() + .array(SINGLE_VALUED_FIELD_NAME, singleVal.lon(), singleVal.lat()) + .startArray(MULTI_VALUED_FIELD_NAME) + .startArray() + .value(multiValues[i % numUniqueGeoPoints].lon()) + .value(multiValues[i % numUniqueGeoPoints].lat()) + .endArray() + .startArray() + .value(multiValues[(i + 1) % numUniqueGeoPoints].lon()) + .value(multiValues[(i + 1) % numUniqueGeoPoints].lat()) + .endArray() + .endArray() + .field(NUMBER_FIELD_NAME, i) + .field("tag", "tag" + i) + .endObject() + ) + ); + updateGeohashBucketsCentroid(singleVal); } - builders.add(client().prepareIndex(IDX_ZERO_NAME).setSource( - jsonBuilder().startObject().array(SINGLE_VALUED_FIELD_NAME, 0.0, 1.0).endObject())); + builders.add( + client().prepareIndex(IDX_ZERO_NAME) + .setSource(jsonBuilder().startObject().array(SINGLE_VALUED_FIELD_NAME, 0.0, 1.0).endObject()) + ); assertAcked(prepareCreate(IDX_ZERO_NAME).setMapping(SINGLE_VALUED_FIELD_NAME, "type=geo_point")); indexRandom(true, builders); @@ -184,8 +229,11 @@ public void setupSuiteScopeCluster() throws Exception { // value for NUMBER_FIELD_NAME. This will check that after random indexing each document only has 1 value for // NUMBER_FIELD_NAME and it is the correct value. Following this initial change its seems that this call was getting // more that 2000 hits (actual value was 2059) so now it will also check to ensure all hits have the correct index and type. 
- SearchResponse response = client().prepareSearch(HIGH_CARD_IDX_NAME).addStoredField(NUMBER_FIELD_NAME) - .addSort(SortBuilders.fieldSort(NUMBER_FIELD_NAME).order(SortOrder.ASC)).setSize(5000).get(); + SearchResponse response = client().prepareSearch(HIGH_CARD_IDX_NAME) + .addStoredField(NUMBER_FIELD_NAME) + .addSort(SortBuilders.fieldSort(NUMBER_FIELD_NAME).order(SortOrder.ASC)) + .setSize(5000) + .get(); assertSearchResponse(response); long totalHits = response.getHits().getTotalHits().value; XContentBuilder builder = XContentFactory.jsonBuilder(); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/AbstractNumericMetricTestCase.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/AbstractNumericMetricTestCase.java index 0b1c0fb3f9caf..cd1616e01d32e 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/AbstractNumericMetricTestCase.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/AbstractNumericMetricTestCase.java @@ -12,8 +12,8 @@ import org.elasticsearch.search.aggregations.support.ValuesSource; import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder; -public abstract class AbstractNumericMetricTestCase> - extends BaseAggregationTestCase { +public abstract class AbstractNumericMetricTestCase> extends + BaseAggregationTestCase { @Override protected final AF createTestAggregatorBuilder() { diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/AbstractPercentilesTestCase.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/AbstractPercentilesTestCase.java index 6b1b5514298a7..00ce4c64bb6dd 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/AbstractPercentilesTestCase.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/AbstractPercentilesTestCase.java @@ -29,8 +29,8 @@ import static java.util.stream.Collectors.toList; import static org.hamcrest.Matchers.equalTo; -public abstract class AbstractPercentilesTestCase> - extends InternalAggregationTestCase { +public abstract class AbstractPercentilesTestCase> extends InternalAggregationTestCase< + T> { @Override protected T createTestInstance(String name, Map metadata) { return createTestInstance(name, metadata, randomBoolean(), randomNumericDocValueFormat(), randomPercents(false)); @@ -53,8 +53,14 @@ private T createTestInstance(String name, Map metadata, boolean return createTestInstance(name, metadata, keyed, format, percents, values); } - protected abstract T createTestInstance(String name, Map metadata, - boolean keyed, DocValueFormat format, double[] percents, double[] values); + protected abstract T createTestInstance( + String name, + Map metadata, + boolean keyed, + DocValueFormat format, + double[] percents, + double[] values + ); protected abstract Class implementationClass(); @@ -89,7 +95,7 @@ protected Predicate excludePathsFromXContentInsertion() { protected abstract void assertPercentile(T agg, Double value); public void testEmptyRanksXContent() throws IOException { - double[] percents = new double[]{1,2,3}; + double[] percents = new double[] { 1, 2, 3 }; boolean keyed = randomBoolean(); DocValueFormat docValueFormat = randomNumericDocValueFormat(); @@ -106,30 +112,30 @@ public void testEmptyRanksXContent() throws IOException { builder.endObject(); String expected; if (keyed) { - expected = "{\n" + - " \"values\" : {\n" + - " \"1.0\" : null,\n" + - " \"2.0\" : null,\n" + - " \"3.0\" : null\n" + - " 
}\n" + - "}"; + expected = "{\n" + + " \"values\" : {\n" + + " \"1.0\" : null,\n" + + " \"2.0\" : null,\n" + + " \"3.0\" : null\n" + + " }\n" + + "}"; } else { - expected = "{\n" + - " \"values\" : [\n" + - " {\n" + - " \"key\" : 1.0,\n" + - " \"value\" : null\n" + - " },\n" + - " {\n" + - " \"key\" : 2.0,\n" + - " \"value\" : null\n" + - " },\n" + - " {\n" + - " \"key\" : 3.0,\n" + - " \"value\" : null\n" + - " }\n" + - " ]\n" + - "}"; + expected = "{\n" + + " \"values\" : [\n" + + " {\n" + + " \"key\" : 1.0,\n" + + " \"value\" : null\n" + + " },\n" + + " {\n" + + " \"key\" : 2.0,\n" + + " \"value\" : null\n" + + " },\n" + + " {\n" + + " \"key\" : 3.0,\n" + + " \"value\" : null\n" + + " }\n" + + " ]\n" + + "}"; } assertThat(Strings.toString(builder), equalTo(expected)); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/AdjacencyMatrixTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/AdjacencyMatrixTests.java index 0e6148b424e15..6b407b3776919 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/AdjacencyMatrixTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/AdjacencyMatrixTests.java @@ -28,8 +28,7 @@ protected AdjacencyMatrixAggregationBuilder createTestAggregatorBuilder() { for (String key : randomUnique(() -> randomAlphaOfLengthBetween(1, 20), size)) { filters.put(key, QueryBuilders.termQuery(randomAlphaOfLengthBetween(5, 20), randomAlphaOfLengthBetween(5, 20))); } - factory = new AdjacencyMatrixAggregationBuilder(randomAlphaOfLengthBetween(1, 20), filters) - .separator(randomFrom("&","+","\t")); + factory = new AdjacencyMatrixAggregationBuilder(randomAlphaOfLengthBetween(1, 20), filters).separator(randomFrom("&", "+", "\t")); return factory; } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/AvgAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/AvgAggregatorTests.java index d1691e41418c3..754c14b9ca87b 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/AvgAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/AvgAggregatorTests.java @@ -23,8 +23,8 @@ import org.apache.lucene.search.Query; import org.apache.lucene.store.Directory; import org.apache.lucene.util.NumericUtils; -import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.NumberFieldMapper; import org.elasticsearch.script.MockScriptEngine; @@ -114,10 +114,12 @@ protected ScriptService getMockScriptService() { Map, Object>> nonDeterministicScripts = new HashMap<>(); nonDeterministicScripts.put(RANDOM_SCRIPT, vars -> AvgAggregatorTests.randomDouble()); - MockScriptEngine scriptEngine = new MockScriptEngine(MockScriptEngine.NAME, + MockScriptEngine scriptEngine = new MockScriptEngine( + MockScriptEngine.NAME, scripts, nonDeterministicScripts, - Collections.emptyMap()); + Collections.emptyMap() + ); Map engines = Collections.singletonMap(scriptEngine.getType(), scriptEngine); return new ScriptService(Settings.EMPTY, engines, ScriptModule.CORE_CONTEXTS); @@ -188,7 +190,7 @@ public void testQueryFiltersAll() throws IOException { public void testSummationAccuracy() throws IOException { // Summing up a normal array and expect an accurate value - double[] values = new double[]{0.1, 0.2, 0.3, 0.4, 0.5, 
0.6, 0.7, 0.9, 1.1, 1.2, 1.3, 1.4, 1.5, 1.6, 1.7}; + double[] values = new double[] { 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.9, 1.1, 1.2, 1.3, 1.4, 1.5, 1.6, 1.7 }; verifyAvgOfDoubles(values, 0.9, 0d); // Summing up an array which contains NaN and infinities and expect a result same as naive summation @@ -242,25 +244,21 @@ public void testUnmappedWithMissingField() throws IOException { private void verifyAvgOfDoubles(double[] values, double expected, double delta) throws IOException { AvgAggregationBuilder aggregationBuilder = new AvgAggregationBuilder("_name").field("number"); MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("number", NumberFieldMapper.NumberType.DOUBLE); - testAggregation(aggregationBuilder, new MatchAllDocsQuery(), - iw -> { - List> docs = new ArrayList<>(); - for (double value : values) { - docs.add(List.of(new NumericDocValuesField("number", NumericUtils.doubleToSortableLong(value)))); - } - /* - * Use add documents to force us to collect from a single segment - * so we don't break the collection across the shrads. We can't do - * *that* because we don't bring back the compensations for the sum - * back in the shard results. If we don't bring back the compensations - * errors can creep in. Not big errors, but big enough to upset this - * test. - */ - iw.addDocuments(docs); - }, - avg -> assertEquals(expected, avg.getValue(), delta), - fieldType - ); + testAggregation(aggregationBuilder, new MatchAllDocsQuery(), iw -> { + List> docs = new ArrayList<>(); + for (double value : values) { + docs.add(List.of(new NumericDocValuesField("number", NumericUtils.doubleToSortableLong(value)))); + } + /* + * Use add documents to force us to collect from a single segment + * so we don't break the collection across the shrads. We can't do + * *that* because we don't bring back the compensations for the sum + * back in the shard results. If we don't bring back the compensations + * errors can creep in. Not big errors, but big enough to upset this + * test. 
+ */ + iw.addDocuments(docs); + }, avg -> assertEquals(expected, avg.getValue(), delta), fieldType); } public void testSingleValuedFieldPartiallyUnmapped() throws IOException { @@ -280,7 +278,6 @@ public void testSingleValuedFieldPartiallyUnmapped() throws IOException { MultiReader multiReader = new MultiReader(indexReader, unamappedIndexReader); IndexSearcher indexSearcher = newSearcher(multiReader, true, true); - MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("number", NumberFieldMapper.NumberType.INTEGER); AvgAggregationBuilder aggregationBuilder = new AvgAggregationBuilder("_name").field("number"); @@ -315,8 +312,7 @@ public void testSingleValuedField() throws IOException { public void testSingleValuedField_WithFormatter() throws IOException { MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("value", NumberFieldMapper.NumberType.INTEGER); - AvgAggregationBuilder aggregationBuilder = new AvgAggregationBuilder("_name") - .format("#") + AvgAggregationBuilder aggregationBuilder = new AvgAggregationBuilder("_name").format("#") .field("value") .script(new Script(ScriptType.INLINE, MockScriptEngine.NAME, VALUE_SCRIPT, Collections.emptyMap())); @@ -326,7 +322,7 @@ public void testSingleValuedField_WithFormatter() throws IOException { iw.addDocument(singleton(new NumericDocValuesField("value", i + 1))); } }, avg -> { - assertEquals((double) (1 + 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10) / 10, avg.getValue(),0); + assertEquals((double) (1 + 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10) / 10, avg.getValue(), 0); assertTrue(AggregationInspectionHelper.hasValue(avg)); assertEquals("6", avg.getValueAsString()); }, fieldType); @@ -335,8 +331,7 @@ public void testSingleValuedField_WithFormatter() throws IOException { public void testSingleValuedFieldWithValueScript() throws IOException { MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("value", NumberFieldMapper.NumberType.INTEGER); - AvgAggregationBuilder aggregationBuilder = new AvgAggregationBuilder("_name") - .field("value") + AvgAggregationBuilder aggregationBuilder = new AvgAggregationBuilder("_name").field("value") .script(new Script(ScriptType.INLINE, MockScriptEngine.NAME, VALUE_SCRIPT, Collections.emptyMap())); testAggregation(aggregationBuilder, new MatchAllDocsQuery(), iw -> { @@ -353,8 +348,9 @@ public void testSingleValuedFieldWithValueScript() throws IOException { public void testScriptSingleValued() throws IOException { MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("value", NumberFieldMapper.NumberType.INTEGER); - AvgAggregationBuilder aggregationBuilder = new AvgAggregationBuilder("_name") - .script(new Script(ScriptType.INLINE, MockScriptEngine.NAME, VALUE_FIELD_SCRIPT, Collections.emptyMap())); + AvgAggregationBuilder aggregationBuilder = new AvgAggregationBuilder("_name").script( + new Script(ScriptType.INLINE, MockScriptEngine.NAME, VALUE_FIELD_SCRIPT, Collections.emptyMap()) + ); testAggregation(aggregationBuilder, new MatchAllDocsQuery(), iw -> { final int numDocs = 10; @@ -374,8 +370,9 @@ public void testScriptSingleValuedWithParams() throws IOException { params.put("inc", 1); params.put("field", "value"); - AvgAggregationBuilder aggregationBuilder = new AvgAggregationBuilder("_name") - .script(new Script(ScriptType.INLINE, MockScriptEngine.NAME, SUM_FIELD_PARAMS_SCRIPT, params)); + AvgAggregationBuilder aggregationBuilder = new AvgAggregationBuilder("_name").script( + new Script(ScriptType.INLINE, MockScriptEngine.NAME, SUM_FIELD_PARAMS_SCRIPT, params) + ); 
testAggregation(aggregationBuilder, new MatchAllDocsQuery(), iw -> { final int numDocs = 10; @@ -383,7 +380,7 @@ public void testScriptSingleValuedWithParams() throws IOException { iw.addDocument(singleton(new NumericDocValuesField("value", i + 1))); } }, avg -> { - assertEquals((double) (2+3+4+5+6+7+8+9+10+11) / 10, avg.getValue(), 0); + assertEquals((double) (2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10 + 11) / 10, avg.getValue(), 0); assertTrue(AggregationInspectionHelper.hasValue(avg)); }, fieldType); } @@ -398,7 +395,7 @@ public void testMultiValuedField() throws IOException { iw.addDocument(document); } }, avg -> { - assertEquals((2+3+3+4+4+5+5+6+6+7+7+8+8+9+9+10+10+11+11+12) / 20, avg.getValue(), 0); + assertEquals((2 + 3 + 3 + 4 + 4 + 5 + 5 + 6 + 6 + 7 + 7 + 8 + 8 + 9 + 9 + 10 + 10 + 11 + 11 + 12) / 20, avg.getValue(), 0); assertTrue(AggregationInspectionHelper.hasValue(avg)); }); } @@ -406,8 +403,9 @@ public void testMultiValuedField() throws IOException { public void testScriptMultiValued() throws IOException { MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("values", NumberFieldMapper.NumberType.INTEGER); - AvgAggregationBuilder aggregationBuilder = new AvgAggregationBuilder("_name") - .script(new Script(ScriptType.INLINE, MockScriptEngine.NAME, SUM_VALUES_FIELD_SCRIPT, Collections.emptyMap())); + AvgAggregationBuilder aggregationBuilder = new AvgAggregationBuilder("_name").script( + new Script(ScriptType.INLINE, MockScriptEngine.NAME, SUM_VALUES_FIELD_SCRIPT, Collections.emptyMap()) + ); testAggregation(aggregationBuilder, new MatchAllDocsQuery(), iw -> { final int numDocs = 10; @@ -418,7 +416,11 @@ public void testScriptMultiValued() throws IOException { iw.addDocument(document); } }, avg -> { - assertEquals((double) (2+3+3+4+4+5+5+6+6+7+7+8+8+9+9+10+10+11+11+12) / 20, avg.getValue(), 0); + assertEquals( + (double) (2 + 3 + 3 + 4 + 4 + 5 + 5 + 6 + 6 + 7 + 7 + 8 + 8 + 9 + 9 + 10 + 10 + 11 + 11 + 12) / 20, + avg.getValue(), + 0 + ); assertTrue(AggregationInspectionHelper.hasValue(avg)); }, fieldType); } @@ -430,8 +432,9 @@ public void testScriptMultiValuedWithParams() throws Exception { MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("values", NumberFieldMapper.NumberType.INTEGER); - AvgAggregationBuilder aggregationBuilder = new AvgAggregationBuilder("_name") - .script(new Script(ScriptType.INLINE, MockScriptEngine.NAME, SUM_FIELD_PARAMS_SCRIPT, params)); + AvgAggregationBuilder aggregationBuilder = new AvgAggregationBuilder("_name").script( + new Script(ScriptType.INLINE, MockScriptEngine.NAME, SUM_FIELD_PARAMS_SCRIPT, params) + ); testAggregation(aggregationBuilder, new MatchAllDocsQuery(), iw -> { final int numDocs = 10; @@ -442,7 +445,11 @@ public void testScriptMultiValuedWithParams() throws Exception { iw.addDocument(document); } }, avg -> { - assertEquals((double) (3+4+4+5+5+6+6+7+7+8+8+9+9+10+10+11+11+12+12+13) / 20, avg.getValue(), 0); + assertEquals( + (double) (3 + 4 + 4 + 5 + 5 + 6 + 6 + 7 + 7 + 8 + 8 + 9 + 9 + 10 + 10 + 11 + 11 + 12 + 12 + 13) / 20, + avg.getValue(), + 0 + ); assertTrue(AggregationInspectionHelper.hasValue(avg)); }, fieldType); } @@ -451,8 +458,7 @@ public void testSingleValuedFieldWithValueScriptWithParams() throws IOException MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("value", NumberFieldMapper.NumberType.INTEGER); Map params = Collections.singletonMap("inc", 1); - AvgAggregationBuilder aggregationBuilder = new AvgAggregationBuilder("_name") - .field("value") + AvgAggregationBuilder 
aggregationBuilder = new AvgAggregationBuilder("_name").field("value") .script(new Script(ScriptType.INLINE, MockScriptEngine.NAME, VALUE_SCRIPT, params)); testAggregation(aggregationBuilder, new MatchAllDocsQuery(), iw -> { @@ -461,7 +467,7 @@ public void testSingleValuedFieldWithValueScriptWithParams() throws IOException iw.addDocument(singleton(new NumericDocValuesField("value", i + 1))); } }, avg -> { - assertEquals((double) (2+3+4+5+6+7+8+9+10+11) / 10, avg.getValue(), 0); + assertEquals((double) (2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10 + 11) / 10, avg.getValue(), 0); assertTrue(AggregationInspectionHelper.hasValue(avg)); }, fieldType); } @@ -470,8 +476,7 @@ public void testMultiValuedFieldWithValueScriptWithParams() throws IOException { MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("values", NumberFieldMapper.NumberType.INTEGER); Map params = Collections.singletonMap("inc", 1); - AvgAggregationBuilder aggregationBuilder = new AvgAggregationBuilder("_name") - .field("values") + AvgAggregationBuilder aggregationBuilder = new AvgAggregationBuilder("_name").field("values") .script(new Script(ScriptType.INLINE, MockScriptEngine.NAME, VALUE_SCRIPT, params)); testAggregation(aggregationBuilder, new MatchAllDocsQuery(), iw -> { @@ -483,7 +488,11 @@ public void testMultiValuedFieldWithValueScriptWithParams() throws IOException { iw.addDocument(document); } }, avg -> { - assertEquals((double) (3+4+4+5+5+6+6+7+7+8+8+9+9+10+10+11+11+12+12+13) / 20, avg.getValue(), 0); + assertEquals( + (double) (3 + 4 + 4 + 5 + 5 + 6 + 6 + 7 + 7 + 8 + 8 + 9 + 9 + 10 + 10 + 11 + 11 + 12 + 12 + 13) / 20, + avg.getValue(), + 0 + ); assertTrue(AggregationInspectionHelper.hasValue(avg)); }, fieldType); } @@ -491,8 +500,7 @@ public void testMultiValuedFieldWithValueScriptWithParams() throws IOException { public void testMultiValuedFieldWithValueScript() throws IOException { MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("values", NumberFieldMapper.NumberType.INTEGER); - AvgAggregationBuilder aggregationBuilder = new AvgAggregationBuilder("_name") - .field("values") + AvgAggregationBuilder aggregationBuilder = new AvgAggregationBuilder("_name").field("values") .script(new Script(ScriptType.INLINE, MockScriptEngine.NAME, VALUE_SCRIPT, Collections.emptyMap())); testAggregation(aggregationBuilder, new MatchAllDocsQuery(), iw -> { @@ -504,7 +512,11 @@ public void testMultiValuedFieldWithValueScript() throws IOException { iw.addDocument(document); } }, avg -> { - assertEquals((double) (2+3+3+4+4+5+5+6+6+7+7+8+8+9+9+10+10+11+11+12) / 20, avg.getValue(), 0); + assertEquals( + (double) (2 + 3 + 3 + 4 + 4 + 5 + 5 + 6 + 6 + 7 + 7 + 8 + 8 + 9 + 9 + 10 + 10 + 11 + 11 + 12) / 20, + avg.getValue(), + 0 + ); assertTrue(AggregationInspectionHelper.hasValue(avg)); }, fieldType); } @@ -515,8 +527,9 @@ public void testOrderByEmptyAggregation() throws IOException { AggregationBuilder aggregationBuilder = new TermsAggregationBuilder("terms").userValueTypeHint(ValueType.NUMERIC) .field("value") .order(BucketOrder.compound(BucketOrder.aggregation("filter>avg", true))) - .subAggregation(AggregationBuilders.filter("filter", termQuery("value", 100)) - .subAggregation(AggregationBuilders.avg("avg").field("value"))); + .subAggregation( + AggregationBuilders.filter("filter", termQuery("value", 100)).subAggregation(AggregationBuilders.avg("avg").field("value")) + ); Directory directory = newDirectory(); RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory); @@ -559,9 +572,8 @@ public void 
testOrderByEmptyAggregation() throws IOException { directory.close(); } - private void testAggregation(Query query, - CheckedConsumer buildIndex, - Consumer verify) throws IOException { + private void testAggregation(Query query, CheckedConsumer buildIndex, Consumer verify) + throws IOException { MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("number", NumberFieldMapper.NumberType.INTEGER); AvgAggregationBuilder aggregationBuilder = new AvgAggregationBuilder("_name").field("number"); testAggregation(aggregationBuilder, query, buildIndex, verify, fieldType); @@ -599,8 +611,7 @@ public void testCacheAggregation() throws IOException { IndexSearcher indexSearcher = newSearcher(multiReader, true, true); MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("value", NumberFieldMapper.NumberType.INTEGER); - AvgAggregationBuilder aggregationBuilder = new AvgAggregationBuilder("avg") - .field("value"); + AvgAggregationBuilder aggregationBuilder = new AvgAggregationBuilder("avg").field("value"); AggregationContext context = createAggregationContext(indexSearcher, null, fieldType); AvgAggregator aggregator = createAggregator(aggregationBuilder, context); @@ -645,8 +656,7 @@ public void testScriptCaching() throws IOException { IndexSearcher indexSearcher = newSearcher(multiReader, true, true); MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("value", NumberFieldMapper.NumberType.INTEGER); - AvgAggregationBuilder aggregationBuilder = new AvgAggregationBuilder("avg") - .field("value") + AvgAggregationBuilder aggregationBuilder = new AvgAggregationBuilder("avg").field("value") .script(new Script(ScriptType.INLINE, MockScriptEngine.NAME, VALUE_SCRIPT, Collections.emptyMap())); AggregationContext context = createAggregationContext(indexSearcher, null, fieldType); @@ -664,8 +674,7 @@ public void testScriptCaching() throws IOException { // Test that an aggregation using a deterministic script gets cached assertTrue(context.isCacheable()); - aggregationBuilder = new AvgAggregationBuilder("avg") - .field("value") + aggregationBuilder = new AvgAggregationBuilder("avg").field("value") .script(new Script(ScriptType.INLINE, MockScriptEngine.NAME, RANDOM_SCRIPT, Collections.emptyMap())); context = createAggregationContext(indexSearcher, null, fieldType); @@ -691,11 +700,7 @@ public void testScriptCaching() throws IOException { @Override protected List getSupportedValuesSourceTypes() { - return List.of( - CoreValuesSourceType.NUMERIC, - CoreValuesSourceType.BOOLEAN, - CoreValuesSourceType.DATE - ); + return List.of(CoreValuesSourceType.NUMERIC, CoreValuesSourceType.BOOLEAN, CoreValuesSourceType.DATE); } @Override diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/CardinalityAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/CardinalityAggregatorTests.java index e3cc35f9c4013..26065a72fdf00 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/CardinalityAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/CardinalityAggregatorTests.java @@ -16,8 +16,8 @@ import org.apache.lucene.search.DocValuesFieldExistsQuery; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.Query; -import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.common.geo.GeoPoint; +import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.index.mapper.MappedFieldType; import 
org.elasticsearch.index.mapper.NumberFieldMapper; import org.elasticsearch.index.mapper.RangeFieldMapper; @@ -51,7 +51,7 @@ public void testRangeFieldValues() throws IOException { final String fieldName = "rangeField"; MappedFieldType fieldType = new RangeFieldMapper.RangeFieldType(fieldName, rangeType); final CardinalityAggregationBuilder aggregationBuilder = new CardinalityAggregationBuilder("_name").field(fieldName); - testAggregation(aggregationBuilder, new MatchAllDocsQuery(), iw -> { + testAggregation(aggregationBuilder, new MatchAllDocsQuery(), iw -> { iw.addDocument(singleton(new BinaryDocValuesField(fieldName, rangeType.encodeRanges(singleton(range1))))); iw.addDocument(singleton(new BinaryDocValuesField(fieldName, rangeType.encodeRanges(singleton(range1))))); iw.addDocument(singleton(new BinaryDocValuesField(fieldName, rangeType.encodeRanges(singleton(range2))))); @@ -94,10 +94,8 @@ public void testSomeMatchesNumericDocValues() throws IOException { public void testQueryFiltering() throws IOException { testAggregation(IntPoint.newRangeQuery("number", 0, 5), iw -> { - iw.addDocument(Arrays.asList(new IntPoint("number", 7), - new SortedNumericDocValuesField("number", 7))); - iw.addDocument(Arrays.asList(new IntPoint("number", 1), - new SortedNumericDocValuesField("number", 1))); + iw.addDocument(Arrays.asList(new IntPoint("number", 7), new SortedNumericDocValuesField("number", 7))); + iw.addDocument(Arrays.asList(new IntPoint("number", 1), new SortedNumericDocValuesField("number", 1))); }, card -> { assertEquals(1, card.getValue(), 0); assertTrue(AggregationInspectionHelper.hasValue(card)); @@ -106,10 +104,8 @@ public void testQueryFiltering() throws IOException { public void testQueryFiltersAll() throws IOException { testAggregation(IntPoint.newRangeQuery("number", -1, 0), iw -> { - iw.addDocument(Arrays.asList(new IntPoint("number", 7), - new SortedNumericDocValuesField("number", 7))); - iw.addDocument(Arrays.asList(new IntPoint("number", 1), - new SortedNumericDocValuesField("number", 1))); + iw.addDocument(Arrays.asList(new IntPoint("number", 7), new SortedNumericDocValuesField("number", 7))); + iw.addDocument(Arrays.asList(new IntPoint("number", 1), new SortedNumericDocValuesField("number", 1))); }, card -> { assertEquals(0.0, card.getValue(), 0); assertFalse(AggregationInspectionHelper.hasValue(card)); @@ -117,8 +113,7 @@ public void testQueryFiltersAll() throws IOException { } public void testUnmappedMissingString() throws IOException { - CardinalityAggregationBuilder aggregationBuilder = new CardinalityAggregationBuilder("name") - .field("number").missing("🍌🍌🍌"); + CardinalityAggregationBuilder aggregationBuilder = new CardinalityAggregationBuilder("name").field("number").missing("🍌🍌🍌"); testAggregation(aggregationBuilder, new MatchAllDocsQuery(), iw -> { iw.addDocument(singleton(new NumericDocValuesField("unrelatedField", 7))); @@ -131,8 +126,7 @@ public void testUnmappedMissingString() throws IOException { } public void testUnmappedMissingNumber() throws IOException { - CardinalityAggregationBuilder aggregationBuilder = new CardinalityAggregationBuilder("name") - .field("number").missing(1234); + CardinalityAggregationBuilder aggregationBuilder = new CardinalityAggregationBuilder("name").field("number").missing(1234); testAggregation(aggregationBuilder, new MatchAllDocsQuery(), iw -> { iw.addDocument(singleton(new NumericDocValuesField("unrelatedField", 7))); @@ -145,8 +139,8 @@ public void testUnmappedMissingNumber() throws IOException { } public void 
testUnmappedMissingGeoPoint() throws IOException { - CardinalityAggregationBuilder aggregationBuilder = new CardinalityAggregationBuilder("name") - .field("number").missing(new GeoPoint(42.39561, -71.13051)); + CardinalityAggregationBuilder aggregationBuilder = new CardinalityAggregationBuilder("name").field("number") + .missing(new GeoPoint(42.39561, -71.13051)); testAggregation(aggregationBuilder, new MatchAllDocsQuery(), iw -> { iw.addDocument(singleton(new NumericDocValuesField("unrelatedField", 7))); @@ -158,8 +152,11 @@ public void testUnmappedMissingGeoPoint() throws IOException { }); } - private void testAggregation(Query query, CheckedConsumer buildIndex, - Consumer verify) throws IOException { + private void testAggregation( + Query query, + CheckedConsumer buildIndex, + Consumer verify + ) throws IOException { MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("number", NumberFieldMapper.NumberType.LONG); final CardinalityAggregationBuilder aggregationBuilder = new CardinalityAggregationBuilder("_name").field("number"); testAggregation(aggregationBuilder, query, buildIndex, verify, fieldType); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/CompensatedSumTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/CompensatedSumTests.java index d86a3bdf6d108..40d36bec35ee1 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/CompensatedSumTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/CompensatedSumTests.java @@ -66,7 +66,7 @@ public void testDelta() { public void testInfiniteAndNaN() { CompensatedSum compensatedResult1 = new CompensatedSum(0, 0); - double[] doubles = {Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY, Double.NaN}; + double[] doubles = { Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY, Double.NaN }; for (double d : doubles) { compensatedResult1.add(d); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsAggregatorTests.java index 5e1a02ddd0cae..3780ae9bf8394 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsAggregatorTests.java @@ -28,160 +28,181 @@ public class ExtendedStatsAggregatorTests extends AggregatorTestCase { private static final double TOLERANCE = 1e-5; - // TODO: Add script test cases. Should fail with defaultValuesSourceType() commented out. + // TODO: Add script test cases. Should fail with defaultValuesSourceType() commented out. 
public void testEmpty() throws IOException { - MappedFieldType ft = - new NumberFieldMapper.NumberFieldType("field", NumberFieldMapper.NumberType.LONG); - testCase(ft, iw -> {}, - stats -> { - assertEquals(0d, stats.getCount(), 0); - assertEquals(0d, stats.getSum(), 0); - assertEquals(Float.NaN, stats.getAvg(), 0); - assertEquals(Double.POSITIVE_INFINITY, stats.getMin(), 0); - assertEquals(Double.NEGATIVE_INFINITY, stats.getMax(), 0); - assertEquals(Double.NaN, stats.getVariance(), 0); - assertEquals(Double.NaN, stats.getVariancePopulation(), 0); - assertEquals(Double.NaN, stats.getVarianceSampling(), 0); - assertEquals(Double.NaN, stats.getStdDeviation(), 0); - assertEquals(Double.NaN, stats.getStdDeviationPopulation(), 0); - assertEquals(Double.NaN, stats.getStdDeviationSampling(), 0); - assertEquals(0d, stats.getSumOfSquares(), 0); - assertFalse(AggregationInspectionHelper.hasValue(stats)); - } - ); + MappedFieldType ft = new NumberFieldMapper.NumberFieldType("field", NumberFieldMapper.NumberType.LONG); + testCase(ft, iw -> {}, stats -> { + assertEquals(0d, stats.getCount(), 0); + assertEquals(0d, stats.getSum(), 0); + assertEquals(Float.NaN, stats.getAvg(), 0); + assertEquals(Double.POSITIVE_INFINITY, stats.getMin(), 0); + assertEquals(Double.NEGATIVE_INFINITY, stats.getMax(), 0); + assertEquals(Double.NaN, stats.getVariance(), 0); + assertEquals(Double.NaN, stats.getVariancePopulation(), 0); + assertEquals(Double.NaN, stats.getVarianceSampling(), 0); + assertEquals(Double.NaN, stats.getStdDeviation(), 0); + assertEquals(Double.NaN, stats.getStdDeviationPopulation(), 0); + assertEquals(Double.NaN, stats.getStdDeviationSampling(), 0); + assertEquals(0d, stats.getSumOfSquares(), 0); + assertFalse(AggregationInspectionHelper.hasValue(stats)); + }); } public void testRandomDoubles() throws IOException { - MappedFieldType ft = - new NumberFieldMapper.NumberFieldType("field", NumberFieldMapper.NumberType.DOUBLE); + MappedFieldType ft = new NumberFieldMapper.NumberFieldType("field", NumberFieldMapper.NumberType.DOUBLE); final ExtendedSimpleStatsAggregator expected = new ExtendedSimpleStatsAggregator(); - testCase(ft, - iw -> { - int numDocs = randomIntBetween(10, 50); - for (int i = 0; i < numDocs; i++) { - Document doc = new Document(); - int numValues = randomIntBetween(1, 5); - for (int j = 0; j < numValues; j++) { - double value = randomDoubleBetween(-100d, 100d, true); - long valueAsLong = NumericUtils.doubleToSortableLong(value); - doc.add(new SortedNumericDocValuesField("field", valueAsLong)); - expected.add(value); - } - iw.addDocument(doc); + testCase(ft, iw -> { + int numDocs = randomIntBetween(10, 50); + for (int i = 0; i < numDocs; i++) { + Document doc = new Document(); + int numValues = randomIntBetween(1, 5); + for (int j = 0; j < numValues; j++) { + double value = randomDoubleBetween(-100d, 100d, true); + long valueAsLong = NumericUtils.doubleToSortableLong(value); + doc.add(new SortedNumericDocValuesField("field", valueAsLong)); + expected.add(value); } - }, - stats -> { - assertEquals(expected.count, stats.getCount(), 0); - assertEquals(expected.sum, stats.getSum(), TOLERANCE); - assertEquals(expected.min, stats.getMin(), 0); - assertEquals(expected.max, stats.getMax(), 0); - assertEquals(expected.sum / expected.count, stats.getAvg(), TOLERANCE); - assertEquals(expected.sumOfSqrs, stats.getSumOfSquares(), TOLERANCE); - assertEquals(expected.stdDev(), stats.getStdDeviation(), TOLERANCE); - assertEquals(expected.stdDevPopulation(), stats.getStdDeviationPopulation(), 
TOLERANCE); - assertEquals(expected.stdDevSampling(), stats.getStdDeviationSampling(), TOLERANCE); - assertEquals(expected.variance(), stats.getVariance(), TOLERANCE); - assertEquals(expected.variancePopulation(), stats.getVariancePopulation(), TOLERANCE); - assertEquals(expected.varianceSampling(), stats.getVarianceSampling(), TOLERANCE); - assertEquals(expected.stdDevBound(ExtendedStats.Bounds.LOWER, stats.getSigma()), - stats.getStdDeviationBound(ExtendedStats.Bounds.LOWER), TOLERANCE); - assertEquals(expected.stdDevBound(ExtendedStats.Bounds.UPPER, stats.getSigma()), - stats.getStdDeviationBound(ExtendedStats.Bounds.UPPER), TOLERANCE); - assertEquals(expected.stdDevBound(ExtendedStats.Bounds.LOWER_POPULATION, stats.getSigma()), - stats.getStdDeviationBound(ExtendedStats.Bounds.LOWER_POPULATION), TOLERANCE); - assertEquals(expected.stdDevBound(ExtendedStats.Bounds.UPPER_POPULATION, stats.getSigma()), - stats.getStdDeviationBound(ExtendedStats.Bounds.UPPER_POPULATION), TOLERANCE); - assertEquals(expected.stdDevBound(ExtendedStats.Bounds.LOWER_SAMPLING, stats.getSigma()), - stats.getStdDeviationBound(ExtendedStats.Bounds.LOWER_SAMPLING), TOLERANCE); - assertEquals(expected.stdDevBound(ExtendedStats.Bounds.UPPER_SAMPLING, stats.getSigma()), - stats.getStdDeviationBound(ExtendedStats.Bounds.UPPER_SAMPLING), TOLERANCE); - assertTrue(AggregationInspectionHelper.hasValue(stats)); + iw.addDocument(doc); } - ); + }, stats -> { + assertEquals(expected.count, stats.getCount(), 0); + assertEquals(expected.sum, stats.getSum(), TOLERANCE); + assertEquals(expected.min, stats.getMin(), 0); + assertEquals(expected.max, stats.getMax(), 0); + assertEquals(expected.sum / expected.count, stats.getAvg(), TOLERANCE); + assertEquals(expected.sumOfSqrs, stats.getSumOfSquares(), TOLERANCE); + assertEquals(expected.stdDev(), stats.getStdDeviation(), TOLERANCE); + assertEquals(expected.stdDevPopulation(), stats.getStdDeviationPopulation(), TOLERANCE); + assertEquals(expected.stdDevSampling(), stats.getStdDeviationSampling(), TOLERANCE); + assertEquals(expected.variance(), stats.getVariance(), TOLERANCE); + assertEquals(expected.variancePopulation(), stats.getVariancePopulation(), TOLERANCE); + assertEquals(expected.varianceSampling(), stats.getVarianceSampling(), TOLERANCE); + assertEquals( + expected.stdDevBound(ExtendedStats.Bounds.LOWER, stats.getSigma()), + stats.getStdDeviationBound(ExtendedStats.Bounds.LOWER), + TOLERANCE + ); + assertEquals( + expected.stdDevBound(ExtendedStats.Bounds.UPPER, stats.getSigma()), + stats.getStdDeviationBound(ExtendedStats.Bounds.UPPER), + TOLERANCE + ); + assertEquals( + expected.stdDevBound(ExtendedStats.Bounds.LOWER_POPULATION, stats.getSigma()), + stats.getStdDeviationBound(ExtendedStats.Bounds.LOWER_POPULATION), + TOLERANCE + ); + assertEquals( + expected.stdDevBound(ExtendedStats.Bounds.UPPER_POPULATION, stats.getSigma()), + stats.getStdDeviationBound(ExtendedStats.Bounds.UPPER_POPULATION), + TOLERANCE + ); + assertEquals( + expected.stdDevBound(ExtendedStats.Bounds.LOWER_SAMPLING, stats.getSigma()), + stats.getStdDeviationBound(ExtendedStats.Bounds.LOWER_SAMPLING), + TOLERANCE + ); + assertEquals( + expected.stdDevBound(ExtendedStats.Bounds.UPPER_SAMPLING, stats.getSigma()), + stats.getStdDeviationBound(ExtendedStats.Bounds.UPPER_SAMPLING), + TOLERANCE + ); + assertTrue(AggregationInspectionHelper.hasValue(stats)); + }); } /** * Testcase for https://github.com/elastic/elasticsearch/issues/37303 */ public void testVarianceNonNegative() throws IOException { - 
MappedFieldType ft = - new NumberFieldMapper.NumberFieldType("field", NumberFieldMapper.NumberType.DOUBLE); + MappedFieldType ft = new NumberFieldMapper.NumberFieldType("field", NumberFieldMapper.NumberType.DOUBLE); final ExtendedSimpleStatsAggregator expected = new ExtendedSimpleStatsAggregator(); - testCase(ft, - iw -> { - int numDocs = 3; - for (int i = 0; i < numDocs; i++) { - Document doc = new Document(); - double value = 49.95d; - long valueAsLong = NumericUtils.doubleToSortableLong(value); - doc.add(new SortedNumericDocValuesField("field", valueAsLong)); - expected.add(value); - iw.addDocument(doc); - } - }, - stats -> { - //since the value(49.95) is a constant, variance should be 0 - assertEquals(0.0d, stats.getVariance(), TOLERANCE); - assertEquals(0.0d, stats.getVariancePopulation(), TOLERANCE); - assertEquals(0.0d, stats.getVarianceSampling(), TOLERANCE); - assertEquals(0.0d, stats.getStdDeviation(), TOLERANCE); - assertEquals(0.0d, stats.getStdDeviationPopulation(), TOLERANCE); - assertEquals(0.0d, stats.getStdDeviationSampling(), TOLERANCE); + testCase(ft, iw -> { + int numDocs = 3; + for (int i = 0; i < numDocs; i++) { + Document doc = new Document(); + double value = 49.95d; + long valueAsLong = NumericUtils.doubleToSortableLong(value); + doc.add(new SortedNumericDocValuesField("field", valueAsLong)); + expected.add(value); + iw.addDocument(doc); } - ); + }, stats -> { + // since the value(49.95) is a constant, variance should be 0 + assertEquals(0.0d, stats.getVariance(), TOLERANCE); + assertEquals(0.0d, stats.getVariancePopulation(), TOLERANCE); + assertEquals(0.0d, stats.getVarianceSampling(), TOLERANCE); + assertEquals(0.0d, stats.getStdDeviation(), TOLERANCE); + assertEquals(0.0d, stats.getStdDeviationPopulation(), TOLERANCE); + assertEquals(0.0d, stats.getStdDeviationSampling(), TOLERANCE); + }); } public void testRandomLongs() throws IOException { - MappedFieldType ft = - new NumberFieldMapper.NumberFieldType("field", NumberFieldMapper.NumberType.LONG); + MappedFieldType ft = new NumberFieldMapper.NumberFieldType("field", NumberFieldMapper.NumberType.LONG); final ExtendedSimpleStatsAggregator expected = new ExtendedSimpleStatsAggregator(); - testCase(ft, - iw -> { - int numDocs = randomIntBetween(10, 50); - for (int i = 0; i < numDocs; i++) { - Document doc = new Document(); - int numValues = randomIntBetween(1, 5); - for (int j = 0; j < numValues; j++) { - long value = randomIntBetween(-100, 100); - doc.add(new SortedNumericDocValuesField("field", value)); - expected.add(value); - } - iw.addDocument(doc); + testCase(ft, iw -> { + int numDocs = randomIntBetween(10, 50); + for (int i = 0; i < numDocs; i++) { + Document doc = new Document(); + int numValues = randomIntBetween(1, 5); + for (int j = 0; j < numValues; j++) { + long value = randomIntBetween(-100, 100); + doc.add(new SortedNumericDocValuesField("field", value)); + expected.add(value); } - }, - stats -> { - assertEquals(expected.count, stats.getCount(), 0); - assertEquals(expected.sum, stats.getSum(), TOLERANCE); - assertEquals(expected.min, stats.getMin(), 0); - assertEquals(expected.max, stats.getMax(), 0); - assertEquals(expected.sum / expected.count, stats.getAvg(), TOLERANCE); - assertEquals(expected.sumOfSqrs, stats.getSumOfSquares(), TOLERANCE); - assertEquals(expected.stdDev(), stats.getStdDeviation(), TOLERANCE); - assertEquals(expected.stdDevPopulation(), stats.getStdDeviationPopulation(), TOLERANCE); - assertEquals(expected.stdDevSampling(), stats.getStdDeviationSampling(), TOLERANCE); - 
assertEquals(expected.variance(), stats.getVariance(), TOLERANCE); - assertEquals(expected.variancePopulation(), stats.getVariancePopulation(), TOLERANCE); - assertEquals(expected.varianceSampling(), stats.getVarianceSampling(), TOLERANCE); - assertEquals(expected.stdDevBound(ExtendedStats.Bounds.LOWER, stats.getSigma()), - stats.getStdDeviationBound(ExtendedStats.Bounds.LOWER), TOLERANCE); - assertEquals(expected.stdDevBound(ExtendedStats.Bounds.UPPER, stats.getSigma()), - stats.getStdDeviationBound(ExtendedStats.Bounds.UPPER), TOLERANCE); - assertEquals(expected.stdDevBound(ExtendedStats.Bounds.LOWER_POPULATION, stats.getSigma()), - stats.getStdDeviationBound(ExtendedStats.Bounds.LOWER_POPULATION), TOLERANCE); - assertEquals(expected.stdDevBound(ExtendedStats.Bounds.UPPER_POPULATION, stats.getSigma()), - stats.getStdDeviationBound(ExtendedStats.Bounds.UPPER_POPULATION), TOLERANCE); - assertEquals(expected.stdDevBound(ExtendedStats.Bounds.LOWER_SAMPLING, stats.getSigma()), - stats.getStdDeviationBound(ExtendedStats.Bounds.LOWER_SAMPLING), TOLERANCE); - assertEquals(expected.stdDevBound(ExtendedStats.Bounds.UPPER_SAMPLING, stats.getSigma()), - stats.getStdDeviationBound(ExtendedStats.Bounds.UPPER_SAMPLING), TOLERANCE); - assertTrue(AggregationInspectionHelper.hasValue(stats)); + iw.addDocument(doc); } - ); + }, stats -> { + assertEquals(expected.count, stats.getCount(), 0); + assertEquals(expected.sum, stats.getSum(), TOLERANCE); + assertEquals(expected.min, stats.getMin(), 0); + assertEquals(expected.max, stats.getMax(), 0); + assertEquals(expected.sum / expected.count, stats.getAvg(), TOLERANCE); + assertEquals(expected.sumOfSqrs, stats.getSumOfSquares(), TOLERANCE); + assertEquals(expected.stdDev(), stats.getStdDeviation(), TOLERANCE); + assertEquals(expected.stdDevPopulation(), stats.getStdDeviationPopulation(), TOLERANCE); + assertEquals(expected.stdDevSampling(), stats.getStdDeviationSampling(), TOLERANCE); + assertEquals(expected.variance(), stats.getVariance(), TOLERANCE); + assertEquals(expected.variancePopulation(), stats.getVariancePopulation(), TOLERANCE); + assertEquals(expected.varianceSampling(), stats.getVarianceSampling(), TOLERANCE); + assertEquals( + expected.stdDevBound(ExtendedStats.Bounds.LOWER, stats.getSigma()), + stats.getStdDeviationBound(ExtendedStats.Bounds.LOWER), + TOLERANCE + ); + assertEquals( + expected.stdDevBound(ExtendedStats.Bounds.UPPER, stats.getSigma()), + stats.getStdDeviationBound(ExtendedStats.Bounds.UPPER), + TOLERANCE + ); + assertEquals( + expected.stdDevBound(ExtendedStats.Bounds.LOWER_POPULATION, stats.getSigma()), + stats.getStdDeviationBound(ExtendedStats.Bounds.LOWER_POPULATION), + TOLERANCE + ); + assertEquals( + expected.stdDevBound(ExtendedStats.Bounds.UPPER_POPULATION, stats.getSigma()), + stats.getStdDeviationBound(ExtendedStats.Bounds.UPPER_POPULATION), + TOLERANCE + ); + assertEquals( + expected.stdDevBound(ExtendedStats.Bounds.LOWER_SAMPLING, stats.getSigma()), + stats.getStdDeviationBound(ExtendedStats.Bounds.LOWER_SAMPLING), + TOLERANCE + ); + assertEquals( + expected.stdDevBound(ExtendedStats.Bounds.UPPER_SAMPLING, stats.getSigma()), + stats.getStdDeviationBound(ExtendedStats.Bounds.UPPER_SAMPLING), + TOLERANCE + ); + assertTrue(AggregationInspectionHelper.hasValue(stats)); + }); } public void testSummationAccuracy() throws IOException { - double[] values = new double[]{0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.9, 1.1, 1.2, 1.3, 1.4, 1.5, 1.6, 1.7}; + double[] values = new double[] { 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.9, 1.1, 1.2, 
1.3, 1.4, 1.5, 1.6, 1.7 }; verifyStatsOfDoubles(values, 13.5, 16.21, 0d); // Summing up an array which contains NaN and infinities and expect a result same as naive summation @@ -212,8 +233,7 @@ public void testSummationAccuracy() throws IOException { verifyStatsOfDoubles(largeValues, Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY, 0d); } - private void verifyStatsOfDoubles(double[] values, double expectedSum, - double expectedSumOfSqrs, double delta) throws IOException { + private void verifyStatsOfDoubles(double[] values, double expectedSum, double expectedSumOfSqrs, double delta) throws IOException { final String fieldName = "field"; MappedFieldType ft = new NumberFieldMapper.NumberFieldType(fieldName, NumberFieldMapper.NumberType.DOUBLE); double max = Double.NEGATIVE_INFINITY; @@ -224,28 +244,26 @@ private void verifyStatsOfDoubles(double[] values, double expectedSum, } double expectedMax = max; double expectedMin = min; - testCase(ft, - iw -> { - for (double value : values) { - iw.addDocument(singleton(new NumericDocValuesField(fieldName, NumericUtils.doubleToSortableLong(value)))); - } - }, - stats -> { - assertEquals(values.length, stats.getCount()); - assertEquals(expectedSum / values.length, stats.getAvg(), delta); - assertEquals(expectedSum, stats.getSum(), delta); - assertEquals(expectedSumOfSqrs, stats.getSumOfSquares(), delta); - assertEquals(expectedMax, stats.getMax(), 0d); - assertEquals(expectedMin, stats.getMin(), 0d); + testCase(ft, iw -> { + for (double value : values) { + iw.addDocument(singleton(new NumericDocValuesField(fieldName, NumericUtils.doubleToSortableLong(value)))); } - ); + }, stats -> { + assertEquals(values.length, stats.getCount()); + assertEquals(expectedSum / values.length, stats.getAvg(), delta); + assertEquals(expectedSum, stats.getSum(), delta); + assertEquals(expectedSumOfSqrs, stats.getSumOfSquares(), delta); + assertEquals(expectedMax, stats.getMax(), 0d); + assertEquals(expectedMin, stats.getMin(), 0d); + }); } - public void testCase(MappedFieldType ft, - CheckedConsumer buildIndex, - Consumer verify) throws IOException { - ExtendedStatsAggregationBuilder aggBuilder = new ExtendedStatsAggregationBuilder("my_agg") - .field("field") + public void testCase( + MappedFieldType ft, + CheckedConsumer buildIndex, + Consumer verify + ) throws IOException { + ExtendedStatsAggregationBuilder aggBuilder = new ExtendedStatsAggregationBuilder("my_agg").field("field") .sigma(randomDoubleBetween(0, 10, true)); testCase(aggBuilder, new MatchAllDocsQuery(), buildIndex, verify, ft); @@ -296,12 +314,12 @@ void add(double value) { double variancePopulation() { double variance = (sumOfSqrs - ((sum * sum) / count)) / count; - return variance < 0 ? 0 : variance; + return variance < 0 ? 0 : variance; } double varianceSampling() { double variance = (sumOfSqrs - ((sum * sum) / count)) / (count - 1); - return variance < 0 ? 0 : variance; + return variance < 0 ? 
0 : variance; } } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/GeoBoundsAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/GeoBoundsAggregatorTests.java index 7cfbba84e1588..8f14b1e711353 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/GeoBoundsAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/GeoBoundsAggregatorTests.java @@ -39,9 +39,7 @@ public class GeoBoundsAggregatorTests extends AggregatorTestCase { public void testEmpty() throws Exception { try (Directory dir = newDirectory(); RandomIndexWriter w = new RandomIndexWriter(random(), dir)) { - GeoBoundsAggregationBuilder aggBuilder = new GeoBoundsAggregationBuilder("my_agg") - .field("field") - .wrapLongitude(false); + GeoBoundsAggregationBuilder aggBuilder = new GeoBoundsAggregationBuilder("my_agg").field("field").wrapLongitude(false); MappedFieldType fieldType = new GeoPointFieldMapper.GeoPointFieldType("field"); try (IndexReader reader = w.getReader()) { @@ -66,9 +64,7 @@ public void testUnmappedFieldWithDocs() throws Exception { w.addDocument(doc); } - GeoBoundsAggregationBuilder aggBuilder = new GeoBoundsAggregationBuilder("my_agg") - .field("non_existent") - .wrapLongitude(false); + GeoBoundsAggregationBuilder aggBuilder = new GeoBoundsAggregationBuilder("my_agg").field("non_existent").wrapLongitude(false); MappedFieldType fieldType = new GeoPointFieldMapper.GeoPointFieldType("field"); try (IndexReader reader = w.getReader()) { @@ -99,8 +95,7 @@ public void testMissing() throws Exception { // valid missing values for (Object missingVal : List.of("POINT(" + lon + " " + lat + ")", lat + ", " + lon, new GeoPoint(lat, lon))) { - GeoBoundsAggregationBuilder aggBuilder = new GeoBoundsAggregationBuilder("my_agg") - .field("field") + GeoBoundsAggregationBuilder aggBuilder = new GeoBoundsAggregationBuilder("my_agg").field("field") .missing(missingVal) .wrapLongitude(false); @@ -126,14 +121,15 @@ public void testInvalidMissing() throws Exception { MappedFieldType fieldType = new GeoPointFieldMapper.GeoPointFieldType("field"); - GeoBoundsAggregationBuilder aggBuilder = new GeoBoundsAggregationBuilder("my_agg") - .field("field") + GeoBoundsAggregationBuilder aggBuilder = new GeoBoundsAggregationBuilder("my_agg").field("field") .missing("invalid") .wrapLongitude(false); try (IndexReader reader = w.getReader()) { IndexSearcher searcher = new IndexSearcher(reader); - ElasticsearchParseException exception = expectThrows(ElasticsearchParseException.class, - () -> searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType)); + ElasticsearchParseException exception = expectThrows( + ElasticsearchParseException.class, + () -> searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType) + ); assertThat(exception.getMessage(), startsWith("unsupported symbol")); } } @@ -147,8 +143,7 @@ public void testRandom() throws Exception { double negLeft = Double.POSITIVE_INFINITY; double negRight = Double.NEGATIVE_INFINITY; int numDocs = randomIntBetween(50, 100); - try (Directory dir = newDirectory(); - RandomIndexWriter w = new RandomIndexWriter(random(), dir)) { + try (Directory dir = newDirectory(); RandomIndexWriter w = new RandomIndexWriter(random(), dir)) { for (int i = 0; i < numDocs; i++) { Document doc = new Document(); int numValues = randomIntBetween(1, 5); @@ -176,9 +171,7 @@ public void testRandom() throws Exception { } w.addDocument(doc); } - GeoBoundsAggregationBuilder 
aggBuilder = new GeoBoundsAggregationBuilder("my_agg") - .field("field") - .wrapLongitude(false); + GeoBoundsAggregationBuilder aggBuilder = new GeoBoundsAggregationBuilder("my_agg").field("field").wrapLongitude(false); MappedFieldType fieldType = new GeoPointFieldMapper.GeoPointFieldType("field"); try (IndexReader reader = w.getReader()) { diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/GeoBoundsTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/GeoBoundsTests.java index 1eb8648462905..12ef3e97489bf 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/GeoBoundsTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/GeoBoundsTests.java @@ -19,10 +19,10 @@ protected GeoBoundsAggregationBuilder createTestAggregatorBuilder() { factory.field(field); if (randomBoolean()) { factory.wrapLongitude(randomBoolean()); - } + } if (randomBoolean()) { factory.missing("0,0"); - } + } return factory; } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/GeoCentroidAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/GeoCentroidAggregatorTests.java index 0a1e2d3569bb9..85f9348a9408a 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/GeoCentroidAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/GeoCentroidAggregatorTests.java @@ -32,10 +32,8 @@ public class GeoCentroidAggregatorTests extends AggregatorTestCase { private static final double GEOHASH_TOLERANCE = 1E-6D; public void testEmpty() throws Exception { - try (Directory dir = newDirectory(); - RandomIndexWriter w = new RandomIndexWriter(random(), dir)) { - GeoCentroidAggregationBuilder aggBuilder = new GeoCentroidAggregationBuilder("my_agg") - .field("field"); + try (Directory dir = newDirectory(); RandomIndexWriter w = new RandomIndexWriter(random(), dir)) { + GeoCentroidAggregationBuilder aggBuilder = new GeoCentroidAggregationBuilder("my_agg").field("field"); MappedFieldType fieldType = new GeoPointFieldMapper.GeoPointFieldType("field"); try (IndexReader reader = w.getReader()) { @@ -48,10 +46,8 @@ public void testEmpty() throws Exception { } public void testUnmapped() throws Exception { - try (Directory dir = newDirectory(); - RandomIndexWriter w = new RandomIndexWriter(random(), dir)) { - GeoCentroidAggregationBuilder aggBuilder = new GeoCentroidAggregationBuilder("my_agg") - .field("another_field"); + try (Directory dir = newDirectory(); RandomIndexWriter w = new RandomIndexWriter(random(), dir)) { + GeoCentroidAggregationBuilder aggBuilder = new GeoCentroidAggregationBuilder("my_agg").field("another_field"); Document document = new Document(); document.add(new LatLonDocValuesField("field", 10, 10)); @@ -72,10 +68,8 @@ public void testUnmapped() throws Exception { } public void testUnmappedWithMissing() throws Exception { - try (Directory dir = newDirectory(); - RandomIndexWriter w = new RandomIndexWriter(random(), dir)) { - GeoCentroidAggregationBuilder aggBuilder = new GeoCentroidAggregationBuilder("my_agg") - .field("another_field") + try (Directory dir = newDirectory(); RandomIndexWriter w = new RandomIndexWriter(random(), dir)) { + GeoCentroidAggregationBuilder aggBuilder = new GeoCentroidAggregationBuilder("my_agg").field("another_field") .missing("53.69437,6.475031"); GeoPoint expectedCentroid = new GeoPoint(53.69437, 6.475031); @@ -96,11 +90,10 @@ public void testUnmappedWithMissing() 
throws Exception { public void testSingleValuedField() throws Exception { int numDocs = scaledRandomIntBetween(64, 256); int numUniqueGeoPoints = randomIntBetween(1, numDocs); - try (Directory dir = newDirectory(); - RandomIndexWriter w = new RandomIndexWriter(random(), dir)) { + try (Directory dir = newDirectory(); RandomIndexWriter w = new RandomIndexWriter(random(), dir)) { GeoPoint expectedCentroid = new GeoPoint(0, 0); GeoPoint[] singleValues = new GeoPoint[numUniqueGeoPoints]; - for (int i = 0 ; i < singleValues.length; i++) { + for (int i = 0; i < singleValues.length; i++) { singleValues[i] = RandomGeoGenerator.randomPoint(random()); } GeoPoint singleVal; @@ -109,8 +102,10 @@ public void testSingleValuedField() throws Exception { Document document = new Document(); document.add(new LatLonDocValuesField("field", singleVal.getLat(), singleVal.getLon())); w.addDocument(document); - expectedCentroid = expectedCentroid.reset(expectedCentroid.lat() + (singleVal.lat() - expectedCentroid.lat()) / (i + 1), - expectedCentroid.lon() + (singleVal.lon() - expectedCentroid.lon()) / (i + 1)); + expectedCentroid = expectedCentroid.reset( + expectedCentroid.lat() + (singleVal.lat() - expectedCentroid.lat()) / (i + 1), + expectedCentroid.lon() + (singleVal.lon() - expectedCentroid.lon()) / (i + 1) + ); } assertCentroid(w, expectedCentroid); } @@ -119,26 +114,27 @@ public void testSingleValuedField() throws Exception { public void testMultiValuedField() throws Exception { int numDocs = scaledRandomIntBetween(64, 256); int numUniqueGeoPoints = randomIntBetween(1, numDocs); - try (Directory dir = newDirectory(); - RandomIndexWriter w = new RandomIndexWriter(random(), dir)) { + try (Directory dir = newDirectory(); RandomIndexWriter w = new RandomIndexWriter(random(), dir)) { GeoPoint expectedCentroid = new GeoPoint(0, 0); GeoPoint[] multiValues = new GeoPoint[numUniqueGeoPoints]; - for (int i = 0 ; i < multiValues.length; i++) { + for (int i = 0; i < multiValues.length; i++) { multiValues[i] = RandomGeoGenerator.randomPoint(random()); } final GeoPoint[] multiVal = new GeoPoint[2]; for (int i = 0; i < numDocs; i++) { multiVal[0] = multiValues[i % numUniqueGeoPoints]; - multiVal[1] = multiValues[(i+1) % numUniqueGeoPoints]; + multiVal[1] = multiValues[(i + 1) % numUniqueGeoPoints]; Document document = new Document(); document.add(new LatLonDocValuesField("field", multiVal[0].getLat(), multiVal[0].getLon())); document.add(new LatLonDocValuesField("field", multiVal[1].getLat(), multiVal[1].getLon())); w.addDocument(document); - double newMVLat = (multiVal[0].lat() + multiVal[1].lat())/2d; - double newMVLon = (multiVal[0].lon() + multiVal[1].lon())/2d; - expectedCentroid = expectedCentroid.reset(expectedCentroid.lat() + (newMVLat - expectedCentroid.lat()) / (i + 1), - expectedCentroid.lon() + (newMVLon - expectedCentroid.lon()) / (i + 1)); + double newMVLat = (multiVal[0].lat() + multiVal[1].lat()) / 2d; + double newMVLon = (multiVal[0].lon() + multiVal[1].lon()) / 2d; + expectedCentroid = expectedCentroid.reset( + expectedCentroid.lat() + (newMVLat - expectedCentroid.lat()) / (i + 1), + expectedCentroid.lon() + (newMVLon - expectedCentroid.lon()) / (i + 1) + ); } assertCentroid(w, expectedCentroid); } @@ -146,8 +142,7 @@ public void testMultiValuedField() throws Exception { private void assertCentroid(RandomIndexWriter w, GeoPoint expectedCentroid) throws IOException { MappedFieldType fieldType = new GeoPointFieldMapper.GeoPointFieldType("field"); - GeoCentroidAggregationBuilder aggBuilder = new 
GeoCentroidAggregationBuilder("my_agg") - .field("field"); + GeoCentroidAggregationBuilder aggBuilder = new GeoCentroidAggregationBuilder("my_agg").field("field"); try (IndexReader reader = w.getReader()) { IndexSearcher searcher = new IndexSearcher(reader); InternalGeoCentroid result = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/HDRPercentileRanksAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/HDRPercentileRanksAggregatorTests.java index 61bc558ef6f6e..eddc12a095ab0 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/HDRPercentileRanksAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/HDRPercentileRanksAggregatorTests.java @@ -30,53 +30,45 @@ import java.util.Iterator; import java.util.List; - public class HDRPercentileRanksAggregatorTests extends AggregatorTestCase { @Override protected AggregationBuilder createAggBuilderForTypeTest(MappedFieldType fieldType, String fieldName) { - return new PercentileRanksAggregationBuilder("hdr_ranks", new double[]{0.1, 0.5, 12}) - .field(fieldName) + return new PercentileRanksAggregationBuilder("hdr_ranks", new double[] { 0.1, 0.5, 12 }).field(fieldName) .percentilesConfig(new PercentilesConfig.Hdr()); } @Override protected List getSupportedValuesSourceTypes() { - return List.of(CoreValuesSourceType.NUMERIC, - CoreValuesSourceType.DATE, - CoreValuesSourceType.BOOLEAN); + return List.of(CoreValuesSourceType.NUMERIC, CoreValuesSourceType.DATE, CoreValuesSourceType.BOOLEAN); } public void testEmpty() throws IOException { - PercentileRanksAggregationBuilder aggBuilder = new PercentileRanksAggregationBuilder("my_agg", new double[]{0.5}) - .field("field") - .method(PercentilesMethod.HDR); - MappedFieldType fieldType - = new NumberFieldMapper.NumberFieldType("field", NumberFieldMapper.NumberType.DOUBLE); + PercentileRanksAggregationBuilder aggBuilder = new PercentileRanksAggregationBuilder("my_agg", new double[] { 0.5 }).field("field") + .method(PercentilesMethod.HDR); + MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("field", NumberFieldMapper.NumberType.DOUBLE); try (IndexReader reader = new MultiReader()) { IndexSearcher searcher = new IndexSearcher(reader); PercentileRanks ranks = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType); Percentile rank = ranks.iterator().next(); assertEquals(Double.NaN, rank.getPercent(), 0d); assertEquals(0.5, rank.getValue(), 0d); - assertFalse(AggregationInspectionHelper.hasValue((InternalHDRPercentileRanks)ranks)); + assertFalse(AggregationInspectionHelper.hasValue((InternalHDRPercentileRanks) ranks)); } } public void testSimple() throws IOException { - try (Directory dir = newDirectory(); - RandomIndexWriter w = new RandomIndexWriter(random(), dir)) { - for (double value : new double[] {3, 0.2, 10}) { + try (Directory dir = newDirectory(); RandomIndexWriter w = new RandomIndexWriter(random(), dir)) { + for (double value : new double[] { 3, 0.2, 10 }) { Document doc = new Document(); doc.add(new SortedNumericDocValuesField("field", NumericUtils.doubleToSortableLong(value))); w.addDocument(doc); } - PercentileRanksAggregationBuilder aggBuilder = new PercentileRanksAggregationBuilder("my_agg", new double[]{0.1, 0.5, 12}) - .field("field") - .method(PercentilesMethod.HDR); - MappedFieldType fieldType - = new NumberFieldMapper.NumberFieldType("field", 
NumberFieldMapper.NumberType.DOUBLE); + PercentileRanksAggregationBuilder aggBuilder = new PercentileRanksAggregationBuilder("my_agg", new double[] { 0.1, 0.5, 12 }) + .field("field") + .method(PercentilesMethod.HDR); + MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("field", NumberFieldMapper.NumberType.DOUBLE); try (IndexReader reader = w.getReader()) { IndexSearcher searcher = new IndexSearcher(reader); PercentileRanks ranks = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType); @@ -92,20 +84,24 @@ public void testSimple() throws IOException { assertEquals(12, rank.getValue(), 0d); assertThat(rank.getPercent(), Matchers.equalTo(100d)); assertFalse(rankIterator.hasNext()); - assertTrue(AggregationInspectionHelper.hasValue((InternalHDRPercentileRanks)ranks)); + assertTrue(AggregationInspectionHelper.hasValue((InternalHDRPercentileRanks) ranks)); } } } public void testNullValues() throws IOException { - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, - () -> new PercentileRanksAggregationBuilder("my_agg", null).field("field").method(PercentilesMethod.HDR)); + IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, + () -> new PercentileRanksAggregationBuilder("my_agg", null).field("field").method(PercentilesMethod.HDR) + ); assertThat(e.getMessage(), Matchers.equalTo("[values] must not be null: [my_agg]")); } public void testEmptyValues() throws IOException { - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, - () -> new PercentileRanksAggregationBuilder("my_agg", new double[0]).field("field").method(PercentilesMethod.HDR)); + IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, + () -> new PercentileRanksAggregationBuilder("my_agg", new double[0]).field("field").method(PercentilesMethod.HDR) + ); assertThat(e.getMessage(), Matchers.equalTo("[values] must not be an empty array: [my_agg]")); } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/HDRPercentilesAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/HDRPercentilesAggregatorTests.java index c4a5430c43558..0a82a569e8c55 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/HDRPercentilesAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/HDRPercentilesAggregatorTests.java @@ -48,16 +48,12 @@ public class HDRPercentilesAggregatorTests extends AggregatorTestCase { @Override protected AggregationBuilder createAggBuilderForTypeTest(MappedFieldType fieldType, String fieldName) { - return new PercentilesAggregationBuilder("hdr_percentiles") - .field(fieldName) - .percentilesConfig(new PercentilesConfig.Hdr()); + return new PercentilesAggregationBuilder("hdr_percentiles").field(fieldName).percentilesConfig(new PercentilesConfig.Hdr()); } @Override protected List getSupportedValuesSourceTypes() { - return List.of(CoreValuesSourceType.NUMERIC, - CoreValuesSourceType.DATE, - CoreValuesSourceType.BOOLEAN); + return List.of(CoreValuesSourceType.NUMERIC, CoreValuesSourceType.DATE, CoreValuesSourceType.BOOLEAN); } public void testNoDocs() throws IOException { @@ -75,12 +71,11 @@ public void testNoDocs() throws IOException { public void testStringField() throws IOException { final String fieldName = "string"; MappedFieldType fieldType = new KeywordFieldMapper.KeywordFieldType(fieldName); - expectThrows(IllegalArgumentException.class, - () -> testCase(new 
DocValuesFieldExistsQuery(fieldName), iw -> { - iw.addDocument(singleton(new SortedSetDocValuesField("string", new BytesRef("bogus")))); - iw.addDocument(singleton(new SortedSetDocValuesField("string", new BytesRef("zwomp")))); - iw.addDocument(singleton(new SortedSetDocValuesField("string", new BytesRef("foobar")))); - }, hdr -> {}, fieldType, fieldName)); + expectThrows(IllegalArgumentException.class, () -> testCase(new DocValuesFieldExistsQuery(fieldName), iw -> { + iw.addDocument(singleton(new SortedSetDocValuesField("string", new BytesRef("bogus")))); + iw.addDocument(singleton(new SortedSetDocValuesField("string", new BytesRef("zwomp")))); + iw.addDocument(singleton(new SortedSetDocValuesField("string", new BytesRef("foobar")))); + }, hdr -> {}, fieldType, fieldName)); } /** @@ -91,12 +86,18 @@ public void testRangeField() throws IOException { // Currently fails (throws ClassCast exception), but should be fixed once HDRPercentileAggregation uses the ValuesSource registry final String fieldName = "range"; MappedFieldType fieldType = new RangeFieldMapper.RangeFieldType(fieldName, RangeType.DOUBLE); - RangeFieldMapper.Range range =new RangeFieldMapper.Range(RangeType.DOUBLE, 1.0D, 5.0D, true, true); + RangeFieldMapper.Range range = new RangeFieldMapper.Range(RangeType.DOUBLE, 1.0D, 5.0D, true, true); BytesRef encodedRange = RangeType.DOUBLE.encodeRanges(Collections.singleton(range)); - expectThrows(IllegalArgumentException.class, - () -> testCase(new DocValuesFieldExistsQuery(fieldName), iw -> { - iw.addDocument(singleton(new BinaryDocValuesField(fieldName, encodedRange))); - }, hdr -> {}, fieldType, fieldName)); + expectThrows( + IllegalArgumentException.class, + () -> testCase( + new DocValuesFieldExistsQuery(fieldName), + iw -> { iw.addDocument(singleton(new BinaryDocValuesField(fieldName, encodedRange))); }, + hdr -> {}, + fieldType, + fieldName + ) + ); } public void testNoMatchingField() throws IOException { @@ -166,8 +167,7 @@ public void testQueryFiltering() throws IOException { public void testHdrThenTdigestSettings() throws Exception { int sigDigits = randomIntBetween(1, 5); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> { - percentiles("percentiles") - .numberOfSignificantValueDigits(sigDigits) + percentiles("percentiles").numberOfSignificantValueDigits(sigDigits) .method(PercentilesMethod.HDR) .compression(100.0) // <-- this should trigger an exception .field("value"); @@ -175,14 +175,19 @@ public void testHdrThenTdigestSettings() throws Exception { assertThat(e.getMessage(), equalTo("Cannot set [compression] because the method has already been configured for HDRHistogram")); } - private void testCase(Query query, CheckedConsumer buildIndex, - Consumer verify) throws IOException { + private void testCase(Query query, CheckedConsumer buildIndex, Consumer verify) + throws IOException { MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("number", NumberFieldMapper.NumberType.LONG); testCase(query, buildIndex, verify, fieldType, "number"); } - private void testCase(Query query, CheckedConsumer buildIndex, - Consumer verify, MappedFieldType fieldType, String fieldName) throws IOException { + private void testCase( + Query query, + CheckedConsumer buildIndex, + Consumer verify, + MappedFieldType fieldType, + String fieldName + ) throws IOException { try (Directory directory = newDirectory()) { try (RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory)) { buildIndex.accept(indexWriter); diff --git 
a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/HyperLogLogPlusPlusSparseTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/HyperLogLogPlusPlusSparseTests.java index ae1c9e234aecc..2938569acf71a 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/HyperLogLogPlusPlusSparseTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/HyperLogLogPlusPlusSparseTests.java @@ -36,7 +36,7 @@ public void testEquivalence() throws IOException { final HyperLogLogPlusPlus single = new HyperLogLogPlusPlus(p, BigArrays.NON_RECYCLING_INSTANCE, 0); final int numBuckets = randomIntBetween(2, 100); final int numValues = randomIntBetween(1, 100000); - final int maxValue = randomIntBetween(1, randomBoolean() ? 1000: 1000000); + final int maxValue = randomIntBetween(1, randomBoolean() ? 1000 : 1000000); for (int i = 0; i < numValues; ++i) { final int n = randomInt(maxValue); final long hash = BitMixer.mix64(n); @@ -68,8 +68,12 @@ public void testEquivalence() throws IOException { } } - private void checkEquivalence(AbstractHyperLogLogPlusPlus first, int firstBucket, - AbstractHyperLogLogPlusPlus second, int secondBucket) { + private void checkEquivalence( + AbstractHyperLogLogPlusPlus first, + int firstBucket, + AbstractHyperLogLogPlusPlus second, + int secondBucket + ) { assertEquals(first.hashCode(firstBucket), second.hashCode(secondBucket)); assertEquals(first.cardinality(firstBucket), second.cardinality(0)); assertTrue(first.equals(firstBucket, second, secondBucket)); @@ -82,6 +86,7 @@ public void testCircuitBreakerOnConstruction() { CircuitBreakerService breakerService = mock(CircuitBreakerService.class); when(breakerService.getBreaker(CircuitBreaker.REQUEST)).thenReturn(new NoopCircuitBreaker(CircuitBreaker.REQUEST) { private int countDown = whenToBreak; + @Override public void addEstimateBytesAndMaybeBreak(long bytes, String label) throws CircuitBreakingException { if (countDown-- == 0) { diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/HyperLogLogPlusPlusTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/HyperLogLogPlusPlusTests.java index 5be429bef9b76..d06cfb4a8d629 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/HyperLogLogPlusPlusTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/HyperLogLogPlusPlusTests.java @@ -58,7 +58,7 @@ private void testEncodeDecode(int p1, long hash) { public void testAccuracy() { final long bucket = randomInt(20); final int numValues = randomIntBetween(1, 100000); - final int maxValue = randomIntBetween(1, randomBoolean() ? 1000: 100000); + final int maxValue = randomIntBetween(1, randomBoolean() ? 
1000 : 100000); final int p = randomIntBetween(14, MAX_PRECISION); IntHashSet set = new IntHashSet(); HyperLogLogPlusPlus e = new HyperLogLogPlusPlus(p, BigArrays.NON_RECYCLING_INSTANCE, 1); @@ -68,7 +68,7 @@ public void testAccuracy() { final long hash = BitMixer.mix64(n); e.collect(bucket, hash); if (randomInt(100) == 0) { - //System.out.println(e.cardinality(bucket) + " <> " + set.size()); + // System.out.println(e.cardinality(bucket) + " <> " + set.size()); assertThat((double) e.cardinality(bucket), closeTo(set.size(), 0.1 * set.size())); } } @@ -85,7 +85,7 @@ public void testMerge() { multi[i] = new HyperLogLogPlusPlus(p, BigArrays.NON_RECYCLING_INSTANCE, 5); } final int numValues = randomIntBetween(1, 100000); - final int maxValue = randomIntBetween(1, randomBoolean() ? 1000: 1000000); + final int maxValue = randomIntBetween(1, randomBoolean() ? 1000 : 1000000); for (int i = 0; i < numValues; ++i) { final int n = randomInt(maxValue); final long hash = BitMixer.mix64(n); @@ -135,6 +135,7 @@ public void testCircuitBreakerOnConstruction() { CircuitBreakerService breakerService = mock(CircuitBreakerService.class); when(breakerService.getBreaker(CircuitBreaker.REQUEST)).thenReturn(new NoopCircuitBreaker(CircuitBreaker.REQUEST) { private int countDown = whenToBreak; + @Override public void addEstimateBytesAndMaybeBreak(long bytes, String label) throws CircuitBreakingException { if (countDown-- == 0) { diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalAvgTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalAvgTests.java index 0c45d4973e987..dfd6d68e2daf7 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalAvgTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalAvgTests.java @@ -41,7 +41,7 @@ protected void assertReduced(InternalAvg reduced, List inputs) { } public void testSummationAccuracy() { - double[] values = new double[]{0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.9, 1.1, 1.2, 1.3, 1.4, 1.5, 1.6, 1.7}; + double[] values = new double[] { 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.9, 1.1, 1.2, 1.3, 1.4, 1.5, 1.6, 1.7 }; verifyAvgOfDoubles(values, 0.9, 0d); int n = randomIntBetween(5, 10); @@ -97,33 +97,33 @@ protected InternalAvg mutateInstance(InternalAvg instance) { DocValueFormat formatter = instance.getFormatter(); Map metadata = instance.getMetadata(); switch (between(0, 2)) { - case 0: - name += randomAlphaOfLength(5); - break; - case 1: - if (Double.isFinite(sum)) { - sum += between(1, 100); - } else { - sum = between(1, 100); - } - break; - case 2: - if (Double.isFinite(count)) { - count += between(1, 100); - } else { - count = between(1, 100); - } - break; - case 3: - if (metadata == null) { - metadata = new HashMap<>(1); - } else { - metadata = new HashMap<>(instance.getMetadata()); - } - metadata.put(randomAlphaOfLength(15), randomInt()); - break; - default: - throw new AssertionError("Illegal randomisation branch"); + case 0: + name += randomAlphaOfLength(5); + break; + case 1: + if (Double.isFinite(sum)) { + sum += between(1, 100); + } else { + sum = between(1, 100); + } + break; + case 2: + if (Double.isFinite(count)) { + count += between(1, 100); + } else { + count = between(1, 100); + } + break; + case 3: + if (metadata == null) { + metadata = new HashMap<>(1); + } else { + metadata = new HashMap<>(instance.getMetadata()); + } + metadata.put(randomAlphaOfLength(15), randomInt()); + break; + default: + throw new AssertionError("Illegal 
randomisation branch"); } return new InternalAvg(name, sum, count, formatter, metadata); } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalCardinalityTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalCardinalityTests.java index 6f2f0778fa6c9..ef4d49363bc52 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalCardinalityTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalCardinalityTests.java @@ -9,10 +9,11 @@ package org.elasticsearch.search.aggregations.metrics; import com.carrotsearch.hppc.BitMixer; -import org.elasticsearch.core.Releasables; + import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.MockBigArrays; import org.elasticsearch.common.util.MockPageCacheRecycler; +import org.elasticsearch.core.Releasables; import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; import org.elasticsearch.search.aggregations.ParsedAggregation; import org.elasticsearch.test.InternalAggregationTestCase; @@ -34,7 +35,7 @@ public void setUp() throws Exception { p = randomIntBetween(AbstractHyperLogLog.MIN_PRECISION, AbstractHyperLogLog.MAX_PRECISION); } - @After //we force @After to have it run before ESTestCase#after otherwise it fails + @After // we force @After to have it run before ESTestCase#after otherwise it fails @Override public void tearDown() throws Exception { super.tearDown(); @@ -45,8 +46,11 @@ public void tearDown() throws Exception { @Override protected InternalCardinality createTestInstance(String name, Map metadata) { - HyperLogLogPlusPlus hllpp = new HyperLogLogPlusPlus(p, - new MockBigArrays(new MockPageCacheRecycler(Settings.EMPTY), new NoneCircuitBreakerService()), 1); + HyperLogLogPlusPlus hllpp = new HyperLogLogPlusPlus( + p, + new MockBigArrays(new MockPageCacheRecycler(Settings.EMPTY), new NoneCircuitBreakerService()), + 1 + ); algos.add(hllpp); for (int i = 0; i < 100; i++) { hllpp.collect(0, BitMixer.mix64(randomIntBetween(1, 100))); @@ -56,8 +60,7 @@ protected InternalCardinality createTestInstance(String name, Map inputs) { - HyperLogLogPlusPlus[] algos = inputs.stream().map(InternalCardinality::getState) - .toArray(size -> new HyperLogLogPlusPlus[size]); + HyperLogLogPlusPlus[] algos = inputs.stream().map(InternalCardinality::getState).toArray(size -> new HyperLogLogPlusPlus[size]); if (algos.length > 0) { HyperLogLogPlusPlus result = algos[0]; for (int i = 1; i < algos.length; i++) { @@ -82,28 +85,31 @@ protected InternalCardinality mutateInstance(InternalCardinality instance) { AbstractHyperLogLogPlusPlus state = instance.getState(); Map metadata = instance.getMetadata(); switch (between(0, 2)) { - case 0: - name += randomAlphaOfLength(5); - break; - case 1: - HyperLogLogPlusPlus newState = new HyperLogLogPlusPlus(state.precision(), - new MockBigArrays(new MockPageCacheRecycler(Settings.EMPTY), new NoneCircuitBreakerService()), 0); - for (int i = 0; i < 10; i++) { - newState.collect(0, BitMixer.mix64(randomIntBetween(500, 10000))); - } - algos.add(newState); - state = newState; - break; - case 2: - if (metadata == null) { - metadata = new HashMap<>(1); - } else { - metadata = new HashMap<>(instance.getMetadata()); - } - metadata.put(randomAlphaOfLength(15), randomInt()); - break; - default: - throw new AssertionError("Illegal randomisation branch"); + case 0: + name += randomAlphaOfLength(5); + break; + case 1: + HyperLogLogPlusPlus newState = new HyperLogLogPlusPlus( + 
state.precision(), + new MockBigArrays(new MockPageCacheRecycler(Settings.EMPTY), new NoneCircuitBreakerService()), + 0 + ); + for (int i = 0; i < 10; i++) { + newState.collect(0, BitMixer.mix64(randomIntBetween(500, 10000))); + } + algos.add(newState); + state = newState; + break; + case 2: + if (metadata == null) { + metadata = new HashMap<>(1); + } else { + metadata = new HashMap<>(instance.getMetadata()); + } + metadata.put(randomAlphaOfLength(15), randomInt()); + break; + default: + throw new AssertionError("Illegal randomisation branch"); } return new InternalCardinality(name, state, metadata); } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalExtendedStatsTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalExtendedStatsTests.java index fc3a5f8dd36a6..65c39a84f4d07 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalExtendedStatsTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalExtendedStatsTests.java @@ -39,8 +39,17 @@ protected InternalExtendedStats createTestInstance(String name, Map metadata) { + protected InternalExtendedStats createInstance( + String name, + long count, + double sum, + double min, + double max, + double sumOfSqrs, + double sigma, + DocValueFormat formatter, + Map metadata + ) { return new InternalExtendedStats(name, count, sum, min, max, sumOfSqrs, sigma, formatter, metadata); } @@ -82,24 +91,36 @@ protected void assertFromXContent(InternalExtendedStats aggregation, ParsedAggre InternalStatsTests.assertStats(aggregation, parsed); long count = aggregation.getCount(); - // for count == 0, fields are rendered as `null`, so we test that we parse to default values used also in the reduce phase - assertEquals(count > 0 ? aggregation.getSumOfSquares() : 0 , parsed.getSumOfSquares(), 0); - assertEquals(count > 0 ? aggregation.getVariance() : 0 , parsed.getVariance(), 0); - assertEquals(count > 0 ? aggregation.getVariancePopulation() : 0 , parsed.getVariancePopulation(), 0); - assertEquals(count > 0 ? aggregation.getVarianceSampling() : 0 , parsed.getVarianceSampling(), 0); - assertEquals(count > 0 ? aggregation.getStdDeviation() : 0 , parsed.getStdDeviation(), 0); - assertEquals(count > 0 ? aggregation.getStdDeviationPopulation() : 0 , parsed.getStdDeviationPopulation(), 0); - assertEquals(count > 0 ? aggregation.getStdDeviationSampling() : 0 , parsed.getStdDeviationSampling(), 0); - assertEquals(count > 0 ? aggregation.getStdDeviationBound(Bounds.LOWER) : 0 , parsed.getStdDeviationBound(Bounds.LOWER), 0); - assertEquals(count > 0 ? aggregation.getStdDeviationBound(Bounds.UPPER) : 0 , parsed.getStdDeviationBound(Bounds.UPPER), 0); - assertEquals(count > 0 ? aggregation.getStdDeviationBound(Bounds.LOWER_POPULATION) : 0 , - parsed.getStdDeviationBound(Bounds.LOWER_POPULATION), 0); - assertEquals(count > 0 ? aggregation.getStdDeviationBound(Bounds.UPPER_POPULATION) : 0 , - parsed.getStdDeviationBound(Bounds.UPPER_POPULATION), 0); - assertEquals(count > 0 ? aggregation.getStdDeviationBound(Bounds.LOWER_SAMPLING) : 0 , - parsed.getStdDeviationBound(Bounds.LOWER_SAMPLING), 0); - assertEquals(count > 0 ? aggregation.getStdDeviationBound(Bounds.UPPER_SAMPLING) : 0 , - parsed.getStdDeviationBound(Bounds.UPPER_SAMPLING), 0); + // for count == 0, fields are rendered as `null`, so we test that we parse to default values used also in the reduce phase + assertEquals(count > 0 ? 
aggregation.getSumOfSquares() : 0, parsed.getSumOfSquares(), 0); + assertEquals(count > 0 ? aggregation.getVariance() : 0, parsed.getVariance(), 0); + assertEquals(count > 0 ? aggregation.getVariancePopulation() : 0, parsed.getVariancePopulation(), 0); + assertEquals(count > 0 ? aggregation.getVarianceSampling() : 0, parsed.getVarianceSampling(), 0); + assertEquals(count > 0 ? aggregation.getStdDeviation() : 0, parsed.getStdDeviation(), 0); + assertEquals(count > 0 ? aggregation.getStdDeviationPopulation() : 0, parsed.getStdDeviationPopulation(), 0); + assertEquals(count > 0 ? aggregation.getStdDeviationSampling() : 0, parsed.getStdDeviationSampling(), 0); + assertEquals(count > 0 ? aggregation.getStdDeviationBound(Bounds.LOWER) : 0, parsed.getStdDeviationBound(Bounds.LOWER), 0); + assertEquals(count > 0 ? aggregation.getStdDeviationBound(Bounds.UPPER) : 0, parsed.getStdDeviationBound(Bounds.UPPER), 0); + assertEquals( + count > 0 ? aggregation.getStdDeviationBound(Bounds.LOWER_POPULATION) : 0, + parsed.getStdDeviationBound(Bounds.LOWER_POPULATION), + 0 + ); + assertEquals( + count > 0 ? aggregation.getStdDeviationBound(Bounds.UPPER_POPULATION) : 0, + parsed.getStdDeviationBound(Bounds.UPPER_POPULATION), + 0 + ); + assertEquals( + count > 0 ? aggregation.getStdDeviationBound(Bounds.LOWER_SAMPLING) : 0, + parsed.getStdDeviationBound(Bounds.LOWER_SAMPLING), + 0 + ); + assertEquals( + count > 0 ? aggregation.getStdDeviationBound(Bounds.UPPER_SAMPLING) : 0, + parsed.getStdDeviationBound(Bounds.UPPER_SAMPLING), + 0 + ); // also as_string values are only rendered for count != 0 if (count > 0) { assertEquals(aggregation.getSumOfSquaresAsString(), parsed.getSumOfSquaresAsString()); @@ -111,14 +132,22 @@ protected void assertFromXContent(InternalExtendedStats aggregation, ParsedAggre assertEquals(aggregation.getStdDeviationSamplingAsString(), parsed.getStdDeviationSamplingAsString()); assertEquals(aggregation.getStdDeviationBoundAsString(Bounds.LOWER), parsed.getStdDeviationBoundAsString(Bounds.LOWER)); assertEquals(aggregation.getStdDeviationBoundAsString(Bounds.UPPER), parsed.getStdDeviationBoundAsString(Bounds.UPPER)); - assertEquals(aggregation.getStdDeviationBoundAsString(Bounds.LOWER_POPULATION), - parsed.getStdDeviationBoundAsString(Bounds.LOWER_POPULATION)); - assertEquals(aggregation.getStdDeviationBoundAsString(Bounds.UPPER_POPULATION), - parsed.getStdDeviationBoundAsString(Bounds.UPPER_POPULATION)); - assertEquals(aggregation.getStdDeviationBoundAsString(Bounds.LOWER_SAMPLING), - parsed.getStdDeviationBoundAsString(Bounds.LOWER_SAMPLING)); - assertEquals(aggregation.getStdDeviationBoundAsString(Bounds.UPPER_SAMPLING), - parsed.getStdDeviationBoundAsString(Bounds.UPPER_SAMPLING)); + assertEquals( + aggregation.getStdDeviationBoundAsString(Bounds.LOWER_POPULATION), + parsed.getStdDeviationBoundAsString(Bounds.LOWER_POPULATION) + ); + assertEquals( + aggregation.getStdDeviationBoundAsString(Bounds.UPPER_POPULATION), + parsed.getStdDeviationBoundAsString(Bounds.UPPER_POPULATION) + ); + assertEquals( + aggregation.getStdDeviationBoundAsString(Bounds.LOWER_SAMPLING), + parsed.getStdDeviationBoundAsString(Bounds.LOWER_SAMPLING) + ); + assertEquals( + aggregation.getStdDeviationBoundAsString(Bounds.UPPER_SAMPLING), + parsed.getStdDeviationBoundAsString(Bounds.UPPER_SAMPLING) + ); } } @@ -134,67 +163,67 @@ protected InternalExtendedStats mutateInstance(InternalExtendedStats instance) { DocValueFormat formatter = instance.format; Map metadata = instance.getMetadata(); switch (between(0, 7)) { - 
case 0: - name += randomAlphaOfLength(5); - break; - case 1: - if (Double.isFinite(count)) { - count += between(1, 100); - } else { - count = between(1, 100); - } - break; - case 2: - if (Double.isFinite(sum)) { - sum += between(1, 100); - } else { - sum = between(1, 100); - } - break; - case 3: - if (Double.isFinite(min)) { - min += between(1, 100); - } else { - min = between(1, 100); - } - break; - case 4: - if (Double.isFinite(max)) { - max += between(1, 100); - } else { - max = between(1, 100); - } - break; - case 5: - if (Double.isFinite(sumOfSqrs)) { - sumOfSqrs += between(1, 100); - } else { - sumOfSqrs = between(1, 100); - } - break; - case 6: - if (Double.isFinite(sigma)) { - sigma += between(1, 10); - } else { - sigma = between(1, 10); - } - break; - case 7: - if (metadata == null) { - metadata = new HashMap<>(1); - } else { - metadata = new HashMap<>(instance.getMetadata()); - } - metadata.put(randomAlphaOfLength(15), randomInt()); - break; - default: - throw new AssertionError("Illegal randomisation branch"); + case 0: + name += randomAlphaOfLength(5); + break; + case 1: + if (Double.isFinite(count)) { + count += between(1, 100); + } else { + count = between(1, 100); + } + break; + case 2: + if (Double.isFinite(sum)) { + sum += between(1, 100); + } else { + sum = between(1, 100); + } + break; + case 3: + if (Double.isFinite(min)) { + min += between(1, 100); + } else { + min = between(1, 100); + } + break; + case 4: + if (Double.isFinite(max)) { + max += between(1, 100); + } else { + max = between(1, 100); + } + break; + case 5: + if (Double.isFinite(sumOfSqrs)) { + sumOfSqrs += between(1, 100); + } else { + sumOfSqrs = between(1, 100); + } + break; + case 6: + if (Double.isFinite(sigma)) { + sigma += between(1, 10); + } else { + sigma = between(1, 10); + } + break; + case 7: + if (metadata == null) { + metadata = new HashMap<>(1); + } else { + metadata = new HashMap<>(instance.getMetadata()); + } + metadata.put(randomAlphaOfLength(15), randomInt()); + break; + default: + throw new AssertionError("Illegal randomisation branch"); } return new InternalExtendedStats(name, count, sum, min, max, sumOfSqrs, sigma, formatter, metadata); } public void testSummationAccuracy() { - double[] values = new double[]{0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.9, 1.1, 1.2, 1.3, 1.4, 1.5, 1.6, 1.7}; + double[] values = new double[] { 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.9, 1.1, 1.2, 1.3, 1.4, 1.5, 1.6, 1.7 }; verifySumOfSqrsOfDoubles(values, 13.5, 0d); int n = randomIntBetween(5, 10); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalGeoBoundsTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalGeoBoundsTests.java index f9ed804a5f343..00e17def2a1e2 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalGeoBoundsTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalGeoBoundsTests.java @@ -24,9 +24,17 @@ public class InternalGeoBoundsTests extends InternalAggregationTestCase metadata) { // we occasionally want to test top = Double.NEGATIVE_INFINITY since this triggers empty xContent object double top = frequently() ? 
randomDouble() : Double.NEGATIVE_INFINITY; - InternalGeoBounds geo = new InternalGeoBounds(name, - top, randomDouble(), randomDouble(), randomDouble(), - randomDouble(), randomDouble(), randomBoolean(), metadata); + InternalGeoBounds geo = new InternalGeoBounds( + name, + top, + randomDouble(), + randomDouble(), + randomDouble(), + randomDouble(), + randomDouble(), + randomBoolean(), + metadata + ); return geo; } @@ -95,44 +103,44 @@ protected InternalGeoBounds mutateInstance(InternalGeoBounds instance) { boolean wrapLongitude = instance.wrapLongitude; Map metadata = instance.getMetadata(); switch (between(0, 8)) { - case 0: - name += randomAlphaOfLength(5); - break; - case 1: - if (Double.isFinite(top)) { - top += between(1, 20); - } else { - top = randomDouble(); - } - break; - case 2: - bottom += between(1, 20); - break; - case 3: - posLeft += between(1, 20); - break; - case 4: - posRight += between(1, 20); - break; - case 5: - negLeft += between(1, 20); - break; - case 6: - negRight += between(1, 20); - break; - case 7: - wrapLongitude = wrapLongitude == false; - break; - case 8: - if (metadata == null) { - metadata = new HashMap<>(1); - } else { - metadata = new HashMap<>(instance.getMetadata()); - } - metadata.put(randomAlphaOfLength(15), randomInt()); - break; - default: - throw new AssertionError("Illegal randomisation branch"); + case 0: + name += randomAlphaOfLength(5); + break; + case 1: + if (Double.isFinite(top)) { + top += between(1, 20); + } else { + top = randomDouble(); + } + break; + case 2: + bottom += between(1, 20); + break; + case 3: + posLeft += between(1, 20); + break; + case 4: + posRight += between(1, 20); + break; + case 5: + negLeft += between(1, 20); + break; + case 6: + negRight += between(1, 20); + break; + case 7: + wrapLongitude = wrapLongitude == false; + break; + case 8: + if (metadata == null) { + metadata = new HashMap<>(1); + } else { + metadata = new HashMap<>(instance.getMetadata()); + } + metadata.put(randomAlphaOfLength(15), randomInt()); + break; + default: + throw new AssertionError("Illegal randomisation branch"); } return new InternalGeoBounds(name, top, bottom, posLeft, posRight, negLeft, negRight, wrapLongitude, metadata); } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalGeoCentroidTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalGeoCentroidTests.java index 135910e972b4f..379a7610eac5e 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalGeoCentroidTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalGeoCentroidTests.java @@ -52,17 +52,20 @@ protected void assertReduced(InternalGeoCentroid reduced, List 0) { - assertEquals(latSum/totalCount, reduced.centroid().getLat(), 1E-5D); - assertEquals(lonSum/totalCount, reduced.centroid().getLon(), 1E-5D); + assertEquals(latSum / totalCount, reduced.centroid().getLat(), 1E-5D); + assertEquals(lonSum / totalCount, reduced.centroid().getLon(), 1E-5D); } assertEquals(totalCount, reduced.count()); } public void testReduceMaxCount() { - InternalGeoCentroid maxValueGeoCentroid = new InternalGeoCentroid("agg", new GeoPoint(10, 0), - Long.MAX_VALUE, Collections.emptyMap()); - InternalGeoCentroid reducedGeoCentroid = maxValueGeoCentroid - .reduce(Collections.singletonList(maxValueGeoCentroid), null); + InternalGeoCentroid maxValueGeoCentroid = new InternalGeoCentroid( + "agg", + new GeoPoint(10, 0), + Long.MAX_VALUE, + Collections.emptyMap() + ); + 
InternalGeoCentroid reducedGeoCentroid = maxValueGeoCentroid.reduce(Collections.singletonList(maxValueGeoCentroid), null); assertThat(reducedGeoCentroid.count(), equalTo(Long.MAX_VALUE)); } @@ -82,41 +85,41 @@ protected InternalGeoCentroid mutateInstance(InternalGeoCentroid instance) { long count = instance.count(); Map metadata = instance.getMetadata(); switch (between(0, 2)) { - case 0: - name += randomAlphaOfLength(5); - break; - case 1: - count += between(1, 100); - if (centroid == null) { - // if the new count is > 0 then we need to make sure there is a - // centroid or the constructor will throw an exception - centroid = new GeoPoint(randomDoubleBetween(-90, 90, false), randomDoubleBetween(-180, 180, false)); - } - break; - case 2: - if (centroid == null) { - centroid = new GeoPoint(randomDoubleBetween(-90, 90, false), randomDoubleBetween(-180, 180, false)); - count = between(1, 100); - } else { - GeoPoint newCentroid = new GeoPoint(centroid); - if (randomBoolean()) { - newCentroid.resetLat(centroid.getLat() / 2.0); + case 0: + name += randomAlphaOfLength(5); + break; + case 1: + count += between(1, 100); + if (centroid == null) { + // if the new count is > 0 then we need to make sure there is a + // centroid or the constructor will throw an exception + centroid = new GeoPoint(randomDoubleBetween(-90, 90, false), randomDoubleBetween(-180, 180, false)); + } + break; + case 2: + if (centroid == null) { + centroid = new GeoPoint(randomDoubleBetween(-90, 90, false), randomDoubleBetween(-180, 180, false)); + count = between(1, 100); } else { - newCentroid.resetLon(centroid.getLon() / 2.0); + GeoPoint newCentroid = new GeoPoint(centroid); + if (randomBoolean()) { + newCentroid.resetLat(centroid.getLat() / 2.0); + } else { + newCentroid.resetLon(centroid.getLon() / 2.0); + } + centroid = newCentroid; } - centroid = newCentroid; - } - break; - case 3: - if (metadata == null) { - metadata = new HashMap<>(1); - } else { - metadata = new HashMap<>(instance.getMetadata()); - } - metadata.put(randomAlphaOfLength(15), randomInt()); - break; - default: - throw new AssertionError("Illegal randomisation branch"); + break; + case 3: + if (metadata == null) { + metadata = new HashMap<>(1); + } else { + metadata = new HashMap<>(instance.getMetadata()); + } + metadata.put(randomAlphaOfLength(15), randomInt()); + break; + default: + throw new AssertionError("Illegal randomisation branch"); } return new InternalGeoCentroid(name, centroid, count, metadata); } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalHDRPercentilesRanksTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalHDRPercentilesRanksTests.java index b4a4d60f75274..77c1426ca632c 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalHDRPercentilesRanksTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalHDRPercentilesRanksTests.java @@ -16,12 +16,17 @@ import java.util.List; import java.util.Map; - public class InternalHDRPercentilesRanksTests extends InternalPercentilesRanksTestCase { @Override - protected InternalHDRPercentileRanks createTestInstance(String name, Map metadata, - boolean keyed, DocValueFormat format, double[] percents, double[] values) { + protected InternalHDRPercentileRanks createTestInstance( + String name, + Map metadata, + boolean keyed, + DocValueFormat format, + double[] percents, + double[] values + ) { final DoubleHistogram state = new DoubleHistogram(3); 
Arrays.stream(values).forEach(state::recordValue); @@ -53,33 +58,33 @@ protected InternalHDRPercentileRanks mutateInstance(InternalHDRPercentileRanks i DocValueFormat formatter = instance.formatter(); Map metadata = instance.getMetadata(); switch (between(0, 4)) { - case 0: - name += randomAlphaOfLength(5); - break; - case 1: - percents = Arrays.copyOf(percents, percents.length + 1); - percents[percents.length - 1] = randomDouble() * 100; - Arrays.sort(percents); - break; - case 2: - state = new DoubleHistogram(state); - for (int i = 0; i < between(10, 100); i++) { - state.recordValue(randomDouble()); - } - break; - case 3: - keyed = keyed == false; - break; - case 4: - if (metadata == null) { - metadata = new HashMap<>(1); - } else { - metadata = new HashMap<>(instance.getMetadata()); - } - metadata.put(randomAlphaOfLength(15), randomInt()); - break; - default: - throw new AssertionError("Illegal randomisation branch"); + case 0: + name += randomAlphaOfLength(5); + break; + case 1: + percents = Arrays.copyOf(percents, percents.length + 1); + percents[percents.length - 1] = randomDouble() * 100; + Arrays.sort(percents); + break; + case 2: + state = new DoubleHistogram(state); + for (int i = 0; i < between(10, 100); i++) { + state.recordValue(randomDouble()); + } + break; + case 3: + keyed = keyed == false; + break; + case 4: + if (metadata == null) { + metadata = new HashMap<>(1); + } else { + metadata = new HashMap<>(instance.getMetadata()); + } + metadata.put(randomAlphaOfLength(15), randomInt()); + break; + default: + throw new AssertionError("Illegal randomisation branch"); } return new InternalHDRPercentileRanks(name, percents, state, keyed, formatter, metadata); } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalHDRPercentilesTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalHDRPercentilesTests.java index 70dc5494d5293..fcf36096d028a 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalHDRPercentilesTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalHDRPercentilesTests.java @@ -22,9 +22,14 @@ public class InternalHDRPercentilesTests extends InternalPercentilesTestCase { @Override - protected InternalHDRPercentiles createTestInstance(String name, - Map metadata, - boolean keyed, DocValueFormat format, double[] percents, double[] values) { + protected InternalHDRPercentiles createTestInstance( + String name, + Map metadata, + boolean keyed, + DocValueFormat format, + double[] percents, + double[] values + ) { final DoubleHistogram state = new DoubleHistogram(3); Arrays.stream(values).forEach(state::recordValue); @@ -48,14 +53,13 @@ protected Class implementationClass() { } public void testIterator() { - final double[] percents = randomPercents(false); + final double[] percents = randomPercents(false); final double[] values = new double[frequently() ? 
randomIntBetween(1, 10) : 0]; for (int i = 0; i < values.length; ++i) { values[i] = randomDouble(); } - InternalHDRPercentiles aggregation = - createTestInstance("test", emptyMap(), false, randomNumericDocValueFormat(), percents, values); + InternalHDRPercentiles aggregation = createTestInstance("test", emptyMap(), false, randomNumericDocValueFormat(), percents, values); Iterator iterator = aggregation.iterator(); Iterator nameIterator = aggregation.valueNames().iterator(); @@ -85,33 +89,33 @@ protected InternalHDRPercentiles mutateInstance(InternalHDRPercentiles instance) DocValueFormat formatter = instance.formatter(); Map metadata = instance.getMetadata(); switch (between(0, 4)) { - case 0: - name += randomAlphaOfLength(5); - break; - case 1: - percents = Arrays.copyOf(percents, percents.length + 1); - percents[percents.length - 1] = randomDouble() * 100; - Arrays.sort(percents); - break; - case 2: - state = new DoubleHistogram(state); - for (int i = 0; i < between(10, 100); i++) { - state.recordValue(randomDouble()); - } - break; - case 3: - keyed = keyed == false; - break; - case 4: - if (metadata == null) { - metadata = new HashMap<>(1); - } else { - metadata = new HashMap<>(instance.getMetadata()); - } - metadata.put(randomAlphaOfLength(15), randomInt()); - break; - default: - throw new AssertionError("Illegal randomisation branch"); + case 0: + name += randomAlphaOfLength(5); + break; + case 1: + percents = Arrays.copyOf(percents, percents.length + 1); + percents[percents.length - 1] = randomDouble() * 100; + Arrays.sort(percents); + break; + case 2: + state = new DoubleHistogram(state); + for (int i = 0; i < between(10, 100); i++) { + state.recordValue(randomDouble()); + } + break; + case 3: + keyed = keyed == false; + break; + case 4: + if (metadata == null) { + metadata = new HashMap<>(1); + } else { + metadata = new HashMap<>(instance.getMetadata()); + } + metadata.put(randomAlphaOfLength(15), randomInt()); + break; + default: + throw new AssertionError("Illegal randomisation branch"); } return new InternalHDRPercentiles(name, percents, state, keyed, formatter, metadata); } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalMaxTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalMaxTests.java index 4babdaef4a468..af87e422ddff0 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalMaxTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalMaxTests.java @@ -50,26 +50,26 @@ protected InternalMax mutateInstance(InternalMax instance) { DocValueFormat formatter = instance.format; Map metadata = instance.getMetadata(); switch (between(0, 2)) { - case 0: - name += randomAlphaOfLength(5); - break; - case 1: - if (Double.isFinite(value)) { - value += between(1, 100); - } else { - value = between(1, 100); - } - break; - case 2: - if (metadata == null) { - metadata = new HashMap<>(1); - } else { - metadata = new HashMap<>(instance.getMetadata()); - } - metadata.put(randomAlphaOfLength(15), randomInt()); - break; - default: - throw new AssertionError("Illegal randomisation branch"); + case 0: + name += randomAlphaOfLength(5); + break; + case 1: + if (Double.isFinite(value)) { + value += between(1, 100); + } else { + value = between(1, 100); + } + break; + case 2: + if (metadata == null) { + metadata = new HashMap<>(1); + } else { + metadata = new HashMap<>(instance.getMetadata()); + } + metadata.put(randomAlphaOfLength(15), randomInt()); + break; + 
default: + throw new AssertionError("Illegal randomisation branch"); } return new InternalMax(name, value, formatter, metadata); } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalMedianAbsoluteDeviationTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalMedianAbsoluteDeviationTests.java index 8fe945e79746a..856f53da2f702 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalMedianAbsoluteDeviationTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalMedianAbsoluteDeviationTests.java @@ -21,9 +21,7 @@ public class InternalMedianAbsoluteDeviationTests extends InternalAggregationTes @Override protected InternalMedianAbsoluteDeviation createTestInstance(String name, Map metadata) { final TDigestState valuesSketch = new TDigestState(randomDoubleBetween(20, 1000, true)); - final int numberOfValues = frequently() - ? randomIntBetween(0, 1000) - : 0; + final int numberOfValues = frequently() ? randomIntBetween(0, 1000) : 0; for (int i = 0; i < numberOfValues; i++) { valuesSketch.add(randomDouble()); } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalMinTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalMinTests.java index 6e4dc7e68c3c2..f194c89927d18 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalMinTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalMinTests.java @@ -49,26 +49,26 @@ protected InternalMin mutateInstance(InternalMin instance) { DocValueFormat formatter = instance.format; Map metadata = instance.getMetadata(); switch (between(0, 2)) { - case 0: - name += randomAlphaOfLength(5); - break; - case 1: - if (Double.isFinite(value)) { - value += between(1, 100); - } else { - value = between(1, 100); - } - break; - case 2: - if (metadata == null) { - metadata = new HashMap<>(1); - } else { - metadata = new HashMap<>(instance.getMetadata()); - } - metadata.put(randomAlphaOfLength(15), randomInt()); - break; - default: - throw new AssertionError("Illegal randomisation branch"); + case 0: + name += randomAlphaOfLength(5); + break; + case 1: + if (Double.isFinite(value)) { + value += between(1, 100); + } else { + value = between(1, 100); + } + break; + case 2: + if (metadata == null) { + metadata = new HashMap<>(1); + } else { + metadata = new HashMap<>(instance.getMetadata()); + } + metadata.put(randomAlphaOfLength(15), randomInt()); + break; + default: + throw new AssertionError("Illegal randomisation branch"); } return new InternalMin(name, value, formatter, metadata); } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalPercentilesRanksTestCase.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalPercentilesRanksTestCase.java index f32e6ef83133b..2356e532ab534 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalPercentilesRanksTestCase.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalPercentilesRanksTestCase.java @@ -13,8 +13,8 @@ import static org.hamcrest.Matchers.equalTo; -public abstract class InternalPercentilesRanksTestCase - extends AbstractPercentilesTestCase { +public abstract class InternalPercentilesRanksTestCase extends AbstractPercentilesTestCase< + T> { @Override protected final void assertFromXContent(T aggregation, ParsedAggregation 
parsedAggregation) { diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalScriptedMetricTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalScriptedMetricTests.java index 2c8d57c6d3bc6..558ee1285308e 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalScriptedMetricTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalScriptedMetricTests.java @@ -43,16 +43,21 @@ public class InternalScriptedMetricTests extends InternalAggregationTestCase[] valueTypes; - @SuppressWarnings({"rawtypes", "unchecked"}) - private final Supplier[] leafValueSuppliers = new Supplier[] { () -> randomInt(), () -> randomLong(), () -> randomDouble(), - () -> randomFloat(), () -> randomBoolean(), () -> randomAlphaOfLength(5), () -> new GeoPoint(randomDouble(), randomDouble()), - () -> null }; - @SuppressWarnings({"rawtypes", "unchecked"}) - private final Supplier[] nestedValueSuppliers = new Supplier[] { () -> new HashMap(), - () -> new ArrayList<>() }; + @SuppressWarnings({ "rawtypes", "unchecked" }) + private final Supplier[] leafValueSuppliers = new Supplier[] { + () -> randomInt(), + () -> randomLong(), + () -> randomDouble(), + () -> randomFloat(), + () -> randomBoolean(), + () -> randomAlphaOfLength(5), + () -> new GeoPoint(randomDouble(), randomDouble()), + () -> null }; + @SuppressWarnings({ "rawtypes", "unchecked" }) + private final Supplier[] nestedValueSuppliers = new Supplier[] { () -> new HashMap(), () -> new ArrayList<>() }; @Override - @SuppressWarnings({"rawtypes", "unchecked"}) + @SuppressWarnings({ "rawtypes", "unchecked" }) public void setUp() throws Exception { super.setUp(); hasReduceScript = randomBoolean(); @@ -112,7 +117,7 @@ private static Object randomValue(Supplier[] valueTypes, int level) { map.put(randomAlphaOfLength(5), randomValue(valueTypes, level + 1)); } } else if (value instanceof List) { - int elements = randomIntBetween(1,5); + int elements = randomIntBetween(1, 5); List list = (List) value; for (int i = 0; i < elements; i++) { list.add(randomValue(valueTypes, level + 1)); @@ -131,9 +136,11 @@ private static Object randomValue(Supplier[] valueTypes, int level) { protected ScriptService mockScriptService() { // mock script always returns the size of the input aggs list as result @SuppressWarnings("unchecked") - MockScriptEngine scriptEngine = new MockScriptEngine(MockScriptEngine.NAME, - Collections.singletonMap(REDUCE_SCRIPT_NAME, script -> ((List) script.get("states")).size()), - Collections.emptyMap()); + MockScriptEngine scriptEngine = new MockScriptEngine( + MockScriptEngine.NAME, + Collections.singletonMap(REDUCE_SCRIPT_NAME, script -> ((List) script.get("states")).size()), + Collections.emptyMap() + ); Map engines = Collections.singletonMap(scriptEngine.getType(), scriptEngine); return new ScriptService(Settings.EMPTY, engines, ScriptModule.CORE_CONTEXTS); } @@ -226,25 +233,30 @@ protected InternalScriptedMetric mutateInstance(InternalScriptedMetric instance) Script reduceScript = instance.reduceScript; Map metadata = instance.getMetadata(); switch (between(0, 3)) { - case 0: - name += randomAlphaOfLength(5); - break; - case 1: - aggregationsList = randomValueOtherThan(aggregationsList, this::randomAggregations); - break; - case 2: - reduceScript = new Script(ScriptType.INLINE, MockScriptEngine.NAME, REDUCE_SCRIPT_NAME + "-mutated", Collections.emptyMap()); - break; - case 3: - if (metadata == null) { - metadata = new HashMap<>(1); - } 
else { - metadata = new HashMap<>(instance.getMetadata()); - } - metadata.put(randomAlphaOfLength(15), randomInt()); - break; - default: - throw new AssertionError("Illegal randomisation branch"); + case 0: + name += randomAlphaOfLength(5); + break; + case 1: + aggregationsList = randomValueOtherThan(aggregationsList, this::randomAggregations); + break; + case 2: + reduceScript = new Script( + ScriptType.INLINE, + MockScriptEngine.NAME, + REDUCE_SCRIPT_NAME + "-mutated", + Collections.emptyMap() + ); + break; + case 3: + if (metadata == null) { + metadata = new HashMap<>(1); + } else { + metadata = new HashMap<>(instance.getMetadata()); + } + metadata.put(randomAlphaOfLength(15), randomInt()); + break; + default: + throw new AssertionError("Illegal randomisation branch"); } return new InternalScriptedMetric(name, aggregationsList, reduceScript, metadata); } @@ -262,12 +274,15 @@ public void testOldSerialization() throws IOException { // A multi-element list looks like a non-reduced agg InternalScriptedMetric unreduced = new InternalScriptedMetric("test", List.of("foo", "bar"), new Script("test"), null); - Exception e = expectThrows(IllegalArgumentException.class, () -> copyNamedWriteable( - unreduced, - getNamedWriteableRegistry(), - InternalAggregation.class, - VersionUtils.randomVersionBetween(random(), Version.V_7_0_0, VersionUtils.getPreviousVersion(Version.V_7_8_0)) - )); + Exception e = expectThrows( + IllegalArgumentException.class, + () -> copyNamedWriteable( + unreduced, + getNamedWriteableRegistry(), + InternalAggregation.class, + VersionUtils.randomVersionBetween(random(), Version.V_7_0_0, VersionUtils.getPreviousVersion(Version.V_7_8_0)) + ) + ); assertThat(e.getMessage(), equalTo("scripted_metric doesn't support cross cluster search until 7.8.0")); } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalStatsBucketTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalStatsBucketTests.java index 2b84e6661ed91..40550e76fa188 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalStatsBucketTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalStatsBucketTests.java @@ -19,8 +19,15 @@ public class InternalStatsBucketTests extends InternalStatsTests { @Override - protected InternalStatsBucket createInstance(String name, long count, double sum, double min, double max, - DocValueFormat formatter, Map metadata) { + protected InternalStatsBucket createInstance( + String name, + long count, + double sum, + double min, + double max, + DocValueFormat formatter, + Map metadata + ) { return new InternalStatsBucket(name, count, sum, min, max, formatter, metadata); } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalStatsTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalStatsTests.java index 3baea7cbd6f53..5a7ae9b13d9cd 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalStatsTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalStatsTests.java @@ -38,8 +38,15 @@ protected InternalStats createTestInstance(String name, Map meta return createInstance(name, count, sum, min, max, format, metadata); } - protected InternalStats createInstance(String name, long count, double sum, double min, double max, DocValueFormat formatter, - Map metadata) { + protected InternalStats createInstance( + String name, + long 
count, + double sum, + double min, + double max, + DocValueFormat formatter, + Map metadata + ) { return new InternalStats(name, count, sum, min, max, formatter, metadata); } @@ -66,7 +73,7 @@ protected void assertReduced(InternalStats reduced, List inputs) } public void testSummationAccuracy() { - double[] values = new double[]{0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.9, 1.1, 1.2, 1.3, 1.4, 1.5, 1.6, 1.7}; + double[] values = new double[] { 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.9, 1.1, 1.2, 1.3, 1.4, 1.5, 1.6, 1.7 }; verifyStatsOfDoubles(values, 13.5, 0.9, 0d); int n = randomIntBetween(5, 10); @@ -123,7 +130,7 @@ protected void assertFromXContent(InternalStats aggregation, ParsedAggregation p static void assertStats(InternalStats aggregation, ParsedStats parsed) { long count = aggregation.getCount(); assertEquals(count, parsed.getCount()); - // for count == 0, fields are rendered as `null`, so we test that we parse to default values used also in the reduce phase + // for count == 0, fields are rendered as `null`, so we test that we parse to default values used also in the reduce phase assertEquals(count > 0 ? aggregation.getMin() : Double.POSITIVE_INFINITY, parsed.getMin(), 0); assertEquals(count > 0 ? aggregation.getMax() : Double.NEGATIVE_INFINITY, parsed.getMax(), 0); assertEquals(count > 0 ? aggregation.getSum() : 0, parsed.getSum(), 0); @@ -147,47 +154,47 @@ protected InternalStats mutateInstance(InternalStats instance) { DocValueFormat formatter = instance.format; Map metadata = instance.getMetadata(); switch (between(0, 5)) { - case 0: - name += randomAlphaOfLength(5); - break; - case 1: - if (Double.isFinite(count)) { - count += between(1, 100); - } else { - count = between(1, 100); - } - break; - case 2: - if (Double.isFinite(sum)) { - sum += between(1, 100); - } else { - sum = between(1, 100); - } - break; - case 3: - if (Double.isFinite(min)) { - min += between(1, 100); - } else { - min = between(1, 100); - } - break; - case 4: - if (Double.isFinite(max)) { - max += between(1, 100); - } else { - max = between(1, 100); - } - break; - case 5: - if (metadata == null) { - metadata = new HashMap<>(1); - } else { - metadata = new HashMap<>(instance.getMetadata()); - } - metadata.put(randomAlphaOfLength(15), randomInt()); - break; - default: - throw new AssertionError("Illegal randomisation branch"); + case 0: + name += randomAlphaOfLength(5); + break; + case 1: + if (Double.isFinite(count)) { + count += between(1, 100); + } else { + count = between(1, 100); + } + break; + case 2: + if (Double.isFinite(sum)) { + sum += between(1, 100); + } else { + sum = between(1, 100); + } + break; + case 3: + if (Double.isFinite(min)) { + min += between(1, 100); + } else { + min = between(1, 100); + } + break; + case 4: + if (Double.isFinite(max)) { + max += between(1, 100); + } else { + max = between(1, 100); + } + break; + case 5: + if (metadata == null) { + metadata = new HashMap<>(1); + } else { + metadata = new HashMap<>(instance.getMetadata()); + } + metadata.put(randomAlphaOfLength(15), randomInt()); + break; + default: + throw new AssertionError("Illegal randomisation branch"); } return new InternalStats(name, count, sum, min, max, formatter, metadata); } @@ -205,18 +212,35 @@ public void testDoXContentBody() throws IOException { internalStats.doXContentBody(builder, ToXContent.EMPTY_PARAMS); builder.endObject(); - String expected = "{\n" + - " \"count\" : " + count + ",\n" + - " \"min\" : " + min + ",\n" + - " \"max\" : " + max + ",\n" + - " \"avg\" : " + internalStats.getAvg() + ",\n" + - " 
\"sum\" : " + sum; + String expected = "{\n" + + " \"count\" : " + + count + + ",\n" + + " \"min\" : " + + min + + ",\n" + + " \"max\" : " + + max + + ",\n" + + " \"avg\" : " + + internalStats.getAvg() + + ",\n" + + " \"sum\" : " + + sum; if (format != DocValueFormat.RAW) { - expected += ",\n"+ - " \"min_as_string\" : \"" + format.format(internalStats.getMin()) + "\",\n" + - " \"max_as_string\" : \"" + format.format(internalStats.getMax()) + "\",\n" + - " \"avg_as_string\" : \"" + format.format(internalStats.getAvg()) + "\",\n" + - " \"sum_as_string\" : \"" + format.format(internalStats.getSum()) + "\""; + expected += ",\n" + + " \"min_as_string\" : \"" + + format.format(internalStats.getMin()) + + "\",\n" + + " \"max_as_string\" : \"" + + format.format(internalStats.getMax()) + + "\",\n" + + " \"avg_as_string\" : \"" + + format.format(internalStats.getAvg()) + + "\",\n" + + " \"sum_as_string\" : \"" + + format.format(internalStats.getSum()) + + "\""; } expected += "\n}"; assertEquals(expected, Strings.toString(builder)); @@ -233,13 +257,16 @@ public void testDoXContentBody() throws IOException { internalStats.doXContentBody(builder, ToXContent.EMPTY_PARAMS); builder.endObject(); - assertEquals("{\n" + - " \"count\" : 0,\n" + - " \"min\" : null,\n" + - " \"max\" : null,\n" + - " \"avg\" : null,\n" + - " \"sum\" : 0.0\n" + - "}", Strings.toString(builder)); + assertEquals( + "{\n" + + " \"count\" : 0,\n" + + " \"min\" : null,\n" + + " \"max\" : null,\n" + + " \"avg\" : null,\n" + + " \"sum\" : 0.0\n" + + "}", + Strings.toString(builder) + ); } public void testIterator() { @@ -254,4 +281,3 @@ public void testIterator() { assertTrue(names.contains("sum")); } } - diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalSumTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalSumTests.java index ae555125f5f75..66deadb47c9b3 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalSumTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalSumTests.java @@ -34,7 +34,7 @@ protected void assertReduced(InternalSum reduced, List inputs) { public void testSummationAccuracy() { // Summing up a normal array and expect an accurate value - double[] values = new double[]{0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.9, 1.1, 1.2, 1.3, 1.4, 1.5, 1.6, 1.7}; + double[] values = new double[] { 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.9, 1.1, 1.2, 1.3, 1.4, 1.5, 1.6, 1.7 }; verifySummationOfDoubles(values, 13.5, 0d); // Summing up an array which contains NaN and infinities and expect a result same as naive summation @@ -87,26 +87,26 @@ protected InternalSum mutateInstance(InternalSum instance) { DocValueFormat formatter = instance.format; Map metadata = instance.getMetadata(); switch (between(0, 2)) { - case 0: - name += randomAlphaOfLength(5); - break; - case 1: - if (Double.isFinite(value)) { - value += between(1, 100); - } else { - value = between(1, 100); - } - break; - case 2: - if (metadata == null) { - metadata = new HashMap<>(1); - } else { - metadata = new HashMap<>(instance.getMetadata()); - } - metadata.put(randomAlphaOfLength(15), randomInt()); - break; - default: - throw new AssertionError("Illegal randomisation branch"); + case 0: + name += randomAlphaOfLength(5); + break; + case 1: + if (Double.isFinite(value)) { + value += between(1, 100); + } else { + value = between(1, 100); + } + break; + case 2: + if (metadata == null) { + metadata = new HashMap<>(1); + } else { + metadata = 
new HashMap<>(instance.getMetadata()); + } + metadata.put(randomAlphaOfLength(15), randomInt()); + break; + default: + throw new AssertionError("Illegal randomisation branch"); } return new InternalSum(name, value, formatter, metadata); } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalTDigestPercentilesRanksTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalTDigestPercentilesRanksTests.java index bf5374aac6bc1..b9fe3ed87c60b 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalTDigestPercentilesRanksTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalTDigestPercentilesRanksTests.java @@ -18,8 +18,14 @@ public class InternalTDigestPercentilesRanksTests extends InternalPercentilesRanksTestCase { @Override - protected InternalTDigestPercentileRanks createTestInstance(String name, Map metadata, - boolean keyed, DocValueFormat format, double[] percents, double[] values) { + protected InternalTDigestPercentileRanks createTestInstance( + String name, + Map metadata, + boolean keyed, + DocValueFormat format, + double[] percents, + double[] values + ) { final TDigestState state = new TDigestState(100); Arrays.stream(values).forEach(state::add); @@ -64,35 +70,35 @@ protected InternalTDigestPercentileRanks mutateInstance(InternalTDigestPercentil DocValueFormat formatter = instance.formatter(); Map metadata = instance.getMetadata(); switch (between(0, 4)) { - case 0: - name += randomAlphaOfLength(5); - break; - case 1: - percents = Arrays.copyOf(percents, percents.length + 1); - percents[percents.length - 1] = randomDouble() * 100; - Arrays.sort(percents); - break; - case 2: - TDigestState newState = new TDigestState(state.compression()); - newState.add(state); - for (int i = 0; i < between(10, 100); i++) { - newState.add(randomDouble()); - } - state = newState; - break; - case 3: - keyed = keyed == false; - break; - case 4: - if (metadata == null) { - metadata = new HashMap<>(1); - } else { - metadata = new HashMap<>(instance.getMetadata()); - } - metadata.put(randomAlphaOfLength(15), randomInt()); - break; - default: - throw new AssertionError("Illegal randomisation branch"); + case 0: + name += randomAlphaOfLength(5); + break; + case 1: + percents = Arrays.copyOf(percents, percents.length + 1); + percents[percents.length - 1] = randomDouble() * 100; + Arrays.sort(percents); + break; + case 2: + TDigestState newState = new TDigestState(state.compression()); + newState.add(state); + for (int i = 0; i < between(10, 100); i++) { + newState.add(randomDouble()); + } + state = newState; + break; + case 3: + keyed = keyed == false; + break; + case 4: + if (metadata == null) { + metadata = new HashMap<>(1); + } else { + metadata = new HashMap<>(instance.getMetadata()); + } + metadata.put(randomAlphaOfLength(15), randomInt()); + break; + default: + throw new AssertionError("Illegal randomisation branch"); } return new InternalTDigestPercentileRanks(name, percents, state, keyed, formatter, metadata); } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalTDigestPercentilesTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalTDigestPercentilesTests.java index 863f12f710353..352cf3f7168f3 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalTDigestPercentilesTests.java +++ 
b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalTDigestPercentilesTests.java @@ -21,9 +21,14 @@ public class InternalTDigestPercentilesTests extends InternalPercentilesTestCase { @Override - protected InternalTDigestPercentiles createTestInstance(String name, - Map metadata, - boolean keyed, DocValueFormat format, double[] percents, double[] values) { + protected InternalTDigestPercentiles createTestInstance( + String name, + Map metadata, + boolean keyed, + DocValueFormat format, + double[] percents, + double[] values + ) { final TDigestState state = new TDigestState(100); Arrays.stream(values).forEach(state::add); @@ -63,48 +68,54 @@ protected InternalTDigestPercentiles mutateInstance(InternalTDigestPercentiles i DocValueFormat formatter = instance.formatter(); Map metadata = instance.getMetadata(); switch (between(0, 4)) { - case 0: - name += randomAlphaOfLength(5); - break; - case 1: - percents = Arrays.copyOf(percents, percents.length + 1); - percents[percents.length - 1] = randomDouble() * 100; - Arrays.sort(percents); - break; - case 2: - TDigestState newState = new TDigestState(state.compression()); - newState.add(state); - for (int i = 0; i < between(10, 100); i++) { - newState.add(randomDouble()); - } - state = newState; - break; - case 3: - keyed = keyed == false; - break; - case 4: - if (metadata == null) { - metadata = new HashMap<>(1); - } else { - metadata = new HashMap<>(instance.getMetadata()); - } - metadata.put(randomAlphaOfLength(15), randomInt()); - break; - default: - throw new AssertionError("Illegal randomisation branch"); + case 0: + name += randomAlphaOfLength(5); + break; + case 1: + percents = Arrays.copyOf(percents, percents.length + 1); + percents[percents.length - 1] = randomDouble() * 100; + Arrays.sort(percents); + break; + case 2: + TDigestState newState = new TDigestState(state.compression()); + newState.add(state); + for (int i = 0; i < between(10, 100); i++) { + newState.add(randomDouble()); + } + state = newState; + break; + case 3: + keyed = keyed == false; + break; + case 4: + if (metadata == null) { + metadata = new HashMap<>(1); + } else { + metadata = new HashMap<>(instance.getMetadata()); + } + metadata.put(randomAlphaOfLength(15), randomInt()); + break; + default: + throw new AssertionError("Illegal randomisation branch"); } return new InternalTDigestPercentiles(name, percents, state, keyed, formatter, metadata); } public void testIterator() { - final double[] percents = randomPercents(false); + final double[] percents = randomPercents(false); final double[] values = new double[frequently() ? 
randomIntBetween(1, 10) : 0]; for (int i = 0; i < values.length; ++i) { values[i] = randomDouble(); } - InternalTDigestPercentiles aggregation = - createTestInstance("test", emptyMap(), false, randomNumericDocValueFormat(), percents, values); + InternalTDigestPercentiles aggregation = createTestInstance( + "test", + emptyMap(), + false, + randomNumericDocValueFormat(), + percents, + values + ); Iterator iterator = aggregation.iterator(); Iterator nameIterator = aggregation.valueNames().iterator(); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalTopHitsTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalTopHitsTests.java index 90da0a9c143d7..61e8de696233f 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalTopHitsTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalTopHitsTests.java @@ -18,13 +18,13 @@ import org.apache.lucene.search.TotalHits; import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.core.Tuple; import org.elasticsearch.common.document.DocumentField; import org.elasticsearch.common.lucene.search.TopDocsAndMaxScore; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.json.JsonXContent; +import org.elasticsearch.core.Tuple; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHits; import org.elasticsearch.search.aggregations.ParsedAggregation; @@ -57,8 +57,14 @@ public class InternalTopHitsTests extends InternalAggregationTestCase metadata) { if (randomBoolean()) { - return createTestInstanceSortedByFields(name, between(1, 40), metadata, ESTestCase::randomFloat, - randomSortFields(), InternalTopHitsTests::randomOfType); + return createTestInstanceSortedByFields( + name, + between(1, 40), + metadata, + ESTestCase::randomFloat, + randomSortFields(), + InternalTopHitsTests::randomOfType + ); } return createTestInstanceSortedScore(name, between(1, 40), metadata, ESTestCase::randomFloat); } @@ -92,19 +98,22 @@ protected List randomResultsToReduce(String name, int size) { return Stream.generate(supplier).limit(size).collect(toList()); } - private InternalTopHits createTestInstanceSortedByFields(String name, int requestedSize, Map metadata, - Supplier scoreSupplier, SortField[] sortFields, Function sortFieldValueSupplier) { - return createTestInstance(name, metadata, scoreSupplier, requestedSize, - (docId, score) -> { - Object[] fields = new Object[sortFields.length]; - for (int f = 0; f < sortFields.length; f++) { - final int ff = f; - fields[f] = sortFieldValueSupplier.apply(sortFields[ff].getType()); - } - return new FieldDoc(docId, score, fields); - }, - (totalHits, scoreDocs) -> new TopFieldDocs(totalHits, scoreDocs, sortFields), - sortFieldsComparator(sortFields)); + private InternalTopHits createTestInstanceSortedByFields( + String name, + int requestedSize, + Map metadata, + Supplier scoreSupplier, + SortField[] sortFields, + Function sortFieldValueSupplier + ) { + return createTestInstance(name, metadata, scoreSupplier, requestedSize, (docId, score) -> { + Object[] fields = new Object[sortFields.length]; + for (int f = 0; f < sortFields.length; f++) { + final int ff = f; + fields[f] = sortFieldValueSupplier.apply(sortFields[ff].getType()); + } + return new FieldDoc(docId, score, 
fields); + }, (totalHits, scoreDocs) -> new TopFieldDocs(totalHits, scoreDocs, sortFields), sortFieldsComparator(sortFields)); } private InternalTopHits createTestInstanceSortedScore( @@ -116,10 +125,15 @@ private InternalTopHits createTestInstanceSortedScore( return createTestInstance(name, metadata, scoreSupplier, requestedSize, ScoreDoc::new, TopDocs::new, scoreComparator()); } - private InternalTopHits createTestInstance(String name, Map metadata, Supplier scoreSupplier, - int requestedSize, - BiFunction docBuilder, - BiFunction topDocsBuilder, Comparator comparator) { + private InternalTopHits createTestInstance( + String name, + Map metadata, + Supplier scoreSupplier, + int requestedSize, + BiFunction docBuilder, + BiFunction topDocsBuilder, + Comparator comparator + ) { int from = 0; int actualSize = between(0, requestedSize); @@ -195,28 +209,28 @@ protected void assertFromXContent(InternalTopHits aggregation, ParsedAggregation private static Object randomOfType(SortField.Type type) { switch (type) { - case CUSTOM: - throw new UnsupportedOperationException(); - case DOC: - return between(0, IndexWriter.MAX_DOCS); - case DOUBLE: - return randomDouble(); - case FLOAT: - return randomFloat(); - case INT: - return randomInt(); - case LONG: - return randomLong(); - case REWRITEABLE: - throw new UnsupportedOperationException(); - case SCORE: - return randomFloat(); - case STRING: - return new BytesRef(randomAlphaOfLength(5)); - case STRING_VAL: - return new BytesRef(randomAlphaOfLength(5)); - default: - throw new UnsupportedOperationException("Unknown SortField.Type: " + type); + case CUSTOM: + throw new UnsupportedOperationException(); + case DOC: + return between(0, IndexWriter.MAX_DOCS); + case DOUBLE: + return randomDouble(); + case FLOAT: + return randomFloat(); + case INT: + return randomInt(); + case LONG: + return randomLong(); + case REWRITEABLE: + throw new UnsupportedOperationException(); + case SCORE: + return randomFloat(); + case STRING: + return new BytesRef(randomAlphaOfLength(5)); + case STRING_VAL: + return new BytesRef(randomAlphaOfLength(5)); + default: + throw new UnsupportedOperationException("Unknown SortField.Type: " + type); } } @@ -258,9 +272,11 @@ protected void assertReduced(InternalTopHits reduced, List inpu expectedHitsHits[i] = allHits.get(i).v2(); } // Lucene's TopDocs initializes the maxScore to Float.NaN, if there is no maxScore - SearchHits expectedHits = new SearchHits(expectedHitsHits, new TotalHits(totalHits, relation), maxScore == Float.NEGATIVE_INFINITY ? - Float.NaN : - maxScore); + SearchHits expectedHits = new SearchHits( + expectedHitsHits, + new TotalHits(totalHits, relation), + maxScore == Float.NEGATIVE_INFINITY ? 
Float.NaN : maxScore + ); assertEqualsWithErrorMessageFromXContent(expectedHits, actualHits); } @@ -278,8 +294,7 @@ private static void assertEqualsWithErrorMessageFromXCont if (actual == null) { throw new AssertionError("Didn't expect null but actual was [null]"); } - try (XContentBuilder actualJson = JsonXContent.contentBuilder(); - XContentBuilder expectedJson = JsonXContent.contentBuilder()) { + try (XContentBuilder actualJson = JsonXContent.contentBuilder(); XContentBuilder expectedJson = JsonXContent.contentBuilder()) { actualJson.startObject(); actual.toXContent(actualJson, ToXContent.EMPTY_PARAMS); actualJson.endObject(); @@ -289,7 +304,8 @@ private static void assertEqualsWithErrorMessageFromXCont NotEqualMessageBuilder message = new NotEqualMessageBuilder(); message.compareMaps( XContentHelper.convertToMap(BytesReference.bytes(actualJson), false).v2(), - XContentHelper.convertToMap(BytesReference.bytes(expectedJson), false).v2()); + XContentHelper.convertToMap(BytesReference.bytes(expectedJson), false).v2() + ); throw new AssertionError("Didn't match expected value:\n" + message); } catch (IOException e) { throw new AssertionError("IOException while building failure message", e); @@ -302,8 +318,10 @@ private SortField[] randomSortFields() { for (int i = 0; i < sortFields.length; i++) { String sortField = randomValueOtherThanMany(usedSortFields::contains, () -> randomAlphaOfLength(5)); usedSortFields.add(sortField); - SortField.Type type = randomValueOtherThanMany(t -> t == SortField.Type.CUSTOM || t == SortField.Type.REWRITEABLE, - () -> randomFrom(SortField.Type.values())); + SortField.Type type = randomValueOtherThanMany( + t -> t == SortField.Type.CUSTOM || t == SortField.Type.REWRITEABLE, + () -> randomFrom(SortField.Type.values()) + ); sortFields[i] = new SortField(sortField, type); } return sortFields; @@ -346,33 +364,41 @@ protected InternalTopHits mutateInstance(InternalTopHits instance) { SearchHits searchHits = instance.getHits(); Map metadata = instance.getMetadata(); switch (between(0, 5)) { - case 0: - name += randomAlphaOfLength(5); - break; - case 1: - from += between(1, 100); - break; - case 2: - size += between(1, 100); - break; - case 3: - topDocs = new TopDocsAndMaxScore(new TopDocs(new TotalHits(topDocs.topDocs.totalHits.value + between(1, 100), - topDocs.topDocs.totalHits.relation), topDocs.topDocs.scoreDocs), topDocs.maxScore + randomFloat()); - break; - case 4: - TotalHits totalHits = new TotalHits(searchHits.getTotalHits().value + between(1, 100), randomFrom(TotalHits.Relation.values())); - searchHits = new SearchHits(searchHits.getHits(), totalHits, searchHits.getMaxScore() + randomFloat()); - break; - case 5: - if (metadata == null) { - metadata = new HashMap<>(1); - } else { - metadata = new HashMap<>(instance.getMetadata()); - } - metadata.put(randomAlphaOfLength(15), randomInt()); - break; - default: - throw new AssertionError("Illegal randomisation branch"); + case 0: + name += randomAlphaOfLength(5); + break; + case 1: + from += between(1, 100); + break; + case 2: + size += between(1, 100); + break; + case 3: + topDocs = new TopDocsAndMaxScore( + new TopDocs( + new TotalHits(topDocs.topDocs.totalHits.value + between(1, 100), topDocs.topDocs.totalHits.relation), + topDocs.topDocs.scoreDocs + ), + topDocs.maxScore + randomFloat() + ); + break; + case 4: + TotalHits totalHits = new TotalHits( + searchHits.getTotalHits().value + between(1, 100), + randomFrom(TotalHits.Relation.values()) + ); + searchHits = new SearchHits(searchHits.getHits(), totalHits, 
searchHits.getMaxScore() + randomFloat()); + break; + case 5: + if (metadata == null) { + metadata = new HashMap<>(1); + } else { + metadata = new HashMap<>(instance.getMetadata()); + } + metadata.put(randomAlphaOfLength(15), randomInt()); + break; + default: + throw new AssertionError("Illegal randomisation branch"); } return new InternalTopHits(name, from, size, topDocs, searchHits, metadata); } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalValueCountTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalValueCountTests.java index aa88479d7459e..e3e75f9aacbde 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalValueCountTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalValueCountTests.java @@ -39,26 +39,26 @@ protected InternalValueCount mutateInstance(InternalValueCount instance) { long value = instance.getValue(); Map metadata = instance.getMetadata(); switch (between(0, 2)) { - case 0: - name += randomAlphaOfLength(5); - break; - case 1: - if (Double.isFinite(value)) { - value += between(1, 100); - } else { - value = between(1, 100); - } - break; - case 2: - if (metadata == null) { - metadata = new HashMap<>(1); - } else { - metadata = new HashMap<>(instance.getMetadata()); - } - metadata.put(randomAlphaOfLength(15), randomInt()); - break; - default: - throw new AssertionError("Illegal randomisation branch"); + case 0: + name += randomAlphaOfLength(5); + break; + case 1: + if (Double.isFinite(value)) { + value += between(1, 100); + } else { + value = between(1, 100); + } + break; + case 2: + if (metadata == null) { + metadata = new HashMap<>(1); + } else { + metadata = new HashMap<>(instance.getMetadata()); + } + metadata.put(randomAlphaOfLength(15), randomInt()); + break; + default: + throw new AssertionError("Illegal randomisation branch"); } return new InternalValueCount(name, value, metadata); } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalWeightedAvgTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalWeightedAvgTests.java index f4629bedf45c5..7ceee2569b175 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalWeightedAvgTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalWeightedAvgTests.java @@ -25,7 +25,9 @@ protected InternalWeightedAvg createTestInstance(String name, Map metadata = instance.getMetadata(); switch (between(0, 2)) { - case 0: - name += randomAlphaOfLength(5); - break; - case 1: - if (Double.isFinite(sum)) { - sum += between(1, 100); - } else { - sum = between(1, 100); - } - break; - case 2: - if (Double.isFinite(weight)) { - weight += between(1, 100); - } else { - weight = between(1, 100); - } - break; - case 3: - if (metadata == null) { - metadata = new HashMap<>(1); - } else { - metadata = new HashMap<>(instance.getMetadata()); - } - metadata.put(randomAlphaOfLength(15), randomInt()); - break; - default: - throw new AssertionError("Illegal randomisation branch"); + case 0: + name += randomAlphaOfLength(5); + break; + case 1: + if (Double.isFinite(sum)) { + sum += between(1, 100); + } else { + sum = between(1, 100); + } + break; + case 2: + if (Double.isFinite(weight)) { + weight += between(1, 100); + } else { + weight = between(1, 100); + } + break; + case 3: + if (metadata == null) { + metadata = new HashMap<>(1); + } else { + metadata = new 
HashMap<>(instance.getMetadata()); + } + metadata.put(randomAlphaOfLength(15), randomInt()); + break; + default: + throw new AssertionError("Illegal randomisation branch"); } return new InternalWeightedAvg(name, sum, weight, formatter, metadata); } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/MaxAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/MaxAggregatorTests.java index 97039b2010a39..23039bd599b01 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/MaxAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/MaxAggregatorTests.java @@ -34,9 +34,9 @@ import org.apache.lucene.store.Directory; import org.apache.lucene.util.Bits; import org.apache.lucene.util.FutureArrays; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.core.Tuple; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.NumberFieldMapper; import org.elasticsearch.script.MockScriptEngine; @@ -141,10 +141,12 @@ protected ScriptService getMockScriptService() { Map, Object>> nonDeterministicScripts = new HashMap<>(); nonDeterministicScripts.put(RANDOM_SCRIPT, vars -> MaxAggregatorTests.randomDouble()); - MockScriptEngine scriptEngine = new MockScriptEngine(MockScriptEngine.NAME, + MockScriptEngine scriptEngine = new MockScriptEngine( + MockScriptEngine.NAME, scripts, nonDeterministicScripts, - Collections.emptyMap()); + Collections.emptyMap() + ); Map engines = Collections.singletonMap(scriptEngine.getType(), scriptEngine); return new ScriptService(Settings.EMPTY, engines, ScriptModule.CORE_CONTEXTS); @@ -152,15 +154,12 @@ protected ScriptService getMockScriptService() { @Override protected List getSupportedValuesSourceTypes() { - return List.of(CoreValuesSourceType.NUMERIC, - CoreValuesSourceType.BOOLEAN, - CoreValuesSourceType.DATE); + return List.of(CoreValuesSourceType.NUMERIC, CoreValuesSourceType.BOOLEAN, CoreValuesSourceType.DATE); } @Override protected AggregationBuilder createAggBuilderForTypeTest(MappedFieldType fieldType, String fieldName) { - return new MaxAggregationBuilder("_name") - .field(fieldName); + return new MaxAggregationBuilder("_name").field(fieldName); } public void testNoDocs() throws IOException { @@ -262,8 +261,7 @@ public void testMissingFieldOptimization() throws IOException { public void testScript() throws IOException { MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("number", NumberFieldMapper.NumberType.INTEGER); - AggregationBuilder aggregationBuilder = new MaxAggregationBuilder("_name") - .field("number") + AggregationBuilder aggregationBuilder = new MaxAggregationBuilder("_name").field("number") .script(new Script(ScriptType.INLINE, MockScriptEngine.NAME, SCRIPT_NAME, Collections.emptyMap())); testAggregation(aggregationBuilder, new DocValuesFieldExistsQuery("number"), iw -> { @@ -275,9 +273,8 @@ public void testScript() throws IOException { }, fieldType); } - private void testAggregation(Query query, - CheckedConsumer buildIndex, - Consumer verify) throws IOException { + private void testAggregation(Query query, CheckedConsumer buildIndex, Consumer verify) + throws IOException { MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("number", NumberFieldMapper.NumberType.INTEGER); MaxAggregationBuilder aggregationBuilder = new 
MaxAggregationBuilder("_name").field("number"); testAggregation(aggregationBuilder, query, buildIndex, verify, fieldType); @@ -297,27 +294,25 @@ public void testMaxShortcutRandom() throws Exception { testMaxShortcutCase( () -> randomLongBetween(Integer.MIN_VALUE, Integer.MAX_VALUE), (n) -> new LongPoint("number", n.longValue()), - (v) -> LongPoint.decodeDimension(v, 0)); + (v) -> LongPoint.decodeDimension(v, 0) + ); - testMaxShortcutCase( - () -> randomInt(), - (n) -> new IntPoint("number", n.intValue()), - (v) -> IntPoint.decodeDimension(v, 0)); + testMaxShortcutCase(() -> randomInt(), (n) -> new IntPoint("number", n.intValue()), (v) -> IntPoint.decodeDimension(v, 0)); - testMaxShortcutCase( - () -> randomFloat(), - (n) -> new FloatPoint("number", n.floatValue()), - (v) -> FloatPoint.decodeDimension(v, 0)); + testMaxShortcutCase(() -> randomFloat(), (n) -> new FloatPoint("number", n.floatValue()), (v) -> FloatPoint.decodeDimension(v, 0)); testMaxShortcutCase( () -> randomDouble(), (n) -> new DoublePoint("number", n.doubleValue()), - (v) -> DoublePoint.decodeDimension(v, 0)); + (v) -> DoublePoint.decodeDimension(v, 0) + ); } - private void testMaxShortcutCase(Supplier randomNumber, - Function pointFieldFunc, - Function pointConvertFunc) throws IOException { + private void testMaxShortcutCase( + Supplier randomNumber, + Function pointFieldFunc, + Function pointConvertFunc + ) throws IOException { Directory directory = newDirectory(); IndexWriterConfig config = newIndexWriterConfig().setMergePolicy(NoMergePolicy.INSTANCE); IndexWriter indexWriter = new IndexWriter(directory, config); @@ -334,7 +329,7 @@ private void testMaxShortcutCase(Supplier randomNumber, document.add(new StringField("id", Integer.toString(docID), Field.Store.NO)); document.add(pointFieldFunc.apply(nextValue)); documents.add(document); - docID ++; + docID++; } } // insert some documents without a value for the metric field. 
@@ -346,14 +341,14 @@ private void testMaxShortcutCase(Supplier randomNumber, Collections.sort(values, Comparator.comparingDouble(t -> t.v2().doubleValue())); try (IndexReader reader = DirectoryReader.open(indexWriter)) { LeafReaderContext ctx = reader.leaves().get(0); - Number res = MaxAggregator.findLeafMaxValue(ctx.reader(), "number" , pointConvertFunc); - assertThat(res, equalTo(values.get(values.size()-1).v2())); + Number res = MaxAggregator.findLeafMaxValue(ctx.reader(), "number", pointConvertFunc); + assertThat(res, equalTo(values.get(values.size() - 1).v2())); } - for (int i = values.size()-1; i > 0; i--) { + for (int i = values.size() - 1; i > 0; i--) { indexWriter.deleteDocuments(new Term("id", values.get(i).v1().toString())); try (IndexReader reader = DirectoryReader.open(indexWriter)) { LeafReaderContext ctx = reader.leaves().get(0); - Number res = MaxAggregator.findLeafMaxValue(ctx.reader(), "number" , pointConvertFunc); + Number res = MaxAggregator.findLeafMaxValue(ctx.reader(), "number", pointConvertFunc); if (res != null) { assertThat(res, equalTo(values.get(i - 1).v2())); } else { @@ -364,7 +359,7 @@ private void testMaxShortcutCase(Supplier randomNumber, indexWriter.deleteDocuments(new Term("id", values.get(0).v1().toString())); try (IndexReader reader = DirectoryReader.open(indexWriter)) { LeafReaderContext ctx = reader.leaves().get(0); - Number res = MaxAggregator.findLeafMaxValue(ctx.reader(), "number" , pointConvertFunc); + Number res = MaxAggregator.findLeafMaxValue(ctx.reader(), "number", pointConvertFunc); assertThat(res, equalTo(null)); } indexWriter.close(); @@ -390,7 +385,7 @@ public void visit(int docID, byte[] packedValue) { @Override public PointValues.Relation compare(byte[] minPackedValue, byte[] maxPackedValue) { - if (FutureArrays.equals(maxPackedValue, 0, numBytes, maxValue, 0, numBytes)) { + if (FutureArrays.equals(maxPackedValue, 0, numBytes, maxValue, 0, numBytes)) { return PointValues.Relation.CELL_CROSSES_QUERY; } return PointValues.Relation.CELL_OUTSIDE_QUERY; @@ -400,7 +395,7 @@ public PointValues.Relation compare(byte[] minPackedValue, byte[] maxPackedValue } public void testSingleValuedField() throws IOException { - testAggregation( new MatchAllDocsQuery(), iw -> { + testAggregation(new MatchAllDocsQuery(), iw -> { final int numDocs = 10; for (int i = 0; i < numDocs; i++) { iw.addDocument(singleton(new NumericDocValuesField("number", i + 1))); @@ -414,11 +409,9 @@ public void testSingleValuedField() throws IOException { public void testSingleValuedFieldWithFormatter() throws IOException { MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("value", NumberFieldMapper.NumberType.INTEGER); - MaxAggregationBuilder aggregationBuilder = new MaxAggregationBuilder("_name") - .format("0000.0") - .field("value"); + MaxAggregationBuilder aggregationBuilder = new MaxAggregationBuilder("_name").format("0000.0").field("value"); - testAggregation(aggregationBuilder, new MatchAllDocsQuery(), iw -> { + testAggregation(aggregationBuilder, new MatchAllDocsQuery(), iw -> { final int numDocs = 10; for (int i = 0; i < numDocs; i++) { iw.addDocument(singleton(new NumericDocValuesField("value", i + 1))); @@ -464,8 +457,8 @@ public void testSingleValuedFieldGetProperty() throws IOException { assertEquals("max", max.getName()); assertEquals(10.0, max.getValue(), 0); assertEquals(max, ((InternalAggregation) global).getProperty("max")); - assertEquals(10.0, (double) ((InternalAggregation)global).getProperty("max.value"), 0); - assertEquals(10.0, (double) 
((InternalAggregation)max).getProperty("value"), 0); + assertEquals(10.0, (double) ((InternalAggregation) global).getProperty("max.value"), 0); + assertEquals(10.0, (double) ((InternalAggregation) max).getProperty("value"), 0); indexReader.close(); directory.close(); @@ -489,7 +482,6 @@ public void testSingleValuedFieldPartiallyUnmapped() throws IOException { MultiReader multiReader = new MultiReader(indexReader, unamappedIndexReader); IndexSearcher indexSearcher = newSearcher(multiReader, true, true); - MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("value", NumberFieldMapper.NumberType.INTEGER); AggregationBuilder aggregationBuilder = new MaxAggregationBuilder("max").field("value"); @@ -512,8 +504,7 @@ public void testSingleValuedFieldPartiallyUnmapped() throws IOException { public void testSingleValuedFieldWithValueScript() throws IOException { MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("value", NumberFieldMapper.NumberType.INTEGER); - MaxAggregationBuilder aggregationBuilder = new MaxAggregationBuilder("max") - .field("value") + MaxAggregationBuilder aggregationBuilder = new MaxAggregationBuilder("max").field("value") .script(new Script(ScriptType.INLINE, MockScriptEngine.NAME, VALUE_SCRIPT, Collections.emptyMap())); testAggregation(aggregationBuilder, new MatchAllDocsQuery(), iw -> { @@ -532,8 +523,7 @@ public void testSingleValuedFieldWithValueScriptWithParams() throws IOException MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("value", NumberFieldMapper.NumberType.INTEGER); Map params = Collections.singletonMap("inc", 1); - MaxAggregationBuilder aggregationBuilder = new MaxAggregationBuilder("max") - .field("value") + MaxAggregationBuilder aggregationBuilder = new MaxAggregationBuilder("max").field("value") .script(new Script(ScriptType.INLINE, MockScriptEngine.NAME, VALUE_SCRIPT, params)); testAggregation(aggregationBuilder, new MatchAllDocsQuery(), iw -> { @@ -566,8 +556,7 @@ public void testMultiValuedField() throws IOException { public void testMultiValuedFieldWithValueScript() throws IOException { MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("values", NumberFieldMapper.NumberType.INTEGER); - MaxAggregationBuilder aggregationBuilder = new MaxAggregationBuilder("max") - .field("values") + MaxAggregationBuilder aggregationBuilder = new MaxAggregationBuilder("max").field("values") .script(new Script(ScriptType.INLINE, MockScriptEngine.NAME, VALUE_SCRIPT, Collections.emptyMap())); testAggregation(aggregationBuilder, new MatchAllDocsQuery(), iw -> { @@ -588,8 +577,7 @@ public void testMultiValuedFieldWithValueScriptWithParams() throws IOException { MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("values", NumberFieldMapper.NumberType.INTEGER); Map params = Collections.singletonMap("inc", 1); - MaxAggregationBuilder aggregationBuilder = new MaxAggregationBuilder("max") - .field("values") + MaxAggregationBuilder aggregationBuilder = new MaxAggregationBuilder("max").field("values") .script(new Script(ScriptType.INLINE, MockScriptEngine.NAME, VALUE_SCRIPT, params)); testAggregation(aggregationBuilder, new MatchAllDocsQuery(), iw -> { @@ -609,8 +597,9 @@ public void testMultiValuedFieldWithValueScriptWithParams() throws IOException { public void testScriptSingleValued() throws IOException { MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("value", NumberFieldMapper.NumberType.INTEGER); - MaxAggregationBuilder aggregationBuilder = new MaxAggregationBuilder("max") - 
.script(new Script(ScriptType.INLINE, MockScriptEngine.NAME, VALUE_FIELD_SCRIPT, Collections.emptyMap())); + MaxAggregationBuilder aggregationBuilder = new MaxAggregationBuilder("max").script( + new Script(ScriptType.INLINE, MockScriptEngine.NAME, VALUE_FIELD_SCRIPT, Collections.emptyMap()) + ); testAggregation(aggregationBuilder, new MatchAllDocsQuery(), iw -> { final int numDocs = 10; @@ -630,8 +619,9 @@ public void testScriptSingleValuedWithParams() throws IOException { params.put("inc", 1); params.put("field", "value"); - MaxAggregationBuilder aggregationBuilder = new MaxAggregationBuilder("max") - .script(new Script(ScriptType.INLINE, MockScriptEngine.NAME, SUM_FIELD_PARAMS_SCRIPT, params)); + MaxAggregationBuilder aggregationBuilder = new MaxAggregationBuilder("max").script( + new Script(ScriptType.INLINE, MockScriptEngine.NAME, SUM_FIELD_PARAMS_SCRIPT, params) + ); testAggregation(aggregationBuilder, new MatchAllDocsQuery(), iw -> { final int numDocs = 10; @@ -647,8 +637,9 @@ public void testScriptSingleValuedWithParams() throws IOException { public void testScriptMultiValued() throws IOException { MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("values", NumberFieldMapper.NumberType.INTEGER); - MaxAggregationBuilder aggregationBuilder = new MaxAggregationBuilder("max") - .script(new Script(ScriptType.INLINE, MockScriptEngine.NAME, SUM_VALUES_FIELD_SCRIPT, Collections.emptyMap())); + MaxAggregationBuilder aggregationBuilder = new MaxAggregationBuilder("max").script( + new Script(ScriptType.INLINE, MockScriptEngine.NAME, SUM_VALUES_FIELD_SCRIPT, Collections.emptyMap()) + ); testAggregation(aggregationBuilder, new MatchAllDocsQuery(), iw -> { final int numDocs = 10; @@ -671,8 +662,9 @@ public void testScriptMultiValuedWithParams() throws IOException { MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("values", NumberFieldMapper.NumberType.INTEGER); - MaxAggregationBuilder aggregationBuilder = new MaxAggregationBuilder("max") - .script(new Script(ScriptType.INLINE, MockScriptEngine.NAME, SUM_FIELD_PARAMS_SCRIPT, params)); + MaxAggregationBuilder aggregationBuilder = new MaxAggregationBuilder("max").script( + new Script(ScriptType.INLINE, MockScriptEngine.NAME, SUM_FIELD_PARAMS_SCRIPT, params) + ); testAggregation(aggregationBuilder, new MatchAllDocsQuery(), iw -> { final int numDocs = 10; @@ -729,8 +721,9 @@ public void testOrderByEmptyAggregation() throws IOException { TermsAggregationBuilder aggregationBuilder = new TermsAggregationBuilder("terms").userValueTypeHint(ValueType.NUMERIC) .field("value") .order(BucketOrder.compound(BucketOrder.aggregation("filter>max", true))) - .subAggregation(AggregationBuilders.filter("filter", termQuery("value", 100)) - .subAggregation(AggregationBuilders.max("max").field("value"))); + .subAggregation( + AggregationBuilders.filter("filter", termQuery("value", 100)).subAggregation(AggregationBuilders.max("max").field("value")) + ); Directory directory = newDirectory(); RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory); @@ -790,9 +783,8 @@ public void testEarlyTermination() throws Exception { IndexReader indexReader = DirectoryReader.open(directory); IndexSearcher indexSearcher = newSearcher(indexReader, true, true); - MaxAggregationBuilder maxAggregationBuilder = new MaxAggregationBuilder("max") .field("values"); - ValueCountAggregationBuilder countAggregationBuilder = new ValueCountAggregationBuilder("count") - .field("values"); + MaxAggregationBuilder maxAggregationBuilder = new 
MaxAggregationBuilder("max").field("values"); + ValueCountAggregationBuilder countAggregationBuilder = new ValueCountAggregationBuilder("count").field("values"); MaxAggregator maxAggregator = createAggregator(maxAggregationBuilder, indexSearcher, fieldType); ValueCountAggregator countAggregator = createAggregator(countAggregationBuilder, indexSearcher, fieldType); @@ -836,13 +828,11 @@ public void testNestedEarlyTermination() throws Exception { IndexSearcher indexSearcher = newSearcher(indexReader, true, true); for (Aggregator.SubAggCollectionMode collectionMode : Aggregator.SubAggCollectionMode.values()) { - MaxAggregationBuilder maxAggregationBuilder = new MaxAggregationBuilder("max") - .field("values"); - ValueCountAggregationBuilder countAggregationBuilder = new ValueCountAggregationBuilder("count") - .field("values"); - TermsAggregationBuilder termsAggregationBuilder = new TermsAggregationBuilder("terms") - .userValueTypeHint(ValueType.NUMERIC) - .field("value").collectMode(collectionMode) + MaxAggregationBuilder maxAggregationBuilder = new MaxAggregationBuilder("max").field("values"); + ValueCountAggregationBuilder countAggregationBuilder = new ValueCountAggregationBuilder("count").field("values"); + TermsAggregationBuilder termsAggregationBuilder = new TermsAggregationBuilder("terms").userValueTypeHint(ValueType.NUMERIC) + .field("value") + .collectMode(collectionMode) .subAggregation(new MaxAggregationBuilder("sub_max").field("invalid")); MaxAggregator maxAggregator = createAggregator(maxAggregationBuilder, indexSearcher, multiValuesfieldType); @@ -901,10 +891,8 @@ public void testCacheAggregation() throws IOException { MultiReader multiReader = new MultiReader(indexReader, unamappedIndexReader); IndexSearcher indexSearcher = newSearcher(multiReader, true, true); - MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("value", NumberFieldMapper.NumberType.INTEGER); - MaxAggregationBuilder aggregationBuilder = new MaxAggregationBuilder("max") - .field("value"); + MaxAggregationBuilder aggregationBuilder = new MaxAggregationBuilder("max").field("value"); AggregationContext context = createAggregationContext(indexSearcher, null, fieldType); MaxAggregator aggregator = createAggregator(aggregationBuilder, context); @@ -949,8 +937,7 @@ public void testScriptCaching() throws Exception { IndexSearcher indexSearcher = newSearcher(multiReader, true, true); MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("value", NumberFieldMapper.NumberType.INTEGER); - MaxAggregationBuilder aggregationBuilder = new MaxAggregationBuilder("max") - .field("value") + MaxAggregationBuilder aggregationBuilder = new MaxAggregationBuilder("max").field("value") .script(new Script(ScriptType.INLINE, MockScriptEngine.NAME, VALUE_SCRIPT, Collections.emptyMap())); AggregationContext context = createAggregationContext(indexSearcher, null, fieldType); @@ -968,8 +955,7 @@ public void testScriptCaching() throws Exception { // Test that an aggregation using a script does not get cached assertTrue(context.isCacheable()); - aggregationBuilder = new MaxAggregationBuilder("max") - .field("value") + aggregationBuilder = new MaxAggregationBuilder("max").field("value") .script(new Script(ScriptType.INLINE, MockScriptEngine.NAME, RANDOM_SCRIPT, Collections.emptyMap())); context = createAggregationContext(indexSearcher, null, fieldType); aggregator = createAggregator(aggregationBuilder, context); diff --git 
a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/MedianAbsoluteDeviationAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/MedianAbsoluteDeviationAggregatorTests.java index 97f4c5564d4ec..bbd209ea06b8b 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/MedianAbsoluteDeviationAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/MedianAbsoluteDeviationAggregatorTests.java @@ -16,8 +16,8 @@ import org.apache.lucene.search.DocValuesFieldExistsQuery; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.Query; -import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.NumberFieldMapper; import org.elasticsearch.script.MockScriptEngine; @@ -62,8 +62,9 @@ public class MedianAbsoluteDeviationAggregatorTests extends AggregatorTestCase { private static final String SINGLE_SCRIPT = "single"; private static CheckedConsumer randomSample( - int size, - Function> field) { + int size, + Function> field + ) { return writer -> { for (int i = 0; i < size; i++) { @@ -83,101 +84,82 @@ public void testNoDocs() throws IOException { } public void testNoMatchingField() throws IOException { - testAggregation( - new MatchAllDocsQuery(), - writer -> { - writer.addDocument(singleton(new SortedNumericDocValuesField("wrong_number", 1))); - writer.addDocument(singleton(new SortedNumericDocValuesField("wrong_number", 2))); - }, - agg -> { - assertThat(agg.getMedianAbsoluteDeviation(), equalTo(Double.NaN)); - assertFalse(AggregationInspectionHelper.hasValue(agg)); - } - ); + testAggregation(new MatchAllDocsQuery(), writer -> { + writer.addDocument(singleton(new SortedNumericDocValuesField("wrong_number", 1))); + writer.addDocument(singleton(new SortedNumericDocValuesField("wrong_number", 2))); + }, agg -> { + assertThat(agg.getMedianAbsoluteDeviation(), equalTo(Double.NaN)); + assertFalse(AggregationInspectionHelper.hasValue(agg)); + }); } public void testSomeMatchesSortedNumericDocValues() throws IOException { final int size = randomIntBetween(100, 1000); final List sample = new ArrayList<>(size); - testAggregation( - new DocValuesFieldExistsQuery(FIELD_NAME), - randomSample(size, point -> { - sample.add(point); - return singleton(new SortedNumericDocValuesField(FIELD_NAME, point)); - }), - agg -> { - assertThat(agg.getMedianAbsoluteDeviation(), closeToRelative(calculateMAD(sample))); - assertTrue(AggregationInspectionHelper.hasValue(agg)); - } - ); + testAggregation(new DocValuesFieldExistsQuery(FIELD_NAME), randomSample(size, point -> { + sample.add(point); + return singleton(new SortedNumericDocValuesField(FIELD_NAME, point)); + }), agg -> { + assertThat(agg.getMedianAbsoluteDeviation(), closeToRelative(calculateMAD(sample))); + assertTrue(AggregationInspectionHelper.hasValue(agg)); + }); } public void testSomeMatchesNumericDocValues() throws IOException { final int size = randomIntBetween(100, 1000); final List sample = new ArrayList<>(size); - testAggregation( - new DocValuesFieldExistsQuery(FIELD_NAME), - randomSample(size, point -> { - sample.add(point); - return singleton(new NumericDocValuesField(FIELD_NAME, point)); - }), - agg -> { - assertThat(agg.getMedianAbsoluteDeviation(), closeToRelative(calculateMAD(sample))); - assertTrue(AggregationInspectionHelper.hasValue(agg)); - } - 
); + testAggregation(new DocValuesFieldExistsQuery(FIELD_NAME), randomSample(size, point -> { + sample.add(point); + return singleton(new NumericDocValuesField(FIELD_NAME, point)); + }), agg -> { + assertThat(agg.getMedianAbsoluteDeviation(), closeToRelative(calculateMAD(sample))); + assertTrue(AggregationInspectionHelper.hasValue(agg)); + }); } public void testQueryFiltering() throws IOException { final int lowerRange = 1; final int upperRange = 500; final int[] sample = IntStream.rangeClosed(1, 1000).toArray(); - final int[] filteredSample = Arrays.stream(sample).filter(point -> point >= lowerRange && point <= upperRange).toArray(); - testAggregation( - IntPoint.newRangeQuery(FIELD_NAME, lowerRange, upperRange), - writer -> { - for (int point : sample) { - writer.addDocument(Arrays.asList(new IntPoint(FIELD_NAME, point), new SortedNumericDocValuesField(FIELD_NAME, point))); - } - }, - agg -> { - assertThat(agg.getMedianAbsoluteDeviation(), closeToRelative(calculateMAD(filteredSample))); - assertTrue(AggregationInspectionHelper.hasValue(agg)); + final int[] filteredSample = Arrays.stream(sample).filter(point -> point >= lowerRange && point <= upperRange).toArray(); + testAggregation(IntPoint.newRangeQuery(FIELD_NAME, lowerRange, upperRange), writer -> { + for (int point : sample) { + writer.addDocument(Arrays.asList(new IntPoint(FIELD_NAME, point), new SortedNumericDocValuesField(FIELD_NAME, point))); } - ); + }, agg -> { + assertThat(agg.getMedianAbsoluteDeviation(), closeToRelative(calculateMAD(filteredSample))); + assertTrue(AggregationInspectionHelper.hasValue(agg)); + }); } public void testQueryFiltersAll() throws IOException { - testAggregation( - IntPoint.newRangeQuery(FIELD_NAME, -1, 0), - writer -> { - writer.addDocument(Arrays.asList(new IntPoint(FIELD_NAME, 1), new SortedNumericDocValuesField(FIELD_NAME, 1))); - writer.addDocument(Arrays.asList(new IntPoint(FIELD_NAME, 2), new SortedNumericDocValuesField(FIELD_NAME, 2))); - }, - agg -> { - assertThat(agg.getMedianAbsoluteDeviation(), equalTo(Double.NaN)); - assertFalse(AggregationInspectionHelper.hasValue(agg)); - } - ); + testAggregation(IntPoint.newRangeQuery(FIELD_NAME, -1, 0), writer -> { + writer.addDocument(Arrays.asList(new IntPoint(FIELD_NAME, 1), new SortedNumericDocValuesField(FIELD_NAME, 1))); + writer.addDocument(Arrays.asList(new IntPoint(FIELD_NAME, 2), new SortedNumericDocValuesField(FIELD_NAME, 2))); + }, agg -> { + assertThat(agg.getMedianAbsoluteDeviation(), equalTo(Double.NaN)); + assertFalse(AggregationInspectionHelper.hasValue(agg)); + }); } public void testUnmapped() throws IOException { - MedianAbsoluteDeviationAggregationBuilder aggregationBuilder = new MedianAbsoluteDeviationAggregationBuilder("foo") - .field(FIELD_NAME); + MedianAbsoluteDeviationAggregationBuilder aggregationBuilder = new MedianAbsoluteDeviationAggregationBuilder("foo").field( + FIELD_NAME + ); testAggregation(aggregationBuilder, new DocValuesFieldExistsQuery(FIELD_NAME), iw -> { iw.addDocument(singleton(new NumericDocValuesField(FIELD_NAME, 7))); iw.addDocument(singleton(new NumericDocValuesField(FIELD_NAME, 1))); }, agg -> { - assertEquals(Double.NaN, agg.getMedianAbsoluteDeviation(),0); + assertEquals(Double.NaN, agg.getMedianAbsoluteDeviation(), 0); assertFalse(AggregationInspectionHelper.hasValue(agg)); }); } public void testUnmappedMissing() throws IOException { - MedianAbsoluteDeviationAggregationBuilder aggregationBuilder = new MedianAbsoluteDeviationAggregationBuilder("foo") - .field(FIELD_NAME) - .missing(1234); + 
MedianAbsoluteDeviationAggregationBuilder aggregationBuilder = new MedianAbsoluteDeviationAggregationBuilder("foo").field( + FIELD_NAME + ).missing(1234); testAggregation(aggregationBuilder, new MatchAllDocsQuery(), iw -> { iw.addDocument(singleton(new NumericDocValuesField("unrelatedField", 7))); @@ -190,59 +172,51 @@ public void testUnmappedMissing() throws IOException { } public void testValueScript() throws IOException { - MappedFieldType fieldType - = new NumberFieldMapper.NumberFieldType(FIELD_NAME, NumberFieldMapper.NumberType.LONG); + MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(FIELD_NAME, NumberFieldMapper.NumberType.LONG); - MedianAbsoluteDeviationAggregationBuilder aggregationBuilder = new MedianAbsoluteDeviationAggregationBuilder("foo") - .field(FIELD_NAME) - .script(new Script(ScriptType.INLINE, MockScriptEngine.NAME, VALUE_SCRIPT, Collections.emptyMap())); + MedianAbsoluteDeviationAggregationBuilder aggregationBuilder = new MedianAbsoluteDeviationAggregationBuilder("foo").field( + FIELD_NAME + ).script(new Script(ScriptType.INLINE, MockScriptEngine.NAME, VALUE_SCRIPT, Collections.emptyMap())); final int size = randomIntBetween(100, 1000); final List sample = new ArrayList<>(size); - testAggregation(aggregationBuilder, - new MatchAllDocsQuery(), - randomSample(size, point -> { - sample.add(point); - return singleton(new SortedNumericDocValuesField(FIELD_NAME, point)); - }), - agg -> { - assertThat(agg.getMedianAbsoluteDeviation(), closeToRelative(calculateMAD(sample))); - assertTrue(AggregationInspectionHelper.hasValue(agg)); - }, fieldType); + testAggregation(aggregationBuilder, new MatchAllDocsQuery(), randomSample(size, point -> { + sample.add(point); + return singleton(new SortedNumericDocValuesField(FIELD_NAME, point)); + }), agg -> { + assertThat(agg.getMedianAbsoluteDeviation(), closeToRelative(calculateMAD(sample))); + assertTrue(AggregationInspectionHelper.hasValue(agg)); + }, fieldType); } public void testSingleScript() throws IOException { - MedianAbsoluteDeviationAggregationBuilder aggregationBuilder = new MedianAbsoluteDeviationAggregationBuilder("foo") - .script(new Script(ScriptType.INLINE, MockScriptEngine.NAME, SINGLE_SCRIPT, Collections.emptyMap())); + MedianAbsoluteDeviationAggregationBuilder aggregationBuilder = new MedianAbsoluteDeviationAggregationBuilder("foo").script( + new Script(ScriptType.INLINE, MockScriptEngine.NAME, SINGLE_SCRIPT, Collections.emptyMap()) + ); - MappedFieldType fieldType - = new NumberFieldMapper.NumberFieldType(FIELD_NAME, NumberFieldMapper.NumberType.LONG); + MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(FIELD_NAME, NumberFieldMapper.NumberType.LONG); final int size = randomIntBetween(100, 1000); final List sample = new ArrayList<>(size); - testAggregation(aggregationBuilder, - new MatchAllDocsQuery(), - iw -> { - for (int i = 0; i < 10; i++) { - iw.addDocument(singleton(new NumericDocValuesField(FIELD_NAME, i + 1))); - } - }, - agg -> { - assertEquals(0, agg.getMedianAbsoluteDeviation(), 0); - assertTrue(AggregationInspectionHelper.hasValue(agg)); - }, fieldType); + testAggregation(aggregationBuilder, new MatchAllDocsQuery(), iw -> { + for (int i = 0; i < 10; i++) { + iw.addDocument(singleton(new NumericDocValuesField(FIELD_NAME, i + 1))); + } + }, agg -> { + assertEquals(0, agg.getMedianAbsoluteDeviation(), 0); + assertTrue(AggregationInspectionHelper.hasValue(agg)); + }, fieldType); } - private void testAggregation(Query query, - CheckedConsumer buildIndex, - Consumer verify) throws 
IOException { - MedianAbsoluteDeviationAggregationBuilder builder = new MedianAbsoluteDeviationAggregationBuilder("mad") - .field(FIELD_NAME) + private void testAggregation( + Query query, + CheckedConsumer buildIndex, + Consumer verify + ) throws IOException { + MedianAbsoluteDeviationAggregationBuilder builder = new MedianAbsoluteDeviationAggregationBuilder("mad").field(FIELD_NAME) .compression(randomDoubleBetween(20, 1000, true)); - MappedFieldType fieldType - = new NumberFieldMapper.NumberFieldType(FIELD_NAME, NumberFieldMapper.NumberType.LONG); + MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(FIELD_NAME, NumberFieldMapper.NumberType.LONG); testAggregation(builder, query, buildIndex, verify, fieldType); } @@ -276,11 +250,7 @@ protected boolean matchesSafely(Double actual) { @Override public void describeTo(Description description) { - description - .appendText("within ") - .appendValue(error * 100) - .appendText(" percent of ") - .appendValue(expected); + description.appendText("within ").appendValue(error * 100).appendText(" percent of ").appendValue(expected); } public static IsCloseToRelative closeToRelative(double expected, double error) { @@ -299,29 +269,21 @@ public static IsCloseToRelative closeToRelative(double expected) { public static class ExactMedianAbsoluteDeviation { public static double calculateMAD(int[] sample) { - return calculateMAD(Arrays.stream(sample) - .mapToDouble(point -> (double) point) - .toArray()); + return calculateMAD(Arrays.stream(sample).mapToDouble(point -> (double) point).toArray()); } public static double calculateMAD(long[] sample) { - return calculateMAD(Arrays.stream(sample) - .mapToDouble(point -> (double) point) - .toArray()); + return calculateMAD(Arrays.stream(sample).mapToDouble(point -> (double) point).toArray()); } public static double calculateMAD(List sample) { - return calculateMAD(sample.stream() - .mapToDouble(Long::doubleValue) - .toArray()); + return calculateMAD(sample.stream().mapToDouble(Long::doubleValue).toArray()); } public static double calculateMAD(double[] sample) { final double median = calculateMedian(sample); - final double[] deviations = Arrays.stream(sample) - .map(point -> Math.abs(median - point)) - .toArray(); + final double[] deviations = Arrays.stream(sample).map(point -> Math.abs(median - point)).toArray(); final double mad = calculateMedian(deviations); return mad; @@ -360,9 +322,7 @@ protected ScriptService getMockScriptService() { scripts.put(VALUE_SCRIPT, vars -> ((Number) vars.get("_value")).doubleValue() + 1); scripts.put(SINGLE_SCRIPT, vars -> 1); - MockScriptEngine scriptEngine = new MockScriptEngine(MockScriptEngine.NAME, - scripts, - Collections.emptyMap()); + MockScriptEngine scriptEngine = new MockScriptEngine(MockScriptEngine.NAME, scripts, Collections.emptyMap()); Map engines = Collections.singletonMap(scriptEngine.getType(), scriptEngine); return new ScriptService(Settings.EMPTY, engines, ScriptModule.CORE_CONTEXTS); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/MedianAbsoluteDeviationTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/MedianAbsoluteDeviationTests.java index 4713f0b16ba8b..e9ec317bdc033 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/MedianAbsoluteDeviationTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/MedianAbsoluteDeviationTests.java @@ -12,8 +12,9 @@ public class MedianAbsoluteDeviationTests extends AbstractNumericMetricTestCase< 
@Override protected MedianAbsoluteDeviationAggregationBuilder doCreateTestAggregatorFactory() { - MedianAbsoluteDeviationAggregationBuilder builder = - new MedianAbsoluteDeviationAggregationBuilder(randomAlphaOfLengthBetween(1, 20)); + MedianAbsoluteDeviationAggregationBuilder builder = new MedianAbsoluteDeviationAggregationBuilder( + randomAlphaOfLengthBetween(1, 20) + ); if (randomBoolean()) { builder.compression(randomDoubleBetween(0, 1000.0, false)); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/MetricAggScriptPlugin.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/MetricAggScriptPlugin.java index 7a8f5124fad21..e0e5dd95c8827 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/MetricAggScriptPlugin.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/MetricAggScriptPlugin.java @@ -8,6 +8,10 @@ package org.elasticsearch.search.aggregations.metrics; +import org.elasticsearch.script.MockScriptPlugin; +import org.elasticsearch.search.lookup.LeafDocLookup; +import org.elasticsearch.test.ESTestCase; + import java.util.ArrayList; import java.util.HashMap; import java.util.List; @@ -15,10 +19,6 @@ import java.util.function.BiFunction; import java.util.function.Function; -import org.elasticsearch.script.MockScriptPlugin; -import org.elasticsearch.search.lookup.LeafDocLookup; -import org.elasticsearch.test.ESTestCase; - /** * Provides a number of dummy scripts for tests. * diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/MinAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/MinAggregatorTests.java index 339b99523434e..49824e84f130e 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/MinAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/MinAggregatorTests.java @@ -34,9 +34,9 @@ import org.apache.lucene.search.Query; import org.apache.lucene.store.Directory; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.core.Tuple; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.mapper.IpFieldMapper; import org.elasticsearch.index.mapper.KeywordFieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; @@ -139,10 +139,12 @@ protected ScriptService getMockScriptService() { Map, Object>> nonDeterministicScripts = new HashMap<>(); nonDeterministicScripts.put(RANDOM_SCRIPT, vars -> AggregatorTestCase.randomDouble()); - MockScriptEngine scriptEngine = new MockScriptEngine(MockScriptEngine.NAME, + MockScriptEngine scriptEngine = new MockScriptEngine( + MockScriptEngine.NAME, scripts, nonDeterministicScripts, - Collections.emptyMap()); + Collections.emptyMap() + ); Map engines = Collections.singletonMap(scriptEngine.getType(), scriptEngine); return new ScriptService(Settings.EMPTY, engines, ScriptModule.CORE_CONTEXTS); @@ -255,12 +257,16 @@ public void testUnsupportedType() { MappedFieldType fieldType = new KeywordFieldMapper.KeywordFieldType("not_a_number"); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, - () -> testCase(aggregationBuilder, new MatchAllDocsQuery(), iw -> { - iw.addDocument(singleton(new SortedSetDocValuesField("string", new BytesRef("foo")))); - }, (Consumer) min -> { - fail("Should have thrown exception"); - }, fieldType)); + IllegalArgumentException e = 
expectThrows( + IllegalArgumentException.class, + () -> testCase( + aggregationBuilder, + new MatchAllDocsQuery(), + iw -> { iw.addDocument(singleton(new SortedSetDocValuesField("string", new BytesRef("foo")))); }, + (Consumer) min -> { fail("Should have thrown exception"); }, + fieldType + ) + ); assertEquals("Field [not_a_number] of type [keyword] is not supported for aggregation [min]", e.getMessage()); } @@ -269,13 +275,10 @@ public void testBadMissingField() { MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("number", NumberFieldMapper.NumberType.INTEGER); - expectThrows(NumberFormatException.class, - () -> testCase(aggregationBuilder, new MatchAllDocsQuery(), iw -> { - iw.addDocument(singleton(new NumericDocValuesField("number", 7))); - iw.addDocument(singleton(new NumericDocValuesField("number", 1))); - }, (Consumer) min -> { - fail("Should have thrown exception"); - }, fieldType)); + expectThrows(NumberFormatException.class, () -> testCase(aggregationBuilder, new MatchAllDocsQuery(), iw -> { + iw.addDocument(singleton(new NumericDocValuesField("number", 7))); + iw.addDocument(singleton(new NumericDocValuesField("number", 1))); + }, (Consumer) min -> { fail("Should have thrown exception"); }, fieldType)); } public void testUnmappedWithBadMissingField() { @@ -283,17 +286,16 @@ public void testUnmappedWithBadMissingField() { MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("number", NumberFieldMapper.NumberType.INTEGER); - expectThrows(NumberFormatException.class, - () -> testCase(aggregationBuilder, new MatchAllDocsQuery(), iw -> { - iw.addDocument(singleton(new NumericDocValuesField("number", 7))); - iw.addDocument(singleton(new NumericDocValuesField("number", 1))); - }, (Consumer) min -> { - fail("Should have thrown exception"); - }, fieldType)); + expectThrows(NumberFormatException.class, () -> testCase(aggregationBuilder, new MatchAllDocsQuery(), iw -> { + iw.addDocument(singleton(new NumericDocValuesField("number", 7))); + iw.addDocument(singleton(new NumericDocValuesField("number", 1))); + }, (Consumer) min -> { fail("Should have thrown exception"); }, fieldType)); } public void testEmptyBucket() throws IOException { - HistogramAggregationBuilder histogram = new HistogramAggregationBuilder("histo").field("number").interval(1).minDocCount(0) + HistogramAggregationBuilder histogram = new HistogramAggregationBuilder("histo").field("number") + .interval(1) + .minDocCount(0) .subAggregation(new MinAggregationBuilder("min").field("number")); MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("number", NumberFieldMapper.NumberType.INTEGER); @@ -319,7 +321,6 @@ public void testEmptyBucket() throws IOException { assertEquals(3.0, min.getValue(), 0); assertTrue(AggregationInspectionHelper.hasValue(min)); - }, fieldType); } @@ -339,8 +340,9 @@ public void testFormatter() throws IOException { } public void testGetProperty() throws IOException { - GlobalAggregationBuilder globalBuilder = new GlobalAggregationBuilder("global") - .subAggregation(new MinAggregationBuilder("min").field("number")); + GlobalAggregationBuilder globalBuilder = new GlobalAggregationBuilder("global").subAggregation( + new MinAggregationBuilder("min").field("number") + ); MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("number", NumberFieldMapper.NumberType.INTEGER); @@ -365,20 +367,20 @@ public void testSingleValuedFieldPartiallyUnmapped() throws IOException { MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("number", 
NumberFieldMapper.NumberType.INTEGER); MinAggregationBuilder aggregationBuilder = new MinAggregationBuilder("min").field("number"); - try (Directory directory = newDirectory(); - Directory unmappedDirectory = newDirectory()) { + try (Directory directory = newDirectory(); Directory unmappedDirectory = newDirectory()) { RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory); indexWriter.addDocument(singleton(new NumericDocValuesField("number", 7))); indexWriter.addDocument(singleton(new NumericDocValuesField("number", 2))); indexWriter.addDocument(singleton(new NumericDocValuesField("number", 3))); indexWriter.close(); - RandomIndexWriter unmappedIndexWriter = new RandomIndexWriter(random(), unmappedDirectory); unmappedIndexWriter.close(); - try (IndexReader indexReader = DirectoryReader.open(directory); - IndexReader unamappedIndexReader = DirectoryReader.open(unmappedDirectory)) { + try ( + IndexReader indexReader = DirectoryReader.open(directory); + IndexReader unamappedIndexReader = DirectoryReader.open(unmappedDirectory) + ) { MultiReader multiReader = new MultiReader(indexReader, unamappedIndexReader); IndexSearcher indexSearcher = newSearcher(multiReader, true, true); @@ -395,21 +397,21 @@ public void testSingleValuedFieldPartiallyUnmappedWithMissing() throws IOExcepti MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("number", NumberFieldMapper.NumberType.INTEGER); MinAggregationBuilder aggregationBuilder = new MinAggregationBuilder("min").field("number").missing(-19L); - try (Directory directory = newDirectory(); - Directory unmappedDirectory = newDirectory()) { + try (Directory directory = newDirectory(); Directory unmappedDirectory = newDirectory()) { RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory); indexWriter.addDocument(singleton(new NumericDocValuesField("number", 7))); indexWriter.addDocument(singleton(new NumericDocValuesField("number", 2))); indexWriter.addDocument(singleton(new NumericDocValuesField("number", 3))); indexWriter.close(); - RandomIndexWriter unmappedIndexWriter = new RandomIndexWriter(random(), unmappedDirectory); unmappedIndexWriter.addDocument(singleton(new NumericDocValuesField("unrelated", 100))); unmappedIndexWriter.close(); - try (IndexReader indexReader = DirectoryReader.open(directory); - IndexReader unamappedIndexReader = DirectoryReader.open(unmappedDirectory)) { + try ( + IndexReader indexReader = DirectoryReader.open(directory); + IndexReader unamappedIndexReader = DirectoryReader.open(unmappedDirectory) + ) { MultiReader multiReader = new MultiReader(indexReader, unamappedIndexReader); IndexSearcher indexSearcher = newSearcher(multiReader, true, true); @@ -424,8 +426,7 @@ public void testSingleValuedFieldPartiallyUnmappedWithMissing() throws IOExcepti public void testSingleValuedFieldWithValueScript() throws IOException { MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("number", NumberFieldMapper.NumberType.INTEGER); - MinAggregationBuilder aggregationBuilder = new MinAggregationBuilder("min") - .field("number") + MinAggregationBuilder aggregationBuilder = new MinAggregationBuilder("min").field("number") .script(new Script(ScriptType.INLINE, MockScriptEngine.NAME, INVERT_SCRIPT, Collections.emptyMap())); testCase(aggregationBuilder, new MatchAllDocsQuery(), iw -> { @@ -442,8 +443,7 @@ public void testSingleValuedFieldWithValueScript() throws IOException { public void testSingleValuedFieldWithValueScriptAndMissing() throws IOException { MappedFieldType fieldType = new 
NumberFieldMapper.NumberFieldType("number", NumberFieldMapper.NumberType.INTEGER); - MinAggregationBuilder aggregationBuilder = new MinAggregationBuilder("min") - .field("number") + MinAggregationBuilder aggregationBuilder = new MinAggregationBuilder("min").field("number") .missing(-100L) .script(new Script(ScriptType.INLINE, MockScriptEngine.NAME, INVERT_SCRIPT, Collections.emptyMap())); @@ -462,8 +462,7 @@ public void testSingleValuedFieldWithValueScriptAndMissing() throws IOException public void testSingleValuedFieldWithValueScriptAndParams() throws IOException { MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("number", NumberFieldMapper.NumberType.INTEGER); - MinAggregationBuilder aggregationBuilder = new MinAggregationBuilder("min") - .field("number") + MinAggregationBuilder aggregationBuilder = new MinAggregationBuilder("min").field("number") .script(new Script(ScriptType.INLINE, MockScriptEngine.NAME, VALUE_SCRIPT, Collections.singletonMap("inc", 5))); testCase(aggregationBuilder, new MatchAllDocsQuery(), iw -> { @@ -480,8 +479,9 @@ public void testSingleValuedFieldWithValueScriptAndParams() throws IOException { public void testScript() throws IOException { MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("number", NumberFieldMapper.NumberType.INTEGER); - MinAggregationBuilder aggregationBuilder = new MinAggregationBuilder("min") - .script(new Script(ScriptType.INLINE, MockScriptEngine.NAME, SCRIPT_NAME, Collections.emptyMap())); + MinAggregationBuilder aggregationBuilder = new MinAggregationBuilder("min").script( + new Script(ScriptType.INLINE, MockScriptEngine.NAME, SCRIPT_NAME, Collections.emptyMap()) + ); testCase(aggregationBuilder, new MatchAllDocsQuery(), iw -> { final int numDocs = 10; @@ -514,8 +514,7 @@ public void testMultiValuedField() throws IOException { } public void testMultiValuedFieldWithScript() throws IOException { - MinAggregationBuilder aggregationBuilder = new MinAggregationBuilder("min") - .field("number") + MinAggregationBuilder aggregationBuilder = new MinAggregationBuilder("min").field("number") .script(new Script(ScriptType.INLINE, MockScriptEngine.NAME, INVERT_SCRIPT, Collections.emptyMap())); MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("number", NumberFieldMapper.NumberType.INTEGER); @@ -535,8 +534,7 @@ public void testMultiValuedFieldWithScript() throws IOException { } public void testMultiValuedFieldWithScriptParams() throws IOException { - MinAggregationBuilder aggregationBuilder = new MinAggregationBuilder("min") - .field("number") + MinAggregationBuilder aggregationBuilder = new MinAggregationBuilder("min").field("number") .script(new Script(ScriptType.INLINE, MockScriptEngine.NAME, VALUE_SCRIPT, Collections.singletonMap("inc", 5))); MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("number", NumberFieldMapper.NumberType.INTEGER); @@ -556,11 +554,13 @@ public void testMultiValuedFieldWithScriptParams() throws IOException { } public void testOrderByEmptyAggregation() throws IOException { - AggregationBuilder termsBuilder = new TermsAggregationBuilder("terms") - .field("number") + AggregationBuilder termsBuilder = new TermsAggregationBuilder("terms").field("number") .order(BucketOrder.compound(BucketOrder.aggregation("filter>min", true))) - .subAggregation(new FilterAggregationBuilder("filter", termQuery("number", 100)) - .subAggregation(new MinAggregationBuilder("min").field("number"))); + .subAggregation( + new FilterAggregationBuilder("filter", termQuery("number", 
100)).subAggregation( + new MinAggregationBuilder("min").field("number") + ) + ); MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("number", NumberFieldMapper.NumberType.INTEGER); @@ -601,7 +601,6 @@ public void testCaching() throws IOException { indexWriter.addDocument(singleton(new NumericDocValuesField("number", 3))); indexWriter.close(); - try (IndexReader indexReader = DirectoryReader.open(directory)) { IndexSearcher indexSearcher = newSearcher(indexReader, true, true); @@ -615,12 +614,10 @@ public void testCaching() throws IOException { public void testScriptCaching() throws IOException { MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("number", NumberFieldMapper.NumberType.INTEGER); - MinAggregationBuilder aggregationBuilder = new MinAggregationBuilder("min") - .field("number") + MinAggregationBuilder aggregationBuilder = new MinAggregationBuilder("min").field("number") .script(new Script(ScriptType.INLINE, MockScriptEngine.NAME, INVERT_SCRIPT, Collections.emptyMap())); - MinAggregationBuilder nonDeterministicAggregationBuilder = new MinAggregationBuilder("min") - .field("number") + MinAggregationBuilder nonDeterministicAggregationBuilder = new MinAggregationBuilder("min").field("number") .script(new Script(ScriptType.INLINE, MockScriptEngine.NAME, RANDOM_SCRIPT, Collections.emptyMap())); try (Directory directory = newDirectory()) { @@ -630,7 +627,6 @@ public void testScriptCaching() throws IOException { indexWriter.addDocument(singleton(new NumericDocValuesField("number", 3))); indexWriter.close(); - try (IndexReader indexReader = DirectoryReader.open(directory)) { IndexSearcher indexSearcher = newSearcher(indexReader, true, true); @@ -649,27 +645,25 @@ public void testMinShortcutRandom() throws Exception { testMinShortcutCase( () -> randomLongBetween(Integer.MIN_VALUE, Integer.MAX_VALUE), (n) -> new LongPoint("number", n.longValue()), - (v) -> LongPoint.decodeDimension(v, 0)); + (v) -> LongPoint.decodeDimension(v, 0) + ); - testMinShortcutCase( - () -> randomInt(), - (n) -> new IntPoint("number", n.intValue()), - (v) -> IntPoint.decodeDimension(v, 0)); + testMinShortcutCase(() -> randomInt(), (n) -> new IntPoint("number", n.intValue()), (v) -> IntPoint.decodeDimension(v, 0)); - testMinShortcutCase( - () -> randomFloat(), - (n) -> new FloatPoint("number", n.floatValue()), - (v) -> FloatPoint.decodeDimension(v, 0)); + testMinShortcutCase(() -> randomFloat(), (n) -> new FloatPoint("number", n.floatValue()), (v) -> FloatPoint.decodeDimension(v, 0)); testMinShortcutCase( () -> randomDouble(), (n) -> new DoublePoint("number", n.doubleValue()), - (v) -> DoublePoint.decodeDimension(v, 0)); + (v) -> DoublePoint.decodeDimension(v, 0) + ); } - private void testMinShortcutCase(Supplier randomNumber, - Function pointFieldFunc, - Function pointConvertFunc) throws IOException { + private void testMinShortcutCase( + Supplier randomNumber, + Function pointFieldFunc, + Function pointConvertFunc + ) throws IOException { Directory directory = newDirectory(); IndexWriterConfig config = newIndexWriterConfig().setMergePolicy(NoMergePolicy.INSTANCE); IndexWriter indexWriter = new IndexWriter(directory, config); @@ -687,7 +681,7 @@ private void testMinShortcutCase(Supplier randomNumber, document.add(pointFieldFunc.apply(nextValue)); document.add(pointFieldFunc.apply(nextValue)); documents.add(document); - docID ++; + docID++; } } // insert some documents without a value for the metric field. 
@@ -703,14 +697,14 @@ private void testMinShortcutCase(Supplier randomNumber, assertThat(res, equalTo(values.get(0).v2())); } for (int i = 1; i < values.size(); i++) { - indexWriter.deleteDocuments(new Term("id", values.get(i-1).v1().toString())); + indexWriter.deleteDocuments(new Term("id", values.get(i - 1).v1().toString())); try (IndexReader reader = DirectoryReader.open(indexWriter)) { LeafReaderContext ctx = reader.leaves().get(0); Number res = MinAggregator.findLeafMinValue(ctx.reader(), "number", pointConvertFunc); assertThat(res, equalTo(values.get(i).v2())); } } - indexWriter.deleteDocuments(new Term("id", values.get(values.size()-1).v1().toString())); + indexWriter.deleteDocuments(new Term("id", values.get(values.size() - 1).v1().toString())); try (IndexReader reader = DirectoryReader.open(indexWriter)) { LeafReaderContext ctx = reader.leaves().get(0); Number res = MinAggregator.findLeafMinValue(ctx.reader(), "number", pointConvertFunc); @@ -720,9 +714,8 @@ private void testMinShortcutCase(Supplier randomNumber, directory.close(); } - private void testCase(Query query, - CheckedConsumer buildIndex, - Consumer verify) throws IOException { + private void testCase(Query query, CheckedConsumer buildIndex, Consumer verify) + throws IOException { MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("number", NumberFieldMapper.NumberType.INTEGER); MinAggregationBuilder aggregationBuilder = new MinAggregationBuilder("min").field("number"); testCase(aggregationBuilder, query, buildIndex, verify, fieldType); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/PercentilesMethodTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/PercentilesMethodTests.java index 5f6912a6078cd..60da7b6c5e618 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/PercentilesMethodTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/PercentilesMethodTests.java @@ -10,7 +10,6 @@ import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.search.aggregations.metrics.PercentilesMethod; import org.elasticsearch.test.ESTestCase; import java.io.IOException; @@ -62,7 +61,7 @@ public void testInvalidReadFrom() throws Exception { try (StreamInput in = out.bytes().streamInput()) { PercentilesMethod.readFromStream(in); fail("Expected IOException"); - } catch(IOException e) { + } catch (IOException e) { assertThat(e.getMessage(), containsString("Unknown PercentilesMethod ordinal [")); } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/PercentilesTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/PercentilesTests.java index 2a61810ecda7a..08265565d60f7 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/PercentilesTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/PercentilesTests.java @@ -74,23 +74,25 @@ public void testDuplicatePercentilesThrows() throws IOException { } public void testExceptionMultipleMethods() throws IOException { - final String illegalAgg = "{\n" + - " \"percentiles\": {\n" + - " \"field\": \"load_time\",\n" + - " \"percents\": [99],\n" + - " \"tdigest\": {\n" + - " \"compression\": 200\n" + - " },\n" + - " \"hdr\": {\n" + - " \"number_of_significant_value_digits\": 3\n" + - " }\n" + - " }\n" + - "}"; + final String illegalAgg = "{\n" + + " \"percentiles\": {\n" + + " \"field\": 
\"load_time\",\n" + + " \"percents\": [99],\n" + + " \"tdigest\": {\n" + + " \"compression\": 200\n" + + " },\n" + + " \"hdr\": {\n" + + " \"number_of_significant_value_digits\": 3\n" + + " }\n" + + " }\n" + + "}"; XContentParser parser = createParser(JsonXContent.jsonXContent, illegalAgg); assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken()); assertEquals(XContentParser.Token.FIELD_NAME, parser.nextToken()); - XContentParseException e = expectThrows(XContentParseException.class, - () -> PercentilesAggregationBuilder.PARSER.parse(parser, "myPercentiles")); + XContentParseException e = expectThrows( + XContentParseException.class, + () -> PercentilesAggregationBuilder.PARSER.parse(parser, "myPercentiles") + ); assertThat(e.getMessage(), containsString("[percentiles] failed to parse field [hdr]")); } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricAggregatorTests.java index d7c8368588bba..c92ce332dde7a 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricAggregatorTests.java @@ -17,11 +17,11 @@ import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.store.Directory; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.common.breaker.CircuitBreaker; import org.elasticsearch.common.breaker.CircuitBreakingException; import org.elasticsearch.common.breaker.NoopCircuitBreaker; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.indices.breaker.CircuitBreakerService; import org.elasticsearch.script.MockScriptEngine; import org.elasticsearch.script.Script; @@ -55,36 +55,88 @@ public class ScriptedMetricAggregatorTests extends AggregatorTestCase { private static final String AGG_NAME = "scriptedMetric"; private static final Script INIT_SCRIPT = new Script(ScriptType.INLINE, MockScriptEngine.NAME, "initScript", Collections.emptyMap()); private static final Script MAP_SCRIPT = new Script(ScriptType.INLINE, MockScriptEngine.NAME, "mapScript", Collections.emptyMap()); - private static final Script COMBINE_SCRIPT = new Script(ScriptType.INLINE, MockScriptEngine.NAME, "combineScript", - Collections.emptyMap()); - private static final Script REDUCE_SCRIPT = new Script(ScriptType.INLINE, MockScriptEngine.NAME, "reduceScript", - Collections.emptyMap()); - - private static final Script INIT_SCRIPT_SCORE = new Script(ScriptType.INLINE, MockScriptEngine.NAME, "initScriptScore", - Collections.emptyMap()); - private static final Script MAP_SCRIPT_SCORE = new Script(ScriptType.INLINE, MockScriptEngine.NAME, "mapScriptScore", - Collections.emptyMap()); - private static final Script COMBINE_SCRIPT_SCORE = new Script(ScriptType.INLINE, MockScriptEngine.NAME, "combineScriptScore", - Collections.emptyMap()); - private static final Script COMBINE_SCRIPT_NOOP = new Script(ScriptType.INLINE, MockScriptEngine.NAME, "combineScriptNoop", - Collections.emptyMap()); - - private static final Script INIT_SCRIPT_PARAMS = new Script(ScriptType.INLINE, MockScriptEngine.NAME, "initScriptParams", - Collections.singletonMap("initialValue", 24)); - private static final Script MAP_SCRIPT_PARAMS = new Script(ScriptType.INLINE, MockScriptEngine.NAME, "mapScriptParams", - 
Collections.singletonMap("itemValue", 12)); - private static final Script COMBINE_SCRIPT_PARAMS = new Script(ScriptType.INLINE, MockScriptEngine.NAME, "combineScriptParams", - Collections.singletonMap("multiplier", 4)); - private static final Script REDUCE_SCRIPT_PARAMS = new Script(ScriptType.INLINE, MockScriptEngine.NAME, "reduceScriptParams", - Collections.singletonMap("additional", 2)); + private static final Script COMBINE_SCRIPT = new Script( + ScriptType.INLINE, + MockScriptEngine.NAME, + "combineScript", + Collections.emptyMap() + ); + private static final Script REDUCE_SCRIPT = new Script( + ScriptType.INLINE, + MockScriptEngine.NAME, + "reduceScript", + Collections.emptyMap() + ); + + private static final Script INIT_SCRIPT_SCORE = new Script( + ScriptType.INLINE, + MockScriptEngine.NAME, + "initScriptScore", + Collections.emptyMap() + ); + private static final Script MAP_SCRIPT_SCORE = new Script( + ScriptType.INLINE, + MockScriptEngine.NAME, + "mapScriptScore", + Collections.emptyMap() + ); + private static final Script COMBINE_SCRIPT_SCORE = new Script( + ScriptType.INLINE, + MockScriptEngine.NAME, + "combineScriptScore", + Collections.emptyMap() + ); + private static final Script COMBINE_SCRIPT_NOOP = new Script( + ScriptType.INLINE, + MockScriptEngine.NAME, + "combineScriptNoop", + Collections.emptyMap() + ); + + private static final Script INIT_SCRIPT_PARAMS = new Script( + ScriptType.INLINE, + MockScriptEngine.NAME, + "initScriptParams", + Collections.singletonMap("initialValue", 24) + ); + private static final Script MAP_SCRIPT_PARAMS = new Script( + ScriptType.INLINE, + MockScriptEngine.NAME, + "mapScriptParams", + Collections.singletonMap("itemValue", 12) + ); + private static final Script COMBINE_SCRIPT_PARAMS = new Script( + ScriptType.INLINE, + MockScriptEngine.NAME, + "combineScriptParams", + Collections.singletonMap("multiplier", 4) + ); + private static final Script REDUCE_SCRIPT_PARAMS = new Script( + ScriptType.INLINE, + MockScriptEngine.NAME, + "reduceScriptParams", + Collections.singletonMap("additional", 2) + ); private static final String CONFLICTING_PARAM_NAME = "initialValue"; - private static final Script INIT_SCRIPT_SELF_REF = new Script(ScriptType.INLINE, MockScriptEngine.NAME, "initScriptSelfRef", - Collections.emptyMap()); - private static final Script MAP_SCRIPT_SELF_REF = new Script(ScriptType.INLINE, MockScriptEngine.NAME, "mapScriptSelfRef", - Collections.emptyMap()); - private static final Script COMBINE_SCRIPT_SELF_REF = new Script(ScriptType.INLINE, MockScriptEngine.NAME, "combineScriptSelfRef", - Collections.emptyMap()); + private static final Script INIT_SCRIPT_SELF_REF = new Script( + ScriptType.INLINE, + MockScriptEngine.NAME, + "initScriptSelfRef", + Collections.emptyMap() + ); + private static final Script MAP_SCRIPT_SELF_REF = new Script( + ScriptType.INLINE, + MockScriptEngine.NAME, + "mapScriptSelfRef", + Collections.emptyMap() + ); + private static final Script COMBINE_SCRIPT_SELF_REF = new Script( + ScriptType.INLINE, + MockScriptEngine.NAME, + "combineScriptSelfRef", + Collections.emptyMap() + ); private static final Script INIT_SCRIPT_MAKING_ARRAY = new Script( ScriptType.INLINE, @@ -118,10 +170,7 @@ public static void initMockScripts() { }); SCRIPTS.put("reduceScript", params -> { List states = (List) params.get("states"); - return states.stream() - .filter(a -> a instanceof Number) - .map(a -> (Number) a) - .mapToInt(Number::intValue).sum(); + return states.stream().filter(a -> a instanceof Number).map(a -> (Number) 
a).mapToInt(Number::intValue).sum(); }); SCRIPTS.put("initScriptScore", params -> { @@ -141,7 +190,7 @@ public static void initMockScripts() { SCRIPTS.put("initScriptParams", params -> { Map state = (Map) params.get("state"); - Integer initialValue = (Integer)params.get("initialValue"); + Integer initialValue = (Integer) params.get("initialValue"); ArrayList collector = new ArrayList<>(); collector.add(initialValue); state.put("collector", collector); @@ -158,10 +207,10 @@ public static void initMockScripts() { int multiplier = ((Integer) params.get("multiplier")); return ((List) state.get("collector")).stream().mapToInt(Integer::intValue).map(i -> i * multiplier).sum(); }); - SCRIPTS.put("reduceScriptParams", params -> - ((List)params.get("states")).stream().mapToInt(i -> (int)i).sum() + - (int)params.get("aggs_param") + (int)params.get("additional") - - ((List)params.get("states")).size()*24*4 + SCRIPTS.put( + "reduceScriptParams", + params -> ((List) params.get("states")).stream().mapToInt(i -> (int) i).sum() + (int) params.get("aggs_param") + (int) params + .get("additional") - ((List) params.get("states")).size() * 24 * 4 ); SCRIPTS.put("initScriptSelfRef", params -> { @@ -178,16 +227,16 @@ public static void initMockScripts() { }); SCRIPTS.put("combineScriptSelfRef", params -> { - Map state = (Map) params.get("state"); - state.put("selfRef", state); - return state; + Map state = (Map) params.get("state"); + state.put("selfRef", state); + return state; }); SCRIPTS.put("initScriptMakingArray", params -> { Map state = (Map) params.get("state"); - state.put("array", new String[] {"foo", "bar"}); + state.put("array", new String[] { "foo", "bar" }); state.put("collector", new ArrayList()); return state; - }); + }); } private CircuitBreakerService circuitBreakerService; @@ -224,15 +273,12 @@ protected void afterClose() { @Override protected ScriptService getMockScriptService() { - MockScriptEngine scriptEngine = new MockScriptEngine(MockScriptEngine.NAME, - SCRIPTS, - Collections.emptyMap()); + MockScriptEngine scriptEngine = new MockScriptEngine(MockScriptEngine.NAME, SCRIPTS, Collections.emptyMap()); Map engines = Collections.singletonMap(scriptEngine.getType(), scriptEngine); return new ScriptService(Settings.EMPTY, engines, ScriptModule.CORE_CONTEXTS); } - @SuppressWarnings("unchecked") public void testNoDocs() throws IOException { try (Directory directory = newDirectory()) { @@ -242,8 +288,11 @@ public void testNoDocs() throws IOException { try (IndexReader indexReader = DirectoryReader.open(directory)) { ScriptedMetricAggregationBuilder aggregationBuilder = new ScriptedMetricAggregationBuilder(AGG_NAME); aggregationBuilder.mapScript(MAP_SCRIPT).combineScript(COMBINE_SCRIPT_NOOP).reduceScript(REDUCE_SCRIPT); - ScriptedMetric scriptedMetric = - searchAndReduce(newSearcher(indexReader, true, true), new MatchAllDocsQuery(), aggregationBuilder); + ScriptedMetric scriptedMetric = searchAndReduce( + newSearcher(indexReader, true, true), + new MatchAllDocsQuery(), + aggregationBuilder + ); assertEquals(AGG_NAME, scriptedMetric.getName()); assertNotNull(scriptedMetric.aggregation()); assertEquals(0, scriptedMetric.aggregation()); @@ -262,8 +311,10 @@ public void testScriptedMetricWithoutCombine() throws IOException { try (IndexReader indexReader = DirectoryReader.open(directory)) { ScriptedMetricAggregationBuilder aggregationBuilder = new ScriptedMetricAggregationBuilder(AGG_NAME); aggregationBuilder.initScript(INIT_SCRIPT).mapScript(MAP_SCRIPT).reduceScript(REDUCE_SCRIPT); - 
IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, - () -> searchAndReduce(newSearcher(indexReader, true, true), new MatchAllDocsQuery(), aggregationBuilder)); + IllegalArgumentException exception = expectThrows( + IllegalArgumentException.class, + () -> searchAndReduce(newSearcher(indexReader, true, true), new MatchAllDocsQuery(), aggregationBuilder) + ); assertEquals(exception.getMessage(), "[combineScript] must not be null: [scriptedMetric]"); } } @@ -280,8 +331,10 @@ public void testScriptedMetricWithoutReduce() throws IOException { try (IndexReader indexReader = DirectoryReader.open(directory)) { ScriptedMetricAggregationBuilder aggregationBuilder = new ScriptedMetricAggregationBuilder(AGG_NAME); aggregationBuilder.initScript(INIT_SCRIPT).mapScript(MAP_SCRIPT).combineScript(COMBINE_SCRIPT); - IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, - () -> searchAndReduce(newSearcher(indexReader, true, true), new MatchAllDocsQuery(), aggregationBuilder)); + IllegalArgumentException exception = expectThrows( + IllegalArgumentException.class, + () -> searchAndReduce(newSearcher(indexReader, true, true), new MatchAllDocsQuery(), aggregationBuilder) + ); assertEquals(exception.getMessage(), "[reduceScript] must not be null: [scriptedMetric]"); } } @@ -300,10 +353,12 @@ public void testScriptedMetricWithCombine() throws IOException { } try (IndexReader indexReader = DirectoryReader.open(directory)) { ScriptedMetricAggregationBuilder aggregationBuilder = new ScriptedMetricAggregationBuilder(AGG_NAME); - aggregationBuilder.initScript(INIT_SCRIPT).mapScript(MAP_SCRIPT) - .combineScript(COMBINE_SCRIPT).reduceScript(REDUCE_SCRIPT); - ScriptedMetric scriptedMetric = - searchAndReduce(newSearcher(indexReader, true, true), new MatchAllDocsQuery(), aggregationBuilder); + aggregationBuilder.initScript(INIT_SCRIPT).mapScript(MAP_SCRIPT).combineScript(COMBINE_SCRIPT).reduceScript(REDUCE_SCRIPT); + ScriptedMetric scriptedMetric = searchAndReduce( + newSearcher(indexReader, true, true), + new MatchAllDocsQuery(), + aggregationBuilder + ); assertEquals(AGG_NAME, scriptedMetric.getName()); assertNotNull(scriptedMetric.aggregation()); assertEquals(numDocs, scriptedMetric.aggregation()); @@ -324,10 +379,15 @@ public void testScriptedMetricWithCombineAccessesScores() throws IOException { } try (IndexReader indexReader = DirectoryReader.open(directory)) { ScriptedMetricAggregationBuilder aggregationBuilder = new ScriptedMetricAggregationBuilder(AGG_NAME); - aggregationBuilder.initScript(INIT_SCRIPT_SCORE).mapScript(MAP_SCRIPT_SCORE) - .combineScript(COMBINE_SCRIPT_SCORE).reduceScript(REDUCE_SCRIPT); - ScriptedMetric scriptedMetric = - searchAndReduce(newSearcher(indexReader, true, true), new MatchAllDocsQuery(), aggregationBuilder); + aggregationBuilder.initScript(INIT_SCRIPT_SCORE) + .mapScript(MAP_SCRIPT_SCORE) + .combineScript(COMBINE_SCRIPT_SCORE) + .reduceScript(REDUCE_SCRIPT); + ScriptedMetric scriptedMetric = searchAndReduce( + newSearcher(indexReader, true, true), + new MatchAllDocsQuery(), + aggregationBuilder + ); assertEquals(AGG_NAME, scriptedMetric.getName()); assertNotNull(scriptedMetric.aggregation()); // all documents have score of 1.0 @@ -348,10 +408,15 @@ public void testScriptParamsPassedThrough() throws IOException { try (IndexReader indexReader = DirectoryReader.open(directory)) { ScriptedMetricAggregationBuilder aggregationBuilder = new ScriptedMetricAggregationBuilder(AGG_NAME); - 
aggregationBuilder.initScript(INIT_SCRIPT_PARAMS).mapScript(MAP_SCRIPT_PARAMS) - .combineScript(COMBINE_SCRIPT_PARAMS).reduceScript(REDUCE_SCRIPT); - ScriptedMetric scriptedMetric = - searchAndReduce(newSearcher(indexReader, true, true), new MatchAllDocsQuery(), aggregationBuilder); + aggregationBuilder.initScript(INIT_SCRIPT_PARAMS) + .mapScript(MAP_SCRIPT_PARAMS) + .combineScript(COMBINE_SCRIPT_PARAMS) + .reduceScript(REDUCE_SCRIPT); + ScriptedMetric scriptedMetric = searchAndReduce( + newSearcher(indexReader, true, true), + new MatchAllDocsQuery(), + aggregationBuilder + ); // The result value depends on the script params. assertEquals(4896, scriptedMetric.aggregation()); @@ -362,7 +427,7 @@ public void testScriptParamsPassedThrough() throws IOException { public void testAggParamsPassedToReduceScript() throws IOException { MockScriptEngine scriptEngine = new MockScriptEngine(MockScriptEngine.NAME, SCRIPTS, Collections.emptyMap()); Map engines = Collections.singletonMap(scriptEngine.getType(), scriptEngine); - ScriptService scriptService = new ScriptService(Settings.EMPTY, engines, ScriptModule.CORE_CONTEXTS); + ScriptService scriptService = new ScriptService(Settings.EMPTY, engines, ScriptModule.CORE_CONTEXTS); try (Directory directory = newDirectory()) { try (RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory)) { @@ -374,10 +439,16 @@ public void testAggParamsPassedToReduceScript() throws IOException { try (IndexReader indexReader = DirectoryReader.open(directory)) { ScriptedMetricAggregationBuilder aggregationBuilder = new ScriptedMetricAggregationBuilder(AGG_NAME); aggregationBuilder.params(Collections.singletonMap("aggs_param", 1)) - .initScript(INIT_SCRIPT_PARAMS).mapScript(MAP_SCRIPT_PARAMS) - .combineScript(COMBINE_SCRIPT_PARAMS).reduceScript(REDUCE_SCRIPT_PARAMS); + .initScript(INIT_SCRIPT_PARAMS) + .mapScript(MAP_SCRIPT_PARAMS) + .combineScript(COMBINE_SCRIPT_PARAMS) + .reduceScript(REDUCE_SCRIPT_PARAMS); ScriptedMetric scriptedMetric = searchAndReduce( - newSearcher(indexReader, true, true), new MatchAllDocsQuery(), aggregationBuilder, 0); + newSearcher(indexReader, true, true), + new MatchAllDocsQuery(), + aggregationBuilder, + 0 + ); // The result value depends on the script params. 
assertEquals(4803, scriptedMetric.aggregation()); @@ -396,14 +467,20 @@ public void testConflictingAggAndScriptParams() throws IOException { try (IndexReader indexReader = DirectoryReader.open(directory)) { ScriptedMetricAggregationBuilder aggregationBuilder = new ScriptedMetricAggregationBuilder(AGG_NAME); Map aggParams = Collections.singletonMap(CONFLICTING_PARAM_NAME, "blah"); - aggregationBuilder.params(aggParams).initScript(INIT_SCRIPT_PARAMS).mapScript(MAP_SCRIPT_PARAMS) - .combineScript(COMBINE_SCRIPT_PARAMS).reduceScript(REDUCE_SCRIPT); - - IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, () -> - searchAndReduce(newSearcher(indexReader, true, true), new MatchAllDocsQuery(), aggregationBuilder) + aggregationBuilder.params(aggParams) + .initScript(INIT_SCRIPT_PARAMS) + .mapScript(MAP_SCRIPT_PARAMS) + .combineScript(COMBINE_SCRIPT_PARAMS) + .reduceScript(REDUCE_SCRIPT); + + IllegalArgumentException ex = expectThrows( + IllegalArgumentException.class, + () -> searchAndReduce(newSearcher(indexReader, true, true), new MatchAllDocsQuery(), aggregationBuilder) + ); + assertEquals( + "Parameter name \"" + CONFLICTING_PARAM_NAME + "\" used in both aggregation and script parameters", + ex.getMessage() ); - assertEquals("Parameter name \"" + CONFLICTING_PARAM_NAME + "\" used in both aggregation and script parameters", - ex.getMessage()); } } } @@ -415,11 +492,14 @@ public void testSelfReferencingAggStateAfterInit() throws IOException { } try (IndexReader indexReader = DirectoryReader.open(directory)) { ScriptedMetricAggregationBuilder aggregationBuilder = new ScriptedMetricAggregationBuilder(AGG_NAME); - aggregationBuilder.initScript(INIT_SCRIPT_SELF_REF).mapScript(MAP_SCRIPT) - .combineScript(COMBINE_SCRIPT_PARAMS).reduceScript(REDUCE_SCRIPT); + aggregationBuilder.initScript(INIT_SCRIPT_SELF_REF) + .mapScript(MAP_SCRIPT) + .combineScript(COMBINE_SCRIPT_PARAMS) + .reduceScript(REDUCE_SCRIPT); - IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, () -> - searchAndReduce(newSearcher(indexReader, true, true), new MatchAllDocsQuery(), aggregationBuilder) + IllegalArgumentException ex = expectThrows( + IllegalArgumentException.class, + () -> searchAndReduce(newSearcher(indexReader, true, true), new MatchAllDocsQuery(), aggregationBuilder) ); assertEquals("Iterable object is self-referencing itself (Scripted metric aggs init script)", ex.getMessage()); } @@ -436,11 +516,14 @@ public void testSelfReferencingAggStateAfterMap() throws IOException { } try (IndexReader indexReader = DirectoryReader.open(directory)) { ScriptedMetricAggregationBuilder aggregationBuilder = new ScriptedMetricAggregationBuilder(AGG_NAME); - aggregationBuilder.initScript(INIT_SCRIPT).mapScript(MAP_SCRIPT_SELF_REF) - .combineScript(COMBINE_SCRIPT_PARAMS).reduceScript(REDUCE_SCRIPT); - - IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, () -> - searchAndReduce(newSearcher(indexReader, true, true), new MatchAllDocsQuery(), aggregationBuilder) + aggregationBuilder.initScript(INIT_SCRIPT) + .mapScript(MAP_SCRIPT_SELF_REF) + .combineScript(COMBINE_SCRIPT_PARAMS) + .reduceScript(REDUCE_SCRIPT); + + IllegalArgumentException ex = expectThrows( + IllegalArgumentException.class, + () -> searchAndReduce(newSearcher(indexReader, true, true), new MatchAllDocsQuery(), aggregationBuilder) ); assertEquals("Iterable object is self-referencing itself (Scripted metric aggs map script)", ex.getMessage()); } @@ -454,11 +537,14 @@ public void 
testSelfReferencingAggStateAfterCombine() throws IOException { } try (IndexReader indexReader = DirectoryReader.open(directory)) { ScriptedMetricAggregationBuilder aggregationBuilder = new ScriptedMetricAggregationBuilder(AGG_NAME); - aggregationBuilder.initScript(INIT_SCRIPT).mapScript(MAP_SCRIPT) - .combineScript(COMBINE_SCRIPT_SELF_REF).reduceScript(REDUCE_SCRIPT); + aggregationBuilder.initScript(INIT_SCRIPT) + .mapScript(MAP_SCRIPT) + .combineScript(COMBINE_SCRIPT_SELF_REF) + .reduceScript(REDUCE_SCRIPT); - IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, () -> - searchAndReduce(newSearcher(indexReader, true, true), new MatchAllDocsQuery(), aggregationBuilder) + IllegalArgumentException ex = expectThrows( + IllegalArgumentException.class, + () -> searchAndReduce(newSearcher(indexReader, true, true), new MatchAllDocsQuery(), aggregationBuilder) ); assertEquals("Iterable object is self-referencing itself (Scripted metric aggs combine script)", ex.getMessage()); } @@ -467,17 +553,21 @@ public void testSelfReferencingAggStateAfterCombine() throws IOException { public void testInitScriptMakesArray() throws IOException { ScriptedMetricAggregationBuilder aggregationBuilder = new ScriptedMetricAggregationBuilder(AGG_NAME); - aggregationBuilder.initScript(INIT_SCRIPT_MAKING_ARRAY).mapScript(MAP_SCRIPT) - .combineScript(COMBINE_SCRIPT).reduceScript(REDUCE_SCRIPT); - testCase(aggregationBuilder, new MatchAllDocsQuery(), iw -> { - iw.addDocument(new Document()); - }, (InternalScriptedMetric r) -> { - assertEquals(1, r.aggregation()); - }); + aggregationBuilder.initScript(INIT_SCRIPT_MAKING_ARRAY) + .mapScript(MAP_SCRIPT) + .combineScript(COMBINE_SCRIPT) + .reduceScript(REDUCE_SCRIPT); + testCase( + aggregationBuilder, + new MatchAllDocsQuery(), + iw -> { iw.addDocument(new Document()); }, + (InternalScriptedMetric r) -> { assertEquals(1, r.aggregation()); } + ); } public void testAsSubAgg() throws IOException { - AggregationBuilder aggregationBuilder = new TermsAggregationBuilder("t").field("t").executionHint("map") + AggregationBuilder aggregationBuilder = new TermsAggregationBuilder("t").field("t") + .executionHint("map") .subAggregation( new ScriptedMetricAggregationBuilder("scripted").initScript(INIT_SCRIPT) .mapScript(MAP_SCRIPT) diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricTests.java index cb45f9d8c3073..99e9e23159515 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricTests.java @@ -45,7 +45,11 @@ private Script randomScript(String script) { } else { ScriptType type = randomFrom(ScriptType.values()); return new Script( - type, type == ScriptType.STORED ? null : randomFrom("my_lang", Script.DEFAULT_SCRIPT_LANG), script, Collections.emptyMap()); + type, + type == ScriptType.STORED ? 
null : randomFrom("my_lang", Script.DEFAULT_SCRIPT_LANG), + script, + Collections.emptyMap() + ); } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/StatsAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/StatsAggregatorTests.java index c8cadaa308cf4..cce5f5cfb6afb 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/StatsAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/StatsAggregatorTests.java @@ -18,8 +18,8 @@ import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.store.Directory; import org.apache.lucene.util.NumericUtils; -import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.NumberFieldMapper; import org.elasticsearch.index.mapper.NumberFieldMapper.NumberType; @@ -64,70 +64,56 @@ public class StatsAggregatorTests extends AggregatorTestCase { public void testEmpty() throws IOException { final MappedFieldType ft = new NumberFieldMapper.NumberFieldType("field", NumberType.LONG); - testCase( - stats("_name").field(ft.name()), - iw -> {}, - stats -> { - assertEquals(0d, stats.getCount(), 0); - assertEquals(0d, stats.getSum(), 0); - assertEquals(Float.NaN, stats.getAvg(), 0); - assertEquals(Double.POSITIVE_INFINITY, stats.getMin(), 0); - assertEquals(Double.NEGATIVE_INFINITY, stats.getMax(), 0); - assertFalse(AggregationInspectionHelper.hasValue(stats)); - }, - ft - ); + testCase(stats("_name").field(ft.name()), iw -> {}, stats -> { + assertEquals(0d, stats.getCount(), 0); + assertEquals(0d, stats.getSum(), 0); + assertEquals(Float.NaN, stats.getAvg(), 0); + assertEquals(Double.POSITIVE_INFINITY, stats.getMin(), 0); + assertEquals(Double.NEGATIVE_INFINITY, stats.getMax(), 0); + assertFalse(AggregationInspectionHelper.hasValue(stats)); + }, ft); } public void testRandomDoubles() throws IOException { final MappedFieldType ft = new NumberFieldMapper.NumberFieldType("field", NumberType.DOUBLE); final SimpleStatsAggregator expected = new SimpleStatsAggregator(); - testCase( - stats("_name").field(ft.name()), - iw -> { - int numDocs = randomIntBetween(10, 50); - for (int i = 0; i < numDocs; i++) { - Document doc = new Document(); - int numValues = randomIntBetween(1, 5); - for (int j = 0; j < numValues; j++) { - double value = randomDoubleBetween(-100d, 100d, true); - long valueAsLong = NumericUtils.doubleToSortableLong(value); - doc.add(new SortedNumericDocValuesField(ft.name(), valueAsLong)); - expected.add(value); - } - iw.addDocument(doc); + testCase(stats("_name").field(ft.name()), iw -> { + int numDocs = randomIntBetween(10, 50); + for (int i = 0; i < numDocs; i++) { + Document doc = new Document(); + int numValues = randomIntBetween(1, 5); + for (int j = 0; j < numValues; j++) { + double value = randomDoubleBetween(-100d, 100d, true); + long valueAsLong = NumericUtils.doubleToSortableLong(value); + doc.add(new SortedNumericDocValuesField(ft.name(), valueAsLong)); + expected.add(value); } - }, - stats -> { - assertEquals(expected.count, stats.getCount(), 0); - assertEquals(expected.sum, stats.getSum(), TOLERANCE); - assertEquals(expected.min, stats.getMin(), 0); - assertEquals(expected.max, stats.getMax(), 0); - assertEquals(expected.sum / expected.count, stats.getAvg(), TOLERANCE); - assertTrue(AggregationInspectionHelper.hasValue(stats)); - }, 
- ft - ); + iw.addDocument(doc); + } + }, stats -> { + assertEquals(expected.count, stats.getCount(), 0); + assertEquals(expected.sum, stats.getSum(), TOLERANCE); + assertEquals(expected.min, stats.getMin(), 0); + assertEquals(expected.max, stats.getMax(), 0); + assertEquals(expected.sum / expected.count, stats.getAvg(), TOLERANCE); + assertTrue(AggregationInspectionHelper.hasValue(stats)); + }, ft); } public void testRandomLongs() throws IOException { - randomLongsTestCase( - randomIntBetween(1, 5), - stats("_name").field("field"), - (expected, stats) -> { - assertEquals(expected.count, stats.getCount(), 0); - assertEquals(expected.sum, stats.getSum(), TOLERANCE); - assertEquals(expected.min, stats.getMin(), 0); - assertEquals(expected.max, stats.getMax(), 0); - assertEquals(expected.sum / expected.count, stats.getAvg(), TOLERANCE); - assertTrue(AggregationInspectionHelper.hasValue(stats)); - } - ); + randomLongsTestCase(randomIntBetween(1, 5), stats("_name").field("field"), (expected, stats) -> { + assertEquals(expected.count, stats.getCount(), 0); + assertEquals(expected.sum, stats.getSum(), TOLERANCE); + assertEquals(expected.min, stats.getMin(), 0); + assertEquals(expected.max, stats.getMax(), 0); + assertEquals(expected.sum / expected.count, stats.getAvg(), TOLERANCE); + assertTrue(AggregationInspectionHelper.hasValue(stats)); + }); } public void testSummationAccuracy() throws IOException { // Summing up a normal array and expect an accurate value - double[] values = new double[]{0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1.0, 1.1, 1.2, 1.3, 1.4, 1.5, 1.6, 1.7}; + double[] values = new double[] { 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1.0, 1.1, 1.2, 1.3, 1.4, 1.5, 1.6, 1.7 }; verifySummationOfDoubles(values, 15.3, 0.9, 0d, values.length * TOLERANCE); // Summing up an array which contains NaN and infinities and expect a result same as naive summation @@ -173,64 +159,52 @@ private void verifySummationOfDoubles( } double expectedMax = max; double expectedMin = min; - testCase( - stats("_name").field(ft.name()), - iw -> { - List> docs = new ArrayList<>(); - for (double value : values) { - docs.add(singletonList(new NumericDocValuesField(ft.name(), NumericUtils.doubleToSortableLong(value)))); - } - iw.addDocuments(docs); - }, - stats -> { - assertEquals(values.length, stats.getCount()); - assertEquals(expectedAvg, stats.getAvg(), singleSegmentDelta); - assertEquals(expectedSum, stats.getSum(), singleSegmentDelta); - assertEquals(expectedMax, stats.getMax(), 0d); - assertEquals(expectedMin, stats.getMin(), 0d); - assertTrue(AggregationInspectionHelper.hasValue(stats)); - }, - ft - ); - testCase( - stats("_name").field(ft.name()), - iw -> { - for (double value : values) { - iw.addDocument(singletonList(new NumericDocValuesField(ft.name(), NumericUtils.doubleToSortableLong(value)))); - } - }, - stats -> { - assertEquals(values.length, stats.getCount()); - assertEquals(expectedAvg, stats.getAvg(), manySegmentDelta); - assertEquals(expectedSum, stats.getSum(), manySegmentDelta); - assertEquals(expectedMax, stats.getMax(), 0d); - assertEquals(expectedMin, stats.getMin(), 0d); - assertTrue(AggregationInspectionHelper.hasValue(stats)); - }, - ft - ); + testCase(stats("_name").field(ft.name()), iw -> { + List> docs = new ArrayList<>(); + for (double value : values) { + docs.add(singletonList(new NumericDocValuesField(ft.name(), NumericUtils.doubleToSortableLong(value)))); + } + iw.addDocuments(docs); + }, stats -> { + assertEquals(values.length, stats.getCount()); + 
assertEquals(expectedAvg, stats.getAvg(), singleSegmentDelta); + assertEquals(expectedSum, stats.getSum(), singleSegmentDelta); + assertEquals(expectedMax, stats.getMax(), 0d); + assertEquals(expectedMin, stats.getMin(), 0d); + assertTrue(AggregationInspectionHelper.hasValue(stats)); + }, ft); + testCase(stats("_name").field(ft.name()), iw -> { + for (double value : values) { + iw.addDocument(singletonList(new NumericDocValuesField(ft.name(), NumericUtils.doubleToSortableLong(value)))); + } + }, stats -> { + assertEquals(values.length, stats.getCount()); + assertEquals(expectedAvg, stats.getAvg(), manySegmentDelta); + assertEquals(expectedSum, stats.getSum(), manySegmentDelta); + assertEquals(expectedMax, stats.getMax(), 0d); + assertEquals(expectedMin, stats.getMin(), 0d); + assertTrue(AggregationInspectionHelper.hasValue(stats)); + }, ft); } public void testUnmapped() throws IOException { - randomLongsTestCase( - randomIntBetween(1, 5), - stats("_name").field("unmapped_field"), - (expected, stats) -> { - assertEquals(0d, stats.getCount(), 0); - assertEquals(0d, stats.getSum(), 0); - assertEquals(Float.NaN, stats.getAvg(), 0); - assertEquals(Double.POSITIVE_INFINITY, stats.getMin(), 0); - assertEquals(Double.NEGATIVE_INFINITY, stats.getMax(), 0); - assertFalse(AggregationInspectionHelper.hasValue(stats)); - } - ); + randomLongsTestCase(randomIntBetween(1, 5), stats("_name").field("unmapped_field"), (expected, stats) -> { + assertEquals(0d, stats.getCount(), 0); + assertEquals(0d, stats.getSum(), 0); + assertEquals(Float.NaN, stats.getAvg(), 0); + assertEquals(Double.POSITIVE_INFINITY, stats.getMin(), 0); + assertEquals(Double.NEGATIVE_INFINITY, stats.getMax(), 0); + assertFalse(AggregationInspectionHelper.hasValue(stats)); + }); } public void testPartiallyUnmapped() throws IOException { - try (Directory mappedDirectory = newDirectory(); - Directory unmappedDirectory = newDirectory(); - RandomIndexWriter mappedWriter = new RandomIndexWriter(random(), mappedDirectory); - RandomIndexWriter unmappedWriter = new RandomIndexWriter(random(), unmappedDirectory)) { + try ( + Directory mappedDirectory = newDirectory(); + Directory unmappedDirectory = newDirectory(); + RandomIndexWriter mappedWriter = new RandomIndexWriter(random(), mappedDirectory); + RandomIndexWriter unmappedWriter = new RandomIndexWriter(random(), unmappedDirectory) + ) { final MappedFieldType ft = new NumberFieldMapper.NumberFieldType("field", NumberType.LONG); final SimpleStatsAggregator expected = new SimpleStatsAggregator(); @@ -242,9 +216,11 @@ public void testPartiallyUnmapped() throws IOException { } final StatsAggregationBuilder builder = stats("_name").field(ft.name()); - try (IndexReader mappedReader = mappedWriter.getReader(); - IndexReader unmappedReader = unmappedWriter.getReader(); - MultiReader multiReader = new MultiReader(mappedReader, unmappedReader)) { + try ( + IndexReader mappedReader = mappedWriter.getReader(); + IndexReader unmappedReader = unmappedWriter.getReader(); + MultiReader multiReader = new MultiReader(mappedReader, unmappedReader) + ) { final IndexSearcher searcher = new IndexSearcher(multiReader); final InternalStats stats = searchAndReduce(searcher, new MatchAllDocsQuery(), builder, ft); @@ -262,9 +238,7 @@ public void testPartiallyUnmapped() throws IOException { public void testValueScriptSingleValuedField() throws IOException { randomLongsTestCase( 1, - stats("_name") - .field("field") - .script(new Script(ScriptType.INLINE, MockScriptEngine.NAME, VALUE_SCRIPT_NAME, emptyMap())), + 
stats("_name").field("field").script(new Script(ScriptType.INLINE, MockScriptEngine.NAME, VALUE_SCRIPT_NAME, emptyMap())), (expected, stats) -> { final SimpleStatsAggregator adjusted = new SimpleStatsAggregator( expected.count, @@ -287,9 +261,7 @@ public void testValueScriptMultiValuedField() throws IOException { final int valuesPerField = randomIntBetween(2, 5); randomLongsTestCase( valuesPerField, - stats("_name") - .field("field") - .script(new Script(ScriptType.INLINE, MockScriptEngine.NAME, VALUE_SCRIPT_NAME, emptyMap())), + stats("_name").field("field").script(new Script(ScriptType.INLINE, MockScriptEngine.NAME, VALUE_SCRIPT_NAME, emptyMap())), (expected, stats) -> { final SimpleStatsAggregator adjusted = new SimpleStatsAggregator( expected.count, @@ -311,8 +283,7 @@ public void testValueScriptMultiValuedField() throws IOException { public void testFieldScriptSingleValuedField() throws IOException { randomLongsTestCase( 1, - stats("_name") - .script(new Script(ScriptType.INLINE, MockScriptEngine.NAME, FIELD_SCRIPT_NAME, singletonMap("field", "field"))), + stats("_name").script(new Script(ScriptType.INLINE, MockScriptEngine.NAME, FIELD_SCRIPT_NAME, singletonMap("field", "field"))), (expected, stats) -> { final SimpleStatsAggregator adjusted = new SimpleStatsAggregator( expected.count, @@ -335,8 +306,7 @@ public void testFieldScriptMultiValuedField() throws IOException { final int valuesPerField = randomIntBetween(2, 5); randomLongsTestCase( valuesPerField, - stats("_name") - .script(new Script(ScriptType.INLINE, MockScriptEngine.NAME, FIELD_SCRIPT_NAME, singletonMap("field", "field"))), + stats("_name").script(new Script(ScriptType.INLINE, MockScriptEngine.NAME, FIELD_SCRIPT_NAME, singletonMap("field", "field"))), (expected, stats) -> { final SimpleStatsAggregator adjusted = new SimpleStatsAggregator( expected.count, @@ -374,46 +344,35 @@ public void testMissing() throws IOException { } } - testCase( - stats("_name") - .field(ft.name()) - .missing(missingValue), - iw -> iw.addDocuments(docs), - stats -> { - assertEquals(expected.count, stats.getCount(), 0); - assertEquals(expected.sum, stats.getSum(), TOLERANCE); - assertEquals(expected.max, stats.getMax(), 0); - assertEquals(expected.min, stats.getMin(), 0); - assertEquals(expected.sum / expected.count, stats.getAvg(), TOLERANCE); - assertTrue(AggregationInspectionHelper.hasValue(stats)); - }, - ft - ); + testCase(stats("_name").field(ft.name()).missing(missingValue), iw -> iw.addDocuments(docs), stats -> { + assertEquals(expected.count, stats.getCount(), 0); + assertEquals(expected.sum, stats.getSum(), TOLERANCE); + assertEquals(expected.max, stats.getMax(), 0); + assertEquals(expected.min, stats.getMin(), 0); + assertEquals(expected.sum / expected.count, stats.getAvg(), TOLERANCE); + assertTrue(AggregationInspectionHelper.hasValue(stats)); + }, ft); } public void testMissingUnmapped() throws IOException { final int valuesPerField = randomIntBetween(1, 5); final long missingValue = randomLongBetween(-100, 100); - randomLongsTestCase( - valuesPerField, - stats("_name") - .field("unknown_field") - .missing(missingValue), - (expected, stats) -> { - final long numDocs = expected.count / valuesPerField; - assertEquals(numDocs, stats.getCount()); - assertEquals(numDocs * missingValue, stats.getSum(), TOLERANCE); - assertEquals(missingValue, stats.getMax(), 0); - assertEquals(missingValue, stats.getMin(), 0); - assertEquals(missingValue, stats.getAvg(), TOLERANCE); - assertTrue(AggregationInspectionHelper.hasValue(stats)); - } - ); + 
randomLongsTestCase(valuesPerField, stats("_name").field("unknown_field").missing(missingValue), (expected, stats) -> { + final long numDocs = expected.count / valuesPerField; + assertEquals(numDocs, stats.getCount()); + assertEquals(numDocs * missingValue, stats.getSum(), TOLERANCE); + assertEquals(missingValue, stats.getMax(), 0); + assertEquals(missingValue, stats.getMin(), 0); + assertEquals(missingValue, stats.getAvg(), TOLERANCE); + assertTrue(AggregationInspectionHelper.hasValue(stats)); + }); } - private void randomLongsTestCase(int valuesPerField, - StatsAggregationBuilder builder, - BiConsumer verify) throws IOException { + private void randomLongsTestCase( + int valuesPerField, + StatsAggregationBuilder builder, + BiConsumer verify + ) throws IOException { final MappedFieldType ft = new NumberFieldMapper.NumberFieldType("field", NumberType.LONG); @@ -422,24 +381,19 @@ private void randomLongsTestCase(int valuesPerField, final SimpleStatsAggregator expected = new SimpleStatsAggregator(); for (int iDoc = 0; iDoc < numDocs; iDoc++) { List values = randomList(valuesPerField, valuesPerField, () -> randomLongBetween(-100, 100)); - docs.add(values.stream() - .map(value -> new SortedNumericDocValuesField(ft.name(), value)) - .collect(toSet())); + docs.add(values.stream().map(value -> new SortedNumericDocValuesField(ft.name(), value)).collect(toSet())); values.forEach(expected::add); } - testCase( - builder, - iw -> iw.addDocuments(docs), - stats -> verify.accept(expected, stats), - ft - ); + testCase(builder, iw -> iw.addDocuments(docs), stats -> verify.accept(expected, stats), ft); } - private void testCase(StatsAggregationBuilder builder, - CheckedConsumer buildIndex, - Consumer verify, - MappedFieldType... fieldTypes) throws IOException { + private void testCase( + StatsAggregationBuilder builder, + CheckedConsumer buildIndex, + Consumer verify, + MappedFieldType... 
fieldTypes + ) throws IOException { testCase(builder, new MatchAllDocsQuery(), buildIndex, verify, fieldTypes); } @@ -459,7 +413,7 @@ static class SimpleStatsAggregator { } void add(double value) { - count ++; + count++; if (Double.compare(value, min) < 0) { min = value; } @@ -472,27 +426,24 @@ void add(double value) { @Override protected List getSupportedValuesSourceTypes() { - return List.of(CoreValuesSourceType.NUMERIC, - CoreValuesSourceType.BOOLEAN, - CoreValuesSourceType.DATE); + return List.of(CoreValuesSourceType.NUMERIC, CoreValuesSourceType.BOOLEAN, CoreValuesSourceType.DATE); } @Override protected AggregationBuilder createAggBuilderForTypeTest(MappedFieldType fieldType, String fieldName) { - return new StatsAggregationBuilder("_name") - .field(fieldName); + return new StatsAggregationBuilder("_name").field(fieldName); } @Override protected ScriptService getMockScriptService() { final Map, Object>> scripts = Map.of( - VALUE_SCRIPT_NAME, vars -> ((Number) vars.get("_value")).doubleValue() + 1, - FIELD_SCRIPT_NAME, vars -> { + VALUE_SCRIPT_NAME, + vars -> ((Number) vars.get("_value")).doubleValue() + 1, + FIELD_SCRIPT_NAME, + vars -> { final String fieldName = (String) vars.get("field"); final LeafDocLookup lookup = (LeafDocLookup) vars.get("doc"); - return lookup.get(fieldName).stream() - .map(value -> ((Number) value).longValue() + 1) - .collect(toList()); + return lookup.get(fieldName).stream().map(value -> ((Number) value).longValue() + 1).collect(toList()); } ); final MockScriptEngine engine = new MockScriptEngine(MockScriptEngine.NAME, scripts, emptyMap()); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/SumAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/SumAggregatorTests.java index 815c2d2702e3a..58074ca1efe33 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/SumAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/SumAggregatorTests.java @@ -26,9 +26,9 @@ import org.apache.lucene.store.Directory; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.NumericUtils; -import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.common.TriConsumer; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.NumberFieldMapper; import org.elasticsearch.index.mapper.NumberFieldMapper.NumberType; @@ -112,10 +112,8 @@ public void testNumericDocValues() throws IOException { public void testSortedNumericDocValues() throws IOException { testAggregation(new DocValuesFieldExistsQuery(FIELD_NAME), iw -> { - iw.addDocument(Arrays.asList(new SortedNumericDocValuesField(FIELD_NAME, 3), - new SortedNumericDocValuesField(FIELD_NAME, 4))); - iw.addDocument(Arrays.asList(new SortedNumericDocValuesField(FIELD_NAME, 3), - new SortedNumericDocValuesField(FIELD_NAME, 4))); + iw.addDocument(Arrays.asList(new SortedNumericDocValuesField(FIELD_NAME, 3), new SortedNumericDocValuesField(FIELD_NAME, 4))); + iw.addDocument(Arrays.asList(new SortedNumericDocValuesField(FIELD_NAME, 3), new SortedNumericDocValuesField(FIELD_NAME, 4))); iw.addDocument(singleton(new SortedNumericDocValuesField(FIELD_NAME, 1))); }, count -> { assertEquals(15L, count.getValue(), 0d); @@ -138,20 +136,25 @@ public void testQueryFiltering() throws IOException { public void testStringField() throws IOException { IllegalStateException e 
= expectThrows(IllegalStateException.class, () -> { - testAggregation(new MatchAllDocsQuery(), iw -> { - iw.addDocument(singleton(new SortedDocValuesField(FIELD_NAME, new BytesRef("1")))); - }, count -> { - assertEquals(0L, count.getValue(), 0d); - assertFalse(AggregationInspectionHelper.hasValue(count)); - }); + testAggregation( + new MatchAllDocsQuery(), + iw -> { iw.addDocument(singleton(new SortedDocValuesField(FIELD_NAME, new BytesRef("1")))); }, + count -> { + assertEquals(0L, count.getValue(), 0d); + assertFalse(AggregationInspectionHelper.hasValue(count)); + } + ); }); - assertEquals("unexpected docvalues type SORTED for field 'field' (expected one of [SORTED_NUMERIC, NUMERIC]). " + - "Re-index with correct docvalues type.", e.getMessage()); + assertEquals( + "unexpected docvalues type SORTED for field 'field' (expected one of [SORTED_NUMERIC, NUMERIC]). " + + "Re-index with correct docvalues type.", + e.getMessage() + ); } public void testSummationAccuracy() throws IOException { // Summing up a normal array and expect an accurate value - double[] values = new double[]{0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1.0, 1.1, 1.2, 1.3, 1.4, 1.5, 1.6, 1.7}; + double[] values = new double[] { 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1.0, 1.1, 1.2, 1.3, 1.4, 1.5, 1.6, 1.7 }; verifySummationOfDoubles(values, 15.3, Double.MIN_NORMAL); // Summing up an array which contains NaN and infinities and expect a result same as naive summation @@ -181,47 +184,38 @@ public void testSummationAccuracy() throws IOException { } private void verifySummationOfDoubles(double[] values, double expected, double delta) throws IOException { - testAggregation( - sum("_name").field(FIELD_NAME), - new MatchAllDocsQuery(), - iw -> { - /* - * The sum agg uses a Kahan summation on the shard to limit - * floating point errors. But it doesn't ship the sums to the - * coordinating node, so floating point error can creep in when - * reducing many sums. The test framework aggregates each - * segment as though it were a separate shard, then reduces - * those together. Fun. But it means we don't get the full - * accuracy of the Kahan summation. And *that* accuracy is - * what this method is trying to test. So we have to stick - * all the documents on the same leaf. `addDocuments` does - * that. - */ - iw.addDocuments(Arrays.stream(values).mapToObj(value -> - singleton(new NumericDocValuesField(FIELD_NAME, NumericUtils.doubleToSortableLong(value))) - ).collect(toList())); - }, - result -> assertEquals(expected, result.getValue(), delta), - defaultFieldType(NumberType.DOUBLE) - ); + testAggregation(sum("_name").field(FIELD_NAME), new MatchAllDocsQuery(), iw -> { + /* + * The sum agg uses a Kahan summation on the shard to limit + * floating point errors. But it doesn't ship the sums to the + * coordinating node, so floating point error can creep in when + * reducing many sums. The test framework aggregates each + * segment as though it were a separate shard, then reduces + * those together. Fun. But it means we don't get the full + * accuracy of the Kahan summation. And *that* accuracy is + * what this method is trying to test. So we have to stick + * all the documents on the same leaf. `addDocuments` does
+ */ + iw.addDocuments( + Arrays.stream(values) + .mapToObj(value -> singleton(new NumericDocValuesField(FIELD_NAME, NumericUtils.doubleToSortableLong(value)))) + .collect(toList()) + ); + }, result -> assertEquals(expected, result.getValue(), delta), defaultFieldType(NumberType.DOUBLE)); } public void testUnmapped() throws IOException { - sumRandomDocsTestCase(randomIntBetween(1, 5), - sum("_name").field("unknown_field"), - (sum, docs, result) -> { - assertEquals(0d, result.getValue(), 0d); - assertFalse(AggregationInspectionHelper.hasValue(result)); - } - ); + sumRandomDocsTestCase(randomIntBetween(1, 5), sum("_name").field("unknown_field"), (sum, docs, result) -> { + assertEquals(0d, result.getValue(), 0d); + assertFalse(AggregationInspectionHelper.hasValue(result)); + }); } public void testPartiallyUnmapped() throws IOException { - final MappedFieldType fieldType - = new NumberFieldMapper.NumberFieldType(FIELD_NAME, NumberType.LONG); + final MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(FIELD_NAME, NumberType.LONG); - final SumAggregationBuilder builder = sum("_name") - .field(fieldType.name()); + final SumAggregationBuilder builder = sum("_name").field(fieldType.name()); final int numDocs = randomIntBetween(10, 100); final List> docs = new ArrayList<>(numDocs); @@ -239,9 +233,11 @@ public void testPartiallyUnmapped() throws IOException { new RandomIndexWriter(random(), unmappedDirectory).close(); - try (IndexReader mappedReader = DirectoryReader.open(mappedDirectory); - IndexReader unmappedReader = DirectoryReader.open(unmappedDirectory); - MultiReader multiReader = new MultiReader(mappedReader, unmappedReader)) { + try ( + IndexReader mappedReader = DirectoryReader.open(mappedDirectory); + IndexReader unmappedReader = DirectoryReader.open(unmappedDirectory); + MultiReader multiReader = new MultiReader(mappedReader, unmappedReader) + ) { final IndexSearcher searcher = newSearcher(multiReader, true, true); @@ -253,10 +249,9 @@ public void testPartiallyUnmapped() throws IOException { } public void testValueScriptSingleValuedField() throws IOException { - sumRandomDocsTestCase(1, - sum("_name") - .field(FIELD_NAME) - .script(new Script(ScriptType.INLINE, MockScriptEngine.NAME, VALUE_SCRIPT_NAME, emptyMap())), + sumRandomDocsTestCase( + 1, + sum("_name").field(FIELD_NAME).script(new Script(ScriptType.INLINE, MockScriptEngine.NAME, VALUE_SCRIPT_NAME, emptyMap())), (sum, docs, result) -> { assertEquals(sum + docs.size(), result.getValue(), 0d); assertTrue(AggregationInspectionHelper.hasValue(result)); @@ -266,10 +261,9 @@ public void testValueScriptSingleValuedField() throws IOException { public void testValueScriptMultiValuedField() throws IOException { final int valuesPerField = randomIntBetween(2, 5); - sumRandomDocsTestCase(valuesPerField, - sum("_name") - .field(FIELD_NAME) - .script(new Script(ScriptType.INLINE, MockScriptEngine.NAME, VALUE_SCRIPT_NAME, emptyMap())), + sumRandomDocsTestCase( + valuesPerField, + sum("_name").field(FIELD_NAME).script(new Script(ScriptType.INLINE, MockScriptEngine.NAME, VALUE_SCRIPT_NAME, emptyMap())), (sum, docs, result) -> { assertEquals(sum + (docs.size() * valuesPerField), result.getValue(), 0d); assertTrue(AggregationInspectionHelper.hasValue(result)); @@ -278,9 +272,9 @@ public void testValueScriptMultiValuedField() throws IOException { } public void testFieldScriptSingleValuedField() throws IOException { - sumRandomDocsTestCase(1, - sum("_name") - .script(new Script(ScriptType.INLINE, MockScriptEngine.NAME, FIELD_SCRIPT_NAME, 
singletonMap("field", FIELD_NAME))), + sumRandomDocsTestCase( + 1, + sum("_name").script(new Script(ScriptType.INLINE, MockScriptEngine.NAME, FIELD_SCRIPT_NAME, singletonMap("field", FIELD_NAME))), (sum, docs, result) -> { assertEquals(sum + docs.size(), result.getValue(), 0d); assertTrue(AggregationInspectionHelper.hasValue(result)); @@ -290,9 +284,9 @@ public void testFieldScriptSingleValuedField() throws IOException { public void testFieldScriptMultiValuedField() throws IOException { final int valuesPerField = randomIntBetween(2, 5); - sumRandomDocsTestCase(valuesPerField, - sum("_name") - .script(new Script(ScriptType.INLINE, MockScriptEngine.NAME, FIELD_SCRIPT_NAME, singletonMap("field", FIELD_NAME))), + sumRandomDocsTestCase( + valuesPerField, + sum("_name").script(new Script(ScriptType.INLINE, MockScriptEngine.NAME, FIELD_SCRIPT_NAME, singletonMap("field", FIELD_NAME))), (sum, docs, result) -> { assertEquals(sum + (docs.size() * valuesPerField), result.getValue(), 0d); assertTrue(AggregationInspectionHelper.hasValue(result)); @@ -302,8 +296,7 @@ public void testFieldScriptMultiValuedField() throws IOException { public void testMissing() throws IOException { final MappedFieldType aggField = defaultFieldType(); - final MappedFieldType irrelevantField - = new NumberFieldMapper.NumberFieldType("irrelevant_field", NumberType.LONG); + final MappedFieldType irrelevantField = new NumberFieldMapper.NumberFieldType("irrelevant_field", NumberType.LONG); final int numDocs = randomIntBetween(10, 100); final long missingValue = randomLongBetween(1, 1000); @@ -328,26 +321,25 @@ public void testMissing() throws IOException { internalSum -> { assertEquals(finalSum, internalSum.getValue(), 0d); assertTrue(AggregationInspectionHelper.hasValue(internalSum)); - }, aggField, irrelevantField + }, + aggField, + irrelevantField ); } public void testMissingUnmapped() throws IOException { final long missingValue = randomLongBetween(1, 1000); - sumRandomDocsTestCase(randomIntBetween(1, 5), - sum("_name") - .field("unknown_field") - .missing(missingValue), - (sum, docs, result) -> { - assertEquals(docs.size() * missingValue, result.getValue(), 0d); - assertTrue(AggregationInspectionHelper.hasValue(result)); - } - ); + sumRandomDocsTestCase(randomIntBetween(1, 5), sum("_name").field("unknown_field").missing(missingValue), (sum, docs, result) -> { + assertEquals(docs.size() * missingValue, result.getValue(), 0d); + assertTrue(AggregationInspectionHelper.hasValue(result)); + }); } - private void sumRandomDocsTestCase(int valuesPerField, - SumAggregationBuilder builder, - TriConsumer>, InternalSum> verify) throws IOException { + private void sumRandomDocsTestCase( + int valuesPerField, + SumAggregationBuilder builder, + TriConsumer>, InternalSum> verify + ) throws IOException { final MappedFieldType fieldType = defaultFieldType(); @@ -374,43 +366,42 @@ private void sumRandomDocsTestCase(int valuesPerField, ); } - private void testAggregation(Query query, - CheckedConsumer indexer, - Consumer verify) throws IOException { + private void testAggregation(Query query, CheckedConsumer indexer, Consumer verify) + throws IOException { AggregationBuilder aggregationBuilder = sum("_name").field(FIELD_NAME); - testAggregation( aggregationBuilder, query, indexer, verify, defaultFieldType()); + testAggregation(aggregationBuilder, query, indexer, verify, defaultFieldType()); } - - private void testAggregation(AggregationBuilder aggregationBuilder, Query query, + private void testAggregation( + AggregationBuilder 
aggregationBuilder, + Query query, CheckedConsumer indexer, - Consumer verify, MappedFieldType... fieldTypes) throws IOException { + Consumer verify, + MappedFieldType... fieldTypes + ) throws IOException { testCase(aggregationBuilder, query, indexer, verify, fieldTypes); } @Override protected List getSupportedValuesSourceTypes() { - return List.of(CoreValuesSourceType.NUMERIC, - CoreValuesSourceType.BOOLEAN, - CoreValuesSourceType.DATE); + return List.of(CoreValuesSourceType.NUMERIC, CoreValuesSourceType.BOOLEAN, CoreValuesSourceType.DATE); } @Override protected AggregationBuilder createAggBuilderForTypeTest(MappedFieldType fieldType, String fieldName) { - return new SumAggregationBuilder("_name") - .field(fieldName); + return new SumAggregationBuilder("_name").field(fieldName); } @Override protected ScriptService getMockScriptService() { final Map, Object>> scripts = Map.of( - VALUE_SCRIPT_NAME, vars -> ((Number) vars.get("_value")).doubleValue() + 1, - FIELD_SCRIPT_NAME, vars -> { + VALUE_SCRIPT_NAME, + vars -> ((Number) vars.get("_value")).doubleValue() + 1, + FIELD_SCRIPT_NAME, + vars -> { final String fieldName = (String) vars.get("field"); final LeafDocLookup lookup = (LeafDocLookup) vars.get("doc"); - return lookup.get(fieldName).stream() - .map(value -> ((Number) value).longValue() + 1) - .collect(toList()); + return lookup.get(fieldName).stream().map(value -> ((Number) value).longValue() + 1).collect(toList()); } ); final MockScriptEngine engine = new MockScriptEngine(MockScriptEngine.NAME, scripts, emptyMap()); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentileRanksAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentileRanksAggregatorTests.java index d387e2db57e8e..6b99e20f4a399 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentileRanksAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentileRanksAggregatorTests.java @@ -30,27 +30,22 @@ import java.util.Iterator; import java.util.List; - public class TDigestPercentileRanksAggregatorTests extends AggregatorTestCase { @Override protected AggregationBuilder createAggBuilderForTypeTest(MappedFieldType fieldType, String fieldName) { - return new PercentileRanksAggregationBuilder("tdigest_ranks", new double[]{0.1, 0.5, 12}) - .field(fieldName) + return new PercentileRanksAggregationBuilder("tdigest_ranks", new double[] { 0.1, 0.5, 12 }).field(fieldName) .percentilesConfig(new PercentilesConfig.TDigest()); } @Override protected List getSupportedValuesSourceTypes() { - return List.of(CoreValuesSourceType.NUMERIC, - CoreValuesSourceType.DATE, - CoreValuesSourceType.BOOLEAN); + return List.of(CoreValuesSourceType.NUMERIC, CoreValuesSourceType.DATE, CoreValuesSourceType.BOOLEAN); } public void testEmpty() throws IOException { - PercentileRanksAggregationBuilder aggBuilder = new PercentileRanksAggregationBuilder("my_agg", new double[]{0.5}) - .field("field") - .method(PercentilesMethod.TDIGEST); + PercentileRanksAggregationBuilder aggBuilder = new PercentileRanksAggregationBuilder("my_agg", new double[] { 0.5 }).field("field") + .method(PercentilesMethod.TDIGEST); MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("field", NumberFieldMapper.NumberType.DOUBLE); try (IndexReader reader = new MultiReader()) { IndexSearcher searcher = new IndexSearcher(reader); @@ -58,22 +53,21 @@ public void testEmpty() throws IOException { Percentile 
rank = ranks.iterator().next(); assertEquals(Double.NaN, rank.getPercent(), 0d); assertEquals(0.5, rank.getValue(), 0d); - assertFalse(AggregationInspectionHelper.hasValue(((InternalTDigestPercentileRanks)ranks))); + assertFalse(AggregationInspectionHelper.hasValue(((InternalTDigestPercentileRanks) ranks))); } } public void testSimple() throws IOException { - try (Directory dir = newDirectory(); - RandomIndexWriter w = new RandomIndexWriter(random(), dir)) { - for (double value : new double[] {3, 0.2, 10}) { + try (Directory dir = newDirectory(); RandomIndexWriter w = new RandomIndexWriter(random(), dir)) { + for (double value : new double[] { 3, 0.2, 10 }) { Document doc = new Document(); doc.add(new SortedNumericDocValuesField("field", NumericUtils.doubleToSortableLong(value))); w.addDocument(doc); } - PercentileRanksAggregationBuilder aggBuilder = new PercentileRanksAggregationBuilder("my_agg", new double[]{0.1, 0.5, 12}) - .field("field") - .method(PercentilesMethod.TDIGEST); + PercentileRanksAggregationBuilder aggBuilder = new PercentileRanksAggregationBuilder("my_agg", new double[] { 0.1, 0.5, 12 }) + .field("field") + .method(PercentilesMethod.TDIGEST); MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("field", NumberFieldMapper.NumberType.DOUBLE); try (IndexReader reader = w.getReader()) { IndexSearcher searcher = new IndexSearcher(reader); @@ -94,20 +88,24 @@ public void testSimple() throws IOException { // https://github.com/elastic/elasticsearch/issues/14851 // assertThat(rank.getPercent(), Matchers.equalTo(100d)); assertFalse(rankIterator.hasNext()); - assertTrue(AggregationInspectionHelper.hasValue(((InternalTDigestPercentileRanks)ranks))); + assertTrue(AggregationInspectionHelper.hasValue(((InternalTDigestPercentileRanks) ranks))); } } } public void testNullValues() throws IOException { - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, - () -> new PercentileRanksAggregationBuilder("my_agg", null).field("field").method(PercentilesMethod.TDIGEST)); + IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, + () -> new PercentileRanksAggregationBuilder("my_agg", null).field("field").method(PercentilesMethod.TDIGEST) + ); assertThat(e.getMessage(), Matchers.equalTo("[values] must not be null: [my_agg]")); } public void testEmptyValues() throws IOException { - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, - () -> new PercentileRanksAggregationBuilder("my_agg", new double[0]).field("field").method(PercentilesMethod.TDIGEST)); + IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, + () -> new PercentileRanksAggregationBuilder("my_agg", new double[0]).field("field").method(PercentilesMethod.TDIGEST) + ); assertThat(e.getMessage(), Matchers.equalTo("[values] must not be an empty array: [my_agg]")); } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentilesAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentilesAggregatorTests.java index eeb9885414d2f..c50aee1b430b5 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentilesAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentilesAggregatorTests.java @@ -41,16 +41,12 @@ public class TDigestPercentilesAggregatorTests extends AggregatorTestCase { @Override protected AggregationBuilder createAggBuilderForTypeTest(MappedFieldType 
fieldType, String fieldName) { - return new PercentilesAggregationBuilder("tdist_percentiles") - .field(fieldName) - .percentilesConfig(new PercentilesConfig.TDigest()); + return new PercentilesAggregationBuilder("tdist_percentiles").field(fieldName).percentilesConfig(new PercentilesConfig.TDigest()); } @Override protected List getSupportedValuesSourceTypes() { - return List.of(CoreValuesSourceType.NUMERIC, - CoreValuesSourceType.DATE, - CoreValuesSourceType.BOOLEAN); + return List.of(CoreValuesSourceType.NUMERIC, CoreValuesSourceType.DATE, CoreValuesSourceType.BOOLEAN); } public void testNoDocs() throws IOException { @@ -150,18 +146,22 @@ public void testQueryFiltering() throws IOException { public void testTdigestThenHdrSettings() throws Exception { int sigDigits = randomIntBetween(1, 5); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> { - percentiles("percentiles") - .compression(100.0) + percentiles("percentiles").compression(100.0) .method(PercentilesMethod.TDIGEST) .numberOfSignificantValueDigits(sigDigits) // <-- this should trigger an exception .field("value"); }); - assertThat(e.getMessage(), equalTo("Cannot set [numberOfSignificantValueDigits] because the " + - "method has already been configured for TDigest")); + assertThat( + e.getMessage(), + equalTo("Cannot set [numberOfSignificantValueDigits] because the " + "method has already been configured for TDigest") + ); } - private void testCase(Query query, CheckedConsumer buildIndex, - Consumer verify) throws IOException { + private void testCase( + Query query, + CheckedConsumer buildIndex, + Consumer verify + ) throws IOException { try (Directory directory = newDirectory()) { try (RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory)) { buildIndex.accept(indexWriter); @@ -179,8 +179,7 @@ private void testCase(Query query, CheckedConsumer 2 * Integer.MAX_VALUE); - final double[] quantiles = new double[] { 0, 0.1, 0.5, 0.9, 1, randomDouble()}; + final double[] quantiles = new double[] { 0, 0.1, 0.5, 0.9, 1, randomDouble() }; Arrays.sort(quantiles); double prev = Double.NEGATIVE_INFINITY; for (double q : quantiles) { diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregatorTests.java index f969f24d55ae1..b4961891c98c1 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregatorTests.java @@ -60,14 +60,14 @@ public void testTopLevel() throws Exception { assertEquals("3", searchHits.getAt(0).getId()); assertEquals("2", searchHits.getAt(1).getId()); assertEquals("1", searchHits.getAt(2).getId()); - assertTrue(AggregationInspectionHelper.hasValue(((InternalTopHits)result))); + assertTrue(AggregationInspectionHelper.hasValue(((InternalTopHits) result))); } public void testNoResults() throws Exception { TopHits result = (TopHits) testCase(new MatchNoDocsQuery(), topHits("_name").sort("string", SortOrder.DESC)); SearchHits searchHits = result.getHits(); assertEquals(0L, searchHits.getTotalHits().value); - assertFalse(AggregationInspectionHelper.hasValue(((InternalTopHits)result))); + assertFalse(AggregationInspectionHelper.hasValue(((InternalTopHits) result))); } /** @@ -77,14 +77,13 @@ public void testNoResults() throws Exception { public void testInsideTerms() throws Exception { Aggregation result; if 
(randomBoolean()) { - result = testCase(new MatchAllDocsQuery(), - terms("term").field("string") - .subAggregation(topHits("top").sort("string", SortOrder.DESC))); + result = testCase( + new MatchAllDocsQuery(), + terms("term").field("string").subAggregation(topHits("top").sort("string", SortOrder.DESC)) + ); } else { Query query = new QueryParser("string", new KeywordAnalyzer()).parse("d^1000 c^100 b^10 a^1"); - result = testCase(query, - terms("term").field("string") - .subAggregation(topHits("top"))); + result = testCase(query, terms("term").field("string").subAggregation(topHits("top"))); } Terms terms = (Terms) result; @@ -148,9 +147,12 @@ private Document document(String id, String... stringValues) { public void testSetScorer() throws Exception { Directory directory = newDirectory(); - IndexWriter w = new IndexWriter(directory, newIndexWriterConfig() + IndexWriter w = new IndexWriter( + directory, + newIndexWriterConfig() // only merge adjacent segments - .setMergePolicy(newLogMergePolicy())); + .setMergePolicy(newLogMergePolicy()) + ); // first window (see BooleanScorer) has matches on one clause only for (int i = 0; i < 2048; ++i) { Document doc = new Document(); @@ -178,10 +180,9 @@ public void testSetScorer() throws Exception { w.close(); IndexSearcher searcher = new IndexSearcher(reader); - Query query = new BooleanQuery.Builder() - .add(new TermQuery(new Term("string", "bar")), Occur.SHOULD) - .add(new TermQuery(new Term("string", "baz")), Occur.SHOULD) - .build(); + Query query = new BooleanQuery.Builder().add(new TermQuery(new Term("string", "bar")), Occur.SHOULD) + .add(new TermQuery(new Term("string", "baz")), Occur.SHOULD) + .build(); AggregationBuilder agg = AggregationBuilders.topHits("top_hits"); TopHits result = searchAndReduce(searcher, query, agg, STRING_FIELD_TYPE); assertEquals(3, result.getHits().getTotalHits().value); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/TopHitsTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/TopHitsTests.java index bbd07e2f05c87..b343a34210636 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/TopHitsTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/TopHitsTests.java @@ -114,8 +114,11 @@ protected final TopHitsAggregationBuilder createTestAggregatorBuilder() { fetchSourceContext = new FetchSourceContext(true, includes, excludes); break; case 2: - fetchSourceContext = new FetchSourceContext(true, new String[]{randomAlphaOfLengthBetween(5, 20)}, - new String[]{randomAlphaOfLengthBetween(5, 20)}); + fetchSourceContext = new FetchSourceContext( + true, + new String[] { randomAlphaOfLengthBetween(5, 20) }, + new String[] { randomAlphaOfLengthBetween(5, 20) } + ); break; case 3: fetchSourceContext = new FetchSourceContext(true, includes, excludes); @@ -124,7 +127,7 @@ protected final TopHitsAggregationBuilder createTestAggregatorBuilder() { fetchSourceContext = new FetchSourceContext(true, includes, null); break; case 5: - fetchSourceContext = new FetchSourceContext(true, new String[] {randomAlphaOfLengthBetween(5, 20)}, null); + fetchSourceContext = new FetchSourceContext(true, new String[] { randomAlphaOfLengthBetween(5, 20) }, null); break; default: throw new IllegalStateException(); @@ -136,57 +139,59 @@ protected final TopHitsAggregationBuilder createTestAggregatorBuilder() { for (int i = 0; i < numSorts; i++) { int branch = randomInt(5); switch (branch) { - case 0: - 
factory.sort(SortBuilders.fieldSort(randomAlphaOfLengthBetween(5, 20)).order(randomFrom(SortOrder.values()))); - break; - case 1: - factory.sort(SortBuilders.geoDistanceSort(randomAlphaOfLengthBetween(5, 20), AbstractQueryTestCase.randomGeohash(1, 12)) - .order(randomFrom(SortOrder.values()))); - break; - case 2: - factory.sort(SortBuilders.scoreSort().order(randomFrom(SortOrder.values()))); - break; - case 3: - factory.sort(SortBuilders.scriptSort(mockScript("foo"), ScriptSortType.NUMBER).order(randomFrom(SortOrder.values()))); - break; - case 4: - factory.sort(randomAlphaOfLengthBetween(5, 20)); - break; - case 5: - factory.sort(randomAlphaOfLengthBetween(5, 20), randomFrom(SortOrder.values())); - break; + case 0: + factory.sort(SortBuilders.fieldSort(randomAlphaOfLengthBetween(5, 20)).order(randomFrom(SortOrder.values()))); + break; + case 1: + factory.sort( + SortBuilders.geoDistanceSort(randomAlphaOfLengthBetween(5, 20), AbstractQueryTestCase.randomGeohash(1, 12)) + .order(randomFrom(SortOrder.values())) + ); + break; + case 2: + factory.sort(SortBuilders.scoreSort().order(randomFrom(SortOrder.values()))); + break; + case 3: + factory.sort( + SortBuilders.scriptSort(mockScript("foo"), ScriptSortType.NUMBER).order(randomFrom(SortOrder.values())) + ); + break; + case 4: + factory.sort(randomAlphaOfLengthBetween(5, 20)); + break; + case 5: + factory.sort(randomAlphaOfLengthBetween(5, 20), randomFrom(SortOrder.values())); + break; } } } if (randomBoolean()) { // parent test shuffles xContent, we need to make sure highlight fields are ordered - factory.highlighter( - HighlightBuilderTests.randomHighlighterBuilder().useExplicitFieldOrder(true)); + factory.highlighter(HighlightBuilderTests.randomHighlighterBuilder().useExplicitFieldOrder(true)); } return factory; } - public void testFailWithSubAgg() throws Exception { - String source = "{\n" + - " \"top-tags\": {\n" + - " \"terms\": {\n" + - " \"field\": \"tags\"\n" + - " },\n" + - " \"aggs\": {\n" + - " \"top_tags_hits\": {\n" + - " \"top_hits\": {},\n" + - " \"aggs\": {\n" + - " \"max\": {\n" + - " \"max\": {\n" + - " \"field\": \"age\"\n" + - " }\n" + - " }\n" + - " }\n" + - " }\n" + - " }\n" + - " }\n" + - "}"; + String source = "{\n" + + " \"top-tags\": {\n" + + " \"terms\": {\n" + + " \"field\": \"tags\"\n" + + " },\n" + + " \"aggs\": {\n" + + " \"top_tags_hits\": {\n" + + " \"top_hits\": {},\n" + + " \"aggs\": {\n" + + " \"max\": {\n" + + " \"max\": {\n" + + " \"field\": \"age\"\n" + + " }\n" + + " }\n" + + " }\n" + + " }\n" + + " }\n" + + " }\n" + + "}"; XContentParser parser = createParser(JsonXContent.jsonXContent, source); assertSame(XContentParser.Token.START_OBJECT, parser.nextToken()); Exception e = expectThrows(AggregationInitializationException.class, () -> AggregatorFactories.parseAggregators(parser)); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/ValueCountAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/ValueCountAggregatorTests.java index fd554c658880a..068d65dd240a3 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/ValueCountAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/ValueCountAggregatorTests.java @@ -22,9 +22,9 @@ import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.Query; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.common.geo.GeoPoint; import 
org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.index.mapper.BooleanFieldMapper; import org.elasticsearch.index.mapper.DateFieldMapper; import org.elasticsearch.index.mapper.GeoPointFieldMapper; @@ -93,15 +93,12 @@ protected ScriptService getMockScriptService() { scripts.put(NUMBER_VALUE_SCRIPT, vars -> (((Number) vars.get("_value")).doubleValue() + 1)); scripts.put(SINGLE_SCRIPT, vars -> 1); - MockScriptEngine scriptEngine = new MockScriptEngine(MockScriptEngine.NAME, - scripts, - Collections.emptyMap()); + MockScriptEngine scriptEngine = new MockScriptEngine(MockScriptEngine.NAME, scripts, Collections.emptyMap()); Map engines = Collections.singletonMap(scriptEngine.getType(), scriptEngine); return new ScriptService(Settings.EMPTY, engines, ScriptModule.CORE_CONTEXTS); } - public void testGeoField() throws IOException { testAggregation(new MatchAllDocsQuery(), ValueType.GEOPOINT, iw -> { for (int i = 0; i < 10; i++) { @@ -199,8 +196,7 @@ public void testQueryFiltersAll() throws IOException { } public void testUnmappedMissingString() throws IOException { - ValueCountAggregationBuilder aggregationBuilder = new ValueCountAggregationBuilder("name") - .field("number").missing("🍌🍌🍌"); + ValueCountAggregationBuilder aggregationBuilder = new ValueCountAggregationBuilder("name").field("number").missing("🍌🍌🍌"); testAggregation(aggregationBuilder, new MatchAllDocsQuery(), iw -> { iw.addDocument(singleton(new NumericDocValuesField("unrelatedField", 7))); @@ -213,8 +209,7 @@ public void testUnmappedMissingString() throws IOException { } public void testUnmappedMissingNumber() throws IOException { - ValueCountAggregationBuilder aggregationBuilder = new ValueCountAggregationBuilder("name") - .field("number").missing(1234); + ValueCountAggregationBuilder aggregationBuilder = new ValueCountAggregationBuilder("name").field("number").missing(1234); testAggregation(aggregationBuilder, new MatchAllDocsQuery(), iw -> { iw.addDocument(singleton(new NumericDocValuesField("unrelatedField", 7))); @@ -227,8 +222,8 @@ public void testUnmappedMissingNumber() throws IOException { } public void testUnmappedMissingGeoPoint() throws IOException { - ValueCountAggregationBuilder aggregationBuilder = new ValueCountAggregationBuilder("name") - .field("number").missing(new GeoPoint(42.39561, -71.13051)); + ValueCountAggregationBuilder aggregationBuilder = new ValueCountAggregationBuilder("name").field("number") + .missing(new GeoPoint(42.39561, -71.13051)); testAggregation(aggregationBuilder, new MatchAllDocsQuery(), iw -> { iw.addDocument(singleton(new NumericDocValuesField("unrelatedField", 7))); @@ -247,7 +242,7 @@ public void testRangeFieldValues() throws IOException { final String fieldName = "rangeField"; MappedFieldType fieldType = new RangeFieldMapper.RangeFieldType(fieldName, rangeType); final ValueCountAggregationBuilder aggregationBuilder = new ValueCountAggregationBuilder("_name").field(fieldName); - testAggregation(aggregationBuilder, new MatchAllDocsQuery(), iw -> { + testAggregation(aggregationBuilder, new MatchAllDocsQuery(), iw -> { iw.addDocument(singleton(new BinaryDocValuesField(fieldName, rangeType.encodeRanges(singleton(range1))))); iw.addDocument(singleton(new BinaryDocValuesField(fieldName, rangeType.encodeRanges(singleton(range1))))); iw.addDocument(singleton(new BinaryDocValuesField(fieldName, rangeType.encodeRanges(singleton(range2))))); @@ -259,8 +254,7 @@ public void testRangeFieldValues() throws IOException { } public void 
testValueScriptNumber() throws IOException { - ValueCountAggregationBuilder aggregationBuilder = new ValueCountAggregationBuilder("name") - .field(FIELD_NAME) + ValueCountAggregationBuilder aggregationBuilder = new ValueCountAggregationBuilder("name").field(FIELD_NAME) .script(new Script(ScriptType.INLINE, MockScriptEngine.NAME, NUMBER_VALUE_SCRIPT, Collections.emptyMap())); MappedFieldType fieldType = createMappedFieldType(FIELD_NAME, ValueType.NUMERIC); @@ -276,8 +270,9 @@ public void testValueScriptNumber() throws IOException { } public void testSingleScriptNumber() throws IOException { - ValueCountAggregationBuilder aggregationBuilder = new ValueCountAggregationBuilder("name") - .script(new Script(ScriptType.INLINE, MockScriptEngine.NAME, SINGLE_SCRIPT, Collections.emptyMap())); + ValueCountAggregationBuilder aggregationBuilder = new ValueCountAggregationBuilder("name").script( + new Script(ScriptType.INLINE, MockScriptEngine.NAME, SINGLE_SCRIPT, Collections.emptyMap()) + ); MappedFieldType fieldType = createMappedFieldType(FIELD_NAME, ValueType.NUMERIC); @@ -305,8 +300,7 @@ public void testSingleScriptNumber() throws IOException { } public void testValueScriptString() throws IOException { - ValueCountAggregationBuilder aggregationBuilder = new ValueCountAggregationBuilder("name") - .field(FIELD_NAME) + ValueCountAggregationBuilder aggregationBuilder = new ValueCountAggregationBuilder("name").field(FIELD_NAME) .script(new Script(ScriptType.INLINE, MockScriptEngine.NAME, STRING_VALUE_SCRIPT, Collections.emptyMap())); MappedFieldType fieldType = createMappedFieldType(FIELD_NAME, ValueType.STRING); @@ -322,8 +316,9 @@ public void testValueScriptString() throws IOException { } public void testSingleScriptString() throws IOException { - ValueCountAggregationBuilder aggregationBuilder = new ValueCountAggregationBuilder("name") - .script(new Script(ScriptType.INLINE, MockScriptEngine.NAME, SINGLE_SCRIPT, Collections.emptyMap())); + ValueCountAggregationBuilder aggregationBuilder = new ValueCountAggregationBuilder("name").script( + new Script(ScriptType.INLINE, MockScriptEngine.NAME, SINGLE_SCRIPT, Collections.emptyMap()) + ); MappedFieldType fieldType = createMappedFieldType(FIELD_NAME, ValueType.STRING); @@ -351,19 +346,24 @@ public void testSingleScriptString() throws IOException { }, fieldType); } - private void testAggregation(Query query, - ValueType valueType, - CheckedConsumer indexer, - Consumer verify) throws IOException { + private void testAggregation( + Query query, + ValueType valueType, + CheckedConsumer indexer, + Consumer verify + ) throws IOException { // Test both with and without the userValueTypeHint testAggregation(query, valueType, indexer, verify, true); testAggregation(query, valueType, indexer, verify, false); } - private void testAggregation(Query query, - ValueType valueType, - CheckedConsumer indexer, - Consumer verify, boolean testWithHint) throws IOException { + private void testAggregation( + Query query, + ValueType valueType, + CheckedConsumer indexer, + Consumer verify, + boolean testWithHint + ) throws IOException { MappedFieldType fieldType = createMappedFieldType(FIELD_NAME, valueType); ValueCountAggregationBuilder aggregationBuilder = new ValueCountAggregationBuilder("_name"); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/WeightedAvgAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/WeightedAvgAggregatorTests.java index cd249acda2186..622ecfc1090b8 100644 --- 
a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/WeightedAvgAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/WeightedAvgAggregatorTests.java @@ -43,8 +43,7 @@ public class WeightedAvgAggregatorTests extends AggregatorTestCase { public void testNoDocs() throws IOException { MultiValuesSourceFieldConfig valueConfig = new MultiValuesSourceFieldConfig.Builder().setFieldName("value_field").build(); MultiValuesSourceFieldConfig weightConfig = new MultiValuesSourceFieldConfig.Builder().setFieldName("weight_field").build(); - WeightedAvgAggregationBuilder aggregationBuilder = new WeightedAvgAggregationBuilder("_name") - .value(valueConfig) + WeightedAvgAggregationBuilder aggregationBuilder = new WeightedAvgAggregationBuilder("_name").value(valueConfig) .weight(weightConfig); testCase(new MatchAllDocsQuery(), aggregationBuilder, iw -> { // Intentionally not writing any docs @@ -57,8 +56,7 @@ public void testNoDocs() throws IOException { public void testNoMatchingField() throws IOException { MultiValuesSourceFieldConfig valueConfig = new MultiValuesSourceFieldConfig.Builder().setFieldName("value_field").build(); MultiValuesSourceFieldConfig weightConfig = new MultiValuesSourceFieldConfig.Builder().setFieldName("weight_field").build(); - WeightedAvgAggregationBuilder aggregationBuilder = new WeightedAvgAggregationBuilder("_name") - .value(valueConfig) + WeightedAvgAggregationBuilder aggregationBuilder = new WeightedAvgAggregationBuilder("_name").value(valueConfig) .weight(weightConfig); testCase(new MatchAllDocsQuery(), aggregationBuilder, iw -> { iw.addDocument(singleton(new SortedNumericDocValuesField("wrong_number", 7))); @@ -72,8 +70,7 @@ public void testNoMatchingField() throws IOException { public void testUnmappedWeight() throws IOException { MultiValuesSourceFieldConfig valueConfig = new MultiValuesSourceFieldConfig.Builder().setFieldName("value_field").build(); MultiValuesSourceFieldConfig weightConfig = new MultiValuesSourceFieldConfig.Builder().setFieldName("weight_field").build(); - WeightedAvgAggregationBuilder aggregationBuilder = new WeightedAvgAggregationBuilder("_name") - .value(valueConfig) + WeightedAvgAggregationBuilder aggregationBuilder = new WeightedAvgAggregationBuilder("_name").value(valueConfig) .weight(weightConfig); testCase(new MatchAllDocsQuery(), aggregationBuilder, iw -> { iw.addDocument(singleton(new SortedNumericDocValuesField("value_field", 7))); @@ -87,8 +84,7 @@ public void testUnmappedWeight() throws IOException { public void testUnmappedValue() throws IOException { MultiValuesSourceFieldConfig valueConfig = new MultiValuesSourceFieldConfig.Builder().setFieldName("value_field").build(); MultiValuesSourceFieldConfig weightConfig = new MultiValuesSourceFieldConfig.Builder().setFieldName("weight_field").build(); - WeightedAvgAggregationBuilder aggregationBuilder = new WeightedAvgAggregationBuilder("_name") - .value(valueConfig) + WeightedAvgAggregationBuilder aggregationBuilder = new WeightedAvgAggregationBuilder("_name").value(valueConfig) .weight(weightConfig); testCase(new MatchAllDocsQuery(), aggregationBuilder, iw -> { iw.addDocument(singleton(new SortedNumericDocValuesField("weight_field", 7))); @@ -102,16 +98,18 @@ public void testUnmappedValue() throws IOException { public void testSomeMatchesSortedNumericDocValuesNoWeight() throws IOException { MultiValuesSourceFieldConfig valueConfig = new MultiValuesSourceFieldConfig.Builder().setFieldName("value_field").build(); MultiValuesSourceFieldConfig 
weightConfig = new MultiValuesSourceFieldConfig.Builder().setFieldName("weight_field").build(); - WeightedAvgAggregationBuilder aggregationBuilder = new WeightedAvgAggregationBuilder("_name") - .value(valueConfig) + WeightedAvgAggregationBuilder aggregationBuilder = new WeightedAvgAggregationBuilder("_name").value(valueConfig) .weight(weightConfig); testCase(new MatchAllDocsQuery(), aggregationBuilder, iw -> { - iw.addDocument(Arrays.asList(new SortedNumericDocValuesField("value_field", 7), - new SortedNumericDocValuesField("weight_field", 1))); - iw.addDocument(Arrays.asList(new SortedNumericDocValuesField("value_field", 2), - new SortedNumericDocValuesField("weight_field", 1))); - iw.addDocument(Arrays.asList(new SortedNumericDocValuesField("value_field", 3), - new SortedNumericDocValuesField("weight_field", 1))); + iw.addDocument( + Arrays.asList(new SortedNumericDocValuesField("value_field", 7), new SortedNumericDocValuesField("weight_field", 1)) + ); + iw.addDocument( + Arrays.asList(new SortedNumericDocValuesField("value_field", 2), new SortedNumericDocValuesField("weight_field", 1)) + ); + iw.addDocument( + Arrays.asList(new SortedNumericDocValuesField("value_field", 3), new SortedNumericDocValuesField("weight_field", 1)) + ); }, avg -> { assertEquals(4, avg.getValue(), 0); assertTrue(AggregationInspectionHelper.hasValue(avg)); @@ -121,16 +119,18 @@ public void testSomeMatchesSortedNumericDocValuesNoWeight() throws IOException { public void testSomeMatchesSortedNumericDocValuesWeights() throws IOException { MultiValuesSourceFieldConfig valueConfig = new MultiValuesSourceFieldConfig.Builder().setFieldName("value_field").build(); MultiValuesSourceFieldConfig weightConfig = new MultiValuesSourceFieldConfig.Builder().setFieldName("weight_field").build(); - WeightedAvgAggregationBuilder aggregationBuilder = new WeightedAvgAggregationBuilder("_name") - .value(valueConfig) + WeightedAvgAggregationBuilder aggregationBuilder = new WeightedAvgAggregationBuilder("_name").value(valueConfig) .weight(weightConfig); testCase(new MatchAllDocsQuery(), aggregationBuilder, iw -> { - iw.addDocument(Arrays.asList(new SortedNumericDocValuesField("value_field", 7), - new SortedNumericDocValuesField("weight_field", 2))); - iw.addDocument(Arrays.asList(new SortedNumericDocValuesField("value_field", 2), - new SortedNumericDocValuesField("weight_field", 3))); - iw.addDocument(Arrays.asList(new SortedNumericDocValuesField("value_field", 3), - new SortedNumericDocValuesField("weight_field", 3))); + iw.addDocument( + Arrays.asList(new SortedNumericDocValuesField("value_field", 7), new SortedNumericDocValuesField("weight_field", 2)) + ); + iw.addDocument( + Arrays.asList(new SortedNumericDocValuesField("value_field", 2), new SortedNumericDocValuesField("weight_field", 3)) + ); + iw.addDocument( + Arrays.asList(new SortedNumericDocValuesField("value_field", 3), new SortedNumericDocValuesField("weight_field", 3)) + ); }, avg -> { // (7*2 + 2*3 + 3*3) / (2+3+3) == 3.625 @@ -142,16 +142,12 @@ public void testSomeMatchesSortedNumericDocValuesWeights() throws IOException { public void testSomeMatchesNumericDocValues() throws IOException { MultiValuesSourceFieldConfig valueConfig = new MultiValuesSourceFieldConfig.Builder().setFieldName("value_field").build(); MultiValuesSourceFieldConfig weightConfig = new MultiValuesSourceFieldConfig.Builder().setFieldName("weight_field").build(); - WeightedAvgAggregationBuilder aggregationBuilder = new WeightedAvgAggregationBuilder("_name") - .value(valueConfig) + 
WeightedAvgAggregationBuilder aggregationBuilder = new WeightedAvgAggregationBuilder("_name").value(valueConfig) .weight(weightConfig); testCase(new DocValuesFieldExistsQuery("value_field"), aggregationBuilder, iw -> { - iw.addDocument(Arrays.asList(new NumericDocValuesField("value_field", 7), - new SortedNumericDocValuesField("weight_field", 1))); - iw.addDocument(Arrays.asList(new NumericDocValuesField("value_field", 2), - new SortedNumericDocValuesField("weight_field", 1))); - iw.addDocument(Arrays.asList(new NumericDocValuesField("value_field", 3), - new SortedNumericDocValuesField("weight_field", 1))); + iw.addDocument(Arrays.asList(new NumericDocValuesField("value_field", 7), new SortedNumericDocValuesField("weight_field", 1))); + iw.addDocument(Arrays.asList(new NumericDocValuesField("value_field", 2), new SortedNumericDocValuesField("weight_field", 1))); + iw.addDocument(Arrays.asList(new NumericDocValuesField("value_field", 3), new SortedNumericDocValuesField("weight_field", 1))); }, avg -> { assertEquals(4, avg.getValue(), 0); assertTrue(AggregationInspectionHelper.hasValue(avg)); @@ -161,16 +157,30 @@ public void testSomeMatchesNumericDocValues() throws IOException { public void testQueryFiltering() throws IOException { MultiValuesSourceFieldConfig valueConfig = new MultiValuesSourceFieldConfig.Builder().setFieldName("value_field").build(); MultiValuesSourceFieldConfig weightConfig = new MultiValuesSourceFieldConfig.Builder().setFieldName("weight_field").build(); - WeightedAvgAggregationBuilder aggregationBuilder = new WeightedAvgAggregationBuilder("_name") - .value(valueConfig) + WeightedAvgAggregationBuilder aggregationBuilder = new WeightedAvgAggregationBuilder("_name").value(valueConfig) .weight(weightConfig); testCase(IntPoint.newRangeQuery("value_field", 0, 3), aggregationBuilder, iw -> { - iw.addDocument(Arrays.asList(new IntPoint("value_field", 7), new SortedNumericDocValuesField("value_field", 7), - new SortedNumericDocValuesField("weight_field", 1))); - iw.addDocument(Arrays.asList(new IntPoint("value_field", 1), new SortedNumericDocValuesField("value_field", 2), - new SortedNumericDocValuesField("weight_field", 1))); - iw.addDocument(Arrays.asList(new IntPoint("value_field", 3), new SortedNumericDocValuesField("value_field", 3), - new SortedNumericDocValuesField("weight_field", 1))); + iw.addDocument( + Arrays.asList( + new IntPoint("value_field", 7), + new SortedNumericDocValuesField("value_field", 7), + new SortedNumericDocValuesField("weight_field", 1) + ) + ); + iw.addDocument( + Arrays.asList( + new IntPoint("value_field", 1), + new SortedNumericDocValuesField("value_field", 2), + new SortedNumericDocValuesField("weight_field", 1) + ) + ); + iw.addDocument( + Arrays.asList( + new IntPoint("value_field", 3), + new SortedNumericDocValuesField("value_field", 3), + new SortedNumericDocValuesField("weight_field", 1) + ) + ); }, avg -> { assertEquals(2.5, avg.getValue(), 0); assertTrue(AggregationInspectionHelper.hasValue(avg)); @@ -180,18 +190,32 @@ public void testQueryFiltering() throws IOException { public void testQueryFilteringWeights() throws IOException { MultiValuesSourceFieldConfig valueConfig = new MultiValuesSourceFieldConfig.Builder().setFieldName("value_field").build(); MultiValuesSourceFieldConfig weightConfig = new MultiValuesSourceFieldConfig.Builder().setFieldName("weight_field").build(); - WeightedAvgAggregationBuilder aggregationBuilder = new WeightedAvgAggregationBuilder("_name") - .value(valueConfig) + WeightedAvgAggregationBuilder 
aggregationBuilder = new WeightedAvgAggregationBuilder("_name").value(valueConfig) .weight(weightConfig); testCase(IntPoint.newRangeQuery("filter_field", 0, 3), aggregationBuilder, iw -> { - iw.addDocument(Arrays.asList(new IntPoint("filter_field", 7), new SortedNumericDocValuesField("value_field", 7), - new SortedNumericDocValuesField("weight_field", 2))); - iw.addDocument(Arrays.asList(new IntPoint("filter_field", 2), new SortedNumericDocValuesField("value_field", 2), - new SortedNumericDocValuesField("weight_field", 3))); - iw.addDocument(Arrays.asList(new IntPoint("filter_field", 3), new SortedNumericDocValuesField("value_field", 3), - new SortedNumericDocValuesField("weight_field", 4))); + iw.addDocument( + Arrays.asList( + new IntPoint("filter_field", 7), + new SortedNumericDocValuesField("value_field", 7), + new SortedNumericDocValuesField("weight_field", 2) + ) + ); + iw.addDocument( + Arrays.asList( + new IntPoint("filter_field", 2), + new SortedNumericDocValuesField("value_field", 2), + new SortedNumericDocValuesField("weight_field", 3) + ) + ); + iw.addDocument( + Arrays.asList( + new IntPoint("filter_field", 3), + new SortedNumericDocValuesField("value_field", 3), + new SortedNumericDocValuesField("weight_field", 4) + ) + ); }, avg -> { - double value = (2.0*3.0 + 3.0*4.0) / (3.0+4.0); + double value = (2.0 * 3.0 + 3.0 * 4.0) / (3.0 + 4.0); assertEquals(value, avg.getValue(), 0); assertTrue(AggregationInspectionHelper.hasValue(avg)); }); @@ -200,8 +224,7 @@ public void testQueryFilteringWeights() throws IOException { public void testQueryFiltersAll() throws IOException { MultiValuesSourceFieldConfig valueConfig = new MultiValuesSourceFieldConfig.Builder().setFieldName("value_field").build(); MultiValuesSourceFieldConfig weightConfig = new MultiValuesSourceFieldConfig.Builder().setFieldName("weight_field").build(); - WeightedAvgAggregationBuilder aggregationBuilder = new WeightedAvgAggregationBuilder("_name") - .value(valueConfig) + WeightedAvgAggregationBuilder aggregationBuilder = new WeightedAvgAggregationBuilder("_name").value(valueConfig) .weight(weightConfig); testCase(IntPoint.newRangeQuery("value_field", -1, 0), aggregationBuilder, iw -> { iw.addDocument(Arrays.asList(new IntPoint("value_field", 7), new SortedNumericDocValuesField("value_field", 7))); @@ -216,16 +239,30 @@ public void testQueryFiltersAll() throws IOException { public void testQueryFiltersAllWeights() throws IOException { MultiValuesSourceFieldConfig valueConfig = new MultiValuesSourceFieldConfig.Builder().setFieldName("value_field").build(); MultiValuesSourceFieldConfig weightConfig = new MultiValuesSourceFieldConfig.Builder().setFieldName("weight_field").build(); - WeightedAvgAggregationBuilder aggregationBuilder = new WeightedAvgAggregationBuilder("_name") - .value(valueConfig) + WeightedAvgAggregationBuilder aggregationBuilder = new WeightedAvgAggregationBuilder("_name").value(valueConfig) .weight(weightConfig); testCase(IntPoint.newRangeQuery("value_field", -1, 0), aggregationBuilder, iw -> { - iw.addDocument(Arrays.asList(new IntPoint("filter_field", 7), new SortedNumericDocValuesField("value_field", 7), - new SortedNumericDocValuesField("weight_field", 2))); - iw.addDocument(Arrays.asList(new IntPoint("filter_field", 2), new SortedNumericDocValuesField("value_field", 2), - new SortedNumericDocValuesField("weight_field", 3))); - iw.addDocument(Arrays.asList(new IntPoint("filter_field", 3), new SortedNumericDocValuesField("value_field", 3), - new SortedNumericDocValuesField("weight_field", 4))); + 
iw.addDocument( + Arrays.asList( + new IntPoint("filter_field", 7), + new SortedNumericDocValuesField("value_field", 7), + new SortedNumericDocValuesField("weight_field", 2) + ) + ); + iw.addDocument( + Arrays.asList( + new IntPoint("filter_field", 2), + new SortedNumericDocValuesField("value_field", 2), + new SortedNumericDocValuesField("weight_field", 3) + ) + ); + iw.addDocument( + Arrays.asList( + new IntPoint("filter_field", 3), + new SortedNumericDocValuesField("value_field", 3), + new SortedNumericDocValuesField("weight_field", 4) + ) + ); }, avg -> { assertEquals(Double.NaN, avg.getValue(), 0); assertFalse(AggregationInspectionHelper.hasValue(avg)); @@ -233,20 +270,18 @@ public void testQueryFiltersAllWeights() throws IOException { } public void testValueSetMissing() throws IOException { - MultiValuesSourceFieldConfig valueConfig = new MultiValuesSourceFieldConfig.Builder() - .setFieldName("value_field") + MultiValuesSourceFieldConfig valueConfig = new MultiValuesSourceFieldConfig.Builder().setFieldName("value_field") .setMissing(2) .build(); MultiValuesSourceFieldConfig weightConfig = new MultiValuesSourceFieldConfig.Builder().setFieldName("weight_field").build(); - WeightedAvgAggregationBuilder aggregationBuilder = new WeightedAvgAggregationBuilder("_name") - .value(valueConfig) + WeightedAvgAggregationBuilder aggregationBuilder = new WeightedAvgAggregationBuilder("_name").value(valueConfig) .weight(weightConfig); testCase(new MatchAllDocsQuery(), aggregationBuilder, iw -> { iw.addDocument(Collections.singletonList(new SortedNumericDocValuesField("weight_field", 2))); iw.addDocument(Collections.singletonList(new SortedNumericDocValuesField("weight_field", 3))); iw.addDocument(Collections.singletonList(new SortedNumericDocValuesField("weight_field", 4))); }, avg -> { - double value = (2.0*2.0 + 2.0*3.0 + 2.0*4.0) / (2.0+3.0+4.0); + double value = (2.0 * 2.0 + 2.0 * 3.0 + 2.0 * 4.0) / (2.0 + 3.0 + 4.0); assertEquals(value, avg.getValue(), 0); assertTrue(AggregationInspectionHelper.hasValue(avg)); }); @@ -254,19 +289,17 @@ public void testValueSetMissing() throws IOException { public void testWeightSetMissing() throws IOException { MultiValuesSourceFieldConfig valueConfig = new MultiValuesSourceFieldConfig.Builder().setFieldName("value_field").build(); - MultiValuesSourceFieldConfig weightConfig = new MultiValuesSourceFieldConfig.Builder() - .setFieldName("weight_field") + MultiValuesSourceFieldConfig weightConfig = new MultiValuesSourceFieldConfig.Builder().setFieldName("weight_field") .setMissing(2) .build(); - WeightedAvgAggregationBuilder aggregationBuilder = new WeightedAvgAggregationBuilder("_name") - .value(valueConfig) + WeightedAvgAggregationBuilder aggregationBuilder = new WeightedAvgAggregationBuilder("_name").value(valueConfig) .weight(weightConfig); testCase(new MatchAllDocsQuery(), aggregationBuilder, iw -> { iw.addDocument(Collections.singletonList(new SortedNumericDocValuesField("value_field", 2))); iw.addDocument(Collections.singletonList(new SortedNumericDocValuesField("value_field", 3))); iw.addDocument(Collections.singletonList(new SortedNumericDocValuesField("value_field", 4))); }, avg -> { - double value = (2.0*2.0 + 3.0*2.0 + 4.0*2.0) / (2.0+2.0+2.0); + double value = (2.0 * 2.0 + 3.0 * 2.0 + 4.0 * 2.0) / (2.0 + 2.0 + 2.0); assertEquals(value, avg.getValue(), 0); assertTrue(AggregationInspectionHelper.hasValue(avg)); }); @@ -274,70 +307,84 @@ public void testWeightSetMissing() throws IOException { public void testWeightSetTimezone() throws IOException { 
MultiValuesSourceFieldConfig valueConfig = new MultiValuesSourceFieldConfig.Builder().setFieldName("value_field").build(); - MultiValuesSourceFieldConfig weightConfig = new MultiValuesSourceFieldConfig.Builder() - .setFieldName("weight_field") + MultiValuesSourceFieldConfig weightConfig = new MultiValuesSourceFieldConfig.Builder().setFieldName("weight_field") .setTimeZone(ZoneOffset.UTC) .build(); - WeightedAvgAggregationBuilder aggregationBuilder = new WeightedAvgAggregationBuilder("_name") - .value(valueConfig) + WeightedAvgAggregationBuilder aggregationBuilder = new WeightedAvgAggregationBuilder("_name").value(valueConfig) .weight(weightConfig); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, () -> testCase(new MatchAllDocsQuery(), aggregationBuilder, iw -> { - iw.addDocument(Arrays.asList(new SortedNumericDocValuesField("value_field", 2), - new SortedNumericDocValuesField("weight_field", 1))); - iw.addDocument(Arrays.asList(new SortedNumericDocValuesField("value_field", 3), - new SortedNumericDocValuesField("weight_field", 1))); - iw.addDocument(Arrays.asList(new SortedNumericDocValuesField("value_field", 4), - new SortedNumericDocValuesField("weight_field", 1))); - }, avg -> { - fail("Should not have executed test case"); - })); + iw.addDocument( + Arrays.asList(new SortedNumericDocValuesField("value_field", 2), new SortedNumericDocValuesField("weight_field", 1)) + ); + iw.addDocument( + Arrays.asList(new SortedNumericDocValuesField("value_field", 3), new SortedNumericDocValuesField("weight_field", 1)) + ); + iw.addDocument( + Arrays.asList(new SortedNumericDocValuesField("value_field", 4), new SortedNumericDocValuesField("weight_field", 1)) + ); + }, avg -> { fail("Should not have executed test case"); }) + ); assertThat(e.getMessage(), equalTo("Field [weight_field] of type [long] does not support custom time zones")); } public void testValueSetTimezone() throws IOException { - MultiValuesSourceFieldConfig valueConfig = new MultiValuesSourceFieldConfig.Builder() - .setFieldName("value_field") + MultiValuesSourceFieldConfig valueConfig = new MultiValuesSourceFieldConfig.Builder().setFieldName("value_field") .setTimeZone(ZoneOffset.UTC) .build(); MultiValuesSourceFieldConfig weightConfig = new MultiValuesSourceFieldConfig.Builder().setFieldName("weight_field").build(); - WeightedAvgAggregationBuilder aggregationBuilder = new WeightedAvgAggregationBuilder("_name") - .value(valueConfig) + WeightedAvgAggregationBuilder aggregationBuilder = new WeightedAvgAggregationBuilder("_name").value(valueConfig) .weight(weightConfig); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, () -> testCase(new MatchAllDocsQuery(), aggregationBuilder, iw -> { - iw.addDocument(Arrays.asList(new SortedNumericDocValuesField("value_field", 2), - new SortedNumericDocValuesField("weight_field", 1))); - iw.addDocument(Arrays.asList(new SortedNumericDocValuesField("value_field", 3), - new SortedNumericDocValuesField("weight_field", 1))); - iw.addDocument(Arrays.asList(new SortedNumericDocValuesField("value_field", 4), - new SortedNumericDocValuesField("weight_field", 1))); - }, avg -> { - fail("Should not have executed test case"); - })); + iw.addDocument( + Arrays.asList(new SortedNumericDocValuesField("value_field", 2), new SortedNumericDocValuesField("weight_field", 1)) + ); + iw.addDocument( + 
Arrays.asList(new SortedNumericDocValuesField("value_field", 3), new SortedNumericDocValuesField("weight_field", 1)) + ); + iw.addDocument( + Arrays.asList(new SortedNumericDocValuesField("value_field", 4), new SortedNumericDocValuesField("weight_field", 1)) + ); + }, avg -> { fail("Should not have executed test case"); }) + ); assertThat(e.getMessage(), equalTo("Field [value_field] of type [long] does not support custom time zones")); } public void testMultiValues() throws IOException { - MultiValuesSourceFieldConfig valueConfig = new MultiValuesSourceFieldConfig.Builder() - .setFieldName("value_field") - .build(); + MultiValuesSourceFieldConfig valueConfig = new MultiValuesSourceFieldConfig.Builder().setFieldName("value_field").build(); MultiValuesSourceFieldConfig weightConfig = new MultiValuesSourceFieldConfig.Builder().setFieldName("weight_field").build(); - WeightedAvgAggregationBuilder aggregationBuilder = new WeightedAvgAggregationBuilder("_name") - .value(valueConfig) + WeightedAvgAggregationBuilder aggregationBuilder = new WeightedAvgAggregationBuilder("_name").value(valueConfig) .weight(weightConfig); testCase(new MatchAllDocsQuery(), aggregationBuilder, iw -> { - iw.addDocument(Arrays.asList(new SortedNumericDocValuesField("value_field", 2), - new SortedNumericDocValuesField("value_field", 3), new SortedNumericDocValuesField("weight_field", 1))); - iw.addDocument(Arrays.asList(new SortedNumericDocValuesField("value_field", 3), - new SortedNumericDocValuesField("value_field", 4), new SortedNumericDocValuesField("weight_field", 1))); - iw.addDocument(Arrays.asList(new SortedNumericDocValuesField("value_field", 4), - new SortedNumericDocValuesField("value_field", 5), new SortedNumericDocValuesField("weight_field", 1))); + iw.addDocument( + Arrays.asList( + new SortedNumericDocValuesField("value_field", 2), + new SortedNumericDocValuesField("value_field", 3), + new SortedNumericDocValuesField("weight_field", 1) + ) + ); + iw.addDocument( + Arrays.asList( + new SortedNumericDocValuesField("value_field", 3), + new SortedNumericDocValuesField("value_field", 4), + new SortedNumericDocValuesField("weight_field", 1) + ) + ); + iw.addDocument( + Arrays.asList( + new SortedNumericDocValuesField("value_field", 4), + new SortedNumericDocValuesField("value_field", 5), + new SortedNumericDocValuesField("weight_field", 1) + ) + ); }, avg -> { - double value = (((2.0+3.0)/2.0) + ((3.0+4.0)/2.0) + ((4.0+5.0)/2.0)) / (1.0+1.0+1.0); + double value = (((2.0 + 3.0) / 2.0) + ((3.0 + 4.0) / 2.0) + ((4.0 + 5.0) / 2.0)) / (1.0 + 1.0 + 1.0); assertEquals(value, avg.getValue(), 0); assertTrue(AggregationInspectionHelper.hasValue(avg)); }); @@ -345,45 +392,61 @@ public void testMultiValues() throws IOException { public void testMultiWeight() throws IOException { MultiValuesSourceFieldConfig valueConfig = new MultiValuesSourceFieldConfig.Builder().setFieldName("value_field").build(); - MultiValuesSourceFieldConfig weightConfig = new MultiValuesSourceFieldConfig.Builder() - .setFieldName("weight_field") - .build(); - WeightedAvgAggregationBuilder aggregationBuilder = new WeightedAvgAggregationBuilder("_name") - .value(valueConfig) + MultiValuesSourceFieldConfig weightConfig = new MultiValuesSourceFieldConfig.Builder().setFieldName("weight_field").build(); + WeightedAvgAggregationBuilder aggregationBuilder = new WeightedAvgAggregationBuilder("_name").value(valueConfig) .weight(weightConfig); - AggregationExecutionException e = expectThrows(AggregationExecutionException.class, + AggregationExecutionException e 
= expectThrows( + AggregationExecutionException.class, () -> testCase(new MatchAllDocsQuery(), aggregationBuilder, iw -> { - iw.addDocument(Arrays.asList( - new SortedNumericDocValuesField("value_field", 2), - new SortedNumericDocValuesField("weight_field", 2), new SortedNumericDocValuesField("weight_field", 3))); - iw.addDocument(Arrays.asList( - new SortedNumericDocValuesField("value_field", 3), - new SortedNumericDocValuesField("weight_field", 3), new SortedNumericDocValuesField("weight_field", 4))); - iw.addDocument(Arrays.asList( - new SortedNumericDocValuesField("value_field", 4), - new SortedNumericDocValuesField("weight_field", 4), new SortedNumericDocValuesField("weight_field", 5))); - }, avg -> { - fail("Should have thrown exception"); - })); - assertThat(e.getMessage(), containsString("Encountered more than one weight for a single document. " + - "Use a script to combine multiple weights-per-doc into a single value.")); + iw.addDocument( + Arrays.asList( + new SortedNumericDocValuesField("value_field", 2), + new SortedNumericDocValuesField("weight_field", 2), + new SortedNumericDocValuesField("weight_field", 3) + ) + ); + iw.addDocument( + Arrays.asList( + new SortedNumericDocValuesField("value_field", 3), + new SortedNumericDocValuesField("weight_field", 3), + new SortedNumericDocValuesField("weight_field", 4) + ) + ); + iw.addDocument( + Arrays.asList( + new SortedNumericDocValuesField("value_field", 4), + new SortedNumericDocValuesField("weight_field", 4), + new SortedNumericDocValuesField("weight_field", 5) + ) + ); + }, avg -> { fail("Should have thrown exception"); }) + ); + assertThat( + e.getMessage(), + containsString( + "Encountered more than one weight for a single document. " + + "Use a script to combine multiple weights-per-doc into a single value." 
+ ) + ); } public void testFormatter() throws IOException { MultiValuesSourceFieldConfig valueConfig = new MultiValuesSourceFieldConfig.Builder().setFieldName("value_field").build(); MultiValuesSourceFieldConfig weightConfig = new MultiValuesSourceFieldConfig.Builder().setFieldName("weight_field").build(); - WeightedAvgAggregationBuilder aggregationBuilder = new WeightedAvgAggregationBuilder("_name") - .value(valueConfig) + WeightedAvgAggregationBuilder aggregationBuilder = new WeightedAvgAggregationBuilder("_name").value(valueConfig) .weight(weightConfig) .format("0.00%"); testCase(new MatchAllDocsQuery(), aggregationBuilder, iw -> { - iw.addDocument(Arrays.asList(new SortedNumericDocValuesField("value_field", 7), - new SortedNumericDocValuesField("weight_field", 1))); - iw.addDocument(Arrays.asList(new SortedNumericDocValuesField("value_field", 2), - new SortedNumericDocValuesField("weight_field", 1))); - iw.addDocument(Arrays.asList(new SortedNumericDocValuesField("value_field", 3), - new SortedNumericDocValuesField("weight_field", 1))); + iw.addDocument( + Arrays.asList(new SortedNumericDocValuesField("value_field", 7), new SortedNumericDocValuesField("weight_field", 1)) + ); + iw.addDocument( + Arrays.asList(new SortedNumericDocValuesField("value_field", 2), new SortedNumericDocValuesField("weight_field", 1)) + ); + iw.addDocument( + Arrays.asList(new SortedNumericDocValuesField("value_field", 3), new SortedNumericDocValuesField("weight_field", 1)) + ); }, avg -> { assertEquals(4, avg.getValue(), 0); assertTrue(AggregationInspectionHelper.hasValue(avg)); @@ -393,7 +456,7 @@ public void testFormatter() throws IOException { public void testSummationAccuracy() throws IOException { // Summing up a normal array and expect an accurate value - double[] values = new double[]{0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.9, 1.1, 1.2, 1.3, 1.4, 1.5, 1.6, 1.7}; + double[] values = new double[] { 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.9, 1.1, 1.2, 1.3, 1.4, 1.5, 1.6, 1.7 }; verifyAvgOfDoubles(values, 0.9, 0d); // Summing up an array which contains NaN and infinities and expect a result same as naive summation @@ -425,31 +488,36 @@ public void testSummationAccuracy() throws IOException { private void verifyAvgOfDoubles(double[] values, double expected, double delta) throws IOException { MultiValuesSourceFieldConfig valueConfig = new MultiValuesSourceFieldConfig.Builder().setFieldName("value_field").build(); MultiValuesSourceFieldConfig weightConfig = new MultiValuesSourceFieldConfig.Builder().setFieldName("weight_field").build(); - WeightedAvgAggregationBuilder aggregationBuilder = new WeightedAvgAggregationBuilder("_name") - .value(valueConfig) + WeightedAvgAggregationBuilder aggregationBuilder = new WeightedAvgAggregationBuilder("_name").value(valueConfig) .weight(weightConfig); - testCase(new MatchAllDocsQuery(), aggregationBuilder, - iw -> { - for (double value : values) { - iw.addDocument(Arrays.asList(new NumericDocValuesField("value_field", NumericUtils.doubleToSortableLong(value)), - new SortedNumericDocValuesField("weight_field", NumericUtils.doubleToSortableLong(1.0)))); - } - }, - avg -> assertEquals(expected, avg.getValue(), delta), - NumberFieldMapper.NumberType.DOUBLE - ); + testCase(new MatchAllDocsQuery(), aggregationBuilder, iw -> { + for (double value : values) { + iw.addDocument( + Arrays.asList( + new NumericDocValuesField("value_field", NumericUtils.doubleToSortableLong(value)), + new SortedNumericDocValuesField("weight_field", NumericUtils.doubleToSortableLong(1.0)) + ) + ); + } + }, 
avg -> assertEquals(expected, avg.getValue(), delta), NumberFieldMapper.NumberType.DOUBLE); } - private void testCase(Query query, WeightedAvgAggregationBuilder aggregationBuilder, - CheckedConsumer buildIndex, - Consumer verify) throws IOException { + private void testCase( + Query query, + WeightedAvgAggregationBuilder aggregationBuilder, + CheckedConsumer buildIndex, + Consumer verify + ) throws IOException { testCase(query, aggregationBuilder, buildIndex, verify, NumberFieldMapper.NumberType.LONG); } - private void testCase(Query query, WeightedAvgAggregationBuilder aggregationBuilder, - CheckedConsumer buildIndex, - Consumer verify, - NumberFieldMapper.NumberType fieldNumberType) throws IOException { + private void testCase( + Query query, + WeightedAvgAggregationBuilder aggregationBuilder, + CheckedConsumer buildIndex, + Consumer verify, + NumberFieldMapper.NumberType fieldNumberType + ) throws IOException { Directory directory = newDirectory(); RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/weighted_avg/WeightedAvgAggregationBuilderTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/weighted_avg/WeightedAvgAggregationBuilderTests.java index 91f99de585994..ee41024ee8abd 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/weighted_avg/WeightedAvgAggregationBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/weighted_avg/WeightedAvgAggregationBuilderTests.java @@ -54,8 +54,7 @@ protected WeightedAvgAggregationBuilder doParseInstance(XContentParser parser) t protected WeightedAvgAggregationBuilder createTestInstance() { MultiValuesSourceFieldConfig valueConfig = new MultiValuesSourceFieldConfig.Builder().setFieldName("value_field").build(); MultiValuesSourceFieldConfig weightConfig = new MultiValuesSourceFieldConfig.Builder().setFieldName("weight_field").build(); - WeightedAvgAggregationBuilder aggregationBuilder = new WeightedAvgAggregationBuilder(aggregationName) - .value(valueConfig) + WeightedAvgAggregationBuilder aggregationBuilder = new WeightedAvgAggregationBuilder(aggregationName).value(valueConfig) .weight(weightConfig); return aggregationBuilder; } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/AbstractBucketMetricsTestCase.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/AbstractBucketMetricsTestCase.java index 144fd90416e2b..c8a56f88ae47b 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/AbstractBucketMetricsTestCase.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/AbstractBucketMetricsTestCase.java @@ -11,8 +11,8 @@ import org.elasticsearch.search.aggregations.BasePipelineAggregationTestCase; import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy; -public abstract class AbstractBucketMetricsTestCase> - extends BasePipelineAggregationTestCase { +public abstract class AbstractBucketMetricsTestCase> extends + BasePipelineAggregationTestCase { @Override protected final PAF createTestAggregatorFactory() { diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/AvgBucketAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/AvgBucketAggregatorTests.java index dc3838fe12b9b..19c7876a0f41b 100644 --- 
a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/AvgBucketAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/AvgBucketAggregatorTests.java @@ -37,7 +37,6 @@ import java.util.Collections; import java.util.List; - public class AvgBucketAggregatorTests extends AggregatorTestCase { private static final String DATE_FIELD = "date"; private static final String VALUE_FIELD = "value"; @@ -52,7 +51,8 @@ public class AvgBucketAggregatorTests extends AggregatorTestCase { "2015-06-24T13:47:43", "2015-11-13T16:14:34", "2016-03-04T17:09:50", - "2017-12-12T22:55:46"); + "2017-12-12T22:55:46" + ); /** * Test for issue #30608. Under the following circumstances: @@ -72,13 +72,11 @@ public void testSameAggNames() throws IOException { Query query = new MatchAllDocsQuery(); AvgAggregationBuilder avgBuilder = new AvgAggregationBuilder("foo").field(VALUE_FIELD); - DateHistogramAggregationBuilder histo = new DateHistogramAggregationBuilder("histo") - .calendarInterval(DateHistogramInterval.YEAR) + DateHistogramAggregationBuilder histo = new DateHistogramAggregationBuilder("histo").calendarInterval(DateHistogramInterval.YEAR) .field(DATE_FIELD) .subAggregation(new AvgAggregationBuilder("foo").field(VALUE_FIELD)); - AvgBucketPipelineAggregationBuilder avgBucketBuilder - = new AvgBucketPipelineAggregationBuilder("the_avg_bucket", "histo>foo"); + AvgBucketPipelineAggregationBuilder avgBucketBuilder = new AvgBucketPipelineAggregationBuilder("the_avg_bucket", "histo>foo"); try (Directory directory = newDirectory()) { try (RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory)) { @@ -102,13 +100,10 @@ public void testSameAggNames() throws IOException { DateFieldMapper.DateFieldType fieldType = new DateFieldMapper.DateFieldType(DATE_FIELD); - MappedFieldType valueFieldType - = new NumberFieldMapper.NumberFieldType(VALUE_FIELD, NumberFieldMapper.NumberType.LONG); + MappedFieldType valueFieldType = new NumberFieldMapper.NumberFieldType(VALUE_FIELD, NumberFieldMapper.NumberType.LONG); - avgResult = searchAndReduce(indexSearcher, query, avgBuilder, 10000, - new MappedFieldType[]{fieldType, valueFieldType}); - histogramResult = searchAndReduce(indexSearcher, query, histo, 10000, - new MappedFieldType[]{fieldType, valueFieldType}); + avgResult = searchAndReduce(indexSearcher, query, avgBuilder, 10000, new MappedFieldType[] { fieldType, valueFieldType }); + histogramResult = searchAndReduce(indexSearcher, query, histo, 10000, new MappedFieldType[] { fieldType, valueFieldType }); } // Finally, reduce the pipeline agg @@ -119,7 +114,7 @@ public void testSameAggNames() throws IOException { reducedAggs.add(histogramResult); reducedAggs.add(avgResult); Aggregations aggregations = new Aggregations(reducedAggs); - InternalAggregation pipelineResult = ((AvgBucketPipelineAggregator)avgBucketAgg).doReduce(aggregations, null); + InternalAggregation pipelineResult = ((AvgBucketPipelineAggregator) avgBucketAgg).doReduce(aggregations, null); assertNotNull(pipelineResult); } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/AvgBucketTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/AvgBucketTests.java index d1b9be3bacc46..9275aff40a6ca 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/AvgBucketTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/AvgBucketTests.java @@ -34,15 +34,26 @@ public void testValidate() { 
aggBuilders.add(multiBucketAgg); // First try to point to a non-existent agg - assertThat(validate(aggBuilders, new AvgBucketPipelineAggregationBuilder("name", "invalid_agg>metric")), equalTo( - "Validation Failed: 1: " + PipelineAggregator.Parser.BUCKETS_PATH.getPreferredName() - + " aggregation does not exist for aggregation [name]: invalid_agg>metric;")); + assertThat( + validate(aggBuilders, new AvgBucketPipelineAggregationBuilder("name", "invalid_agg>metric")), + equalTo( + "Validation Failed: 1: " + + PipelineAggregator.Parser.BUCKETS_PATH.getPreferredName() + + " aggregation does not exist for aggregation [name]: invalid_agg>metric;" + ) + ); // Now try to point to a single bucket agg - assertThat(validate(aggBuilders, new AvgBucketPipelineAggregationBuilder("name", "global>metric")), equalTo( - "Validation Failed: 1: The first aggregation in " + PipelineAggregator.Parser.BUCKETS_PATH.getPreferredName() - + " must be a multi-bucket aggregation for aggregation [name] found :" + GlobalAggregationBuilder.class.getName() - + " for buckets path: global>metric;")); + assertThat( + validate(aggBuilders, new AvgBucketPipelineAggregationBuilder("name", "global>metric")), + equalTo( + "Validation Failed: 1: The first aggregation in " + + PipelineAggregator.Parser.BUCKETS_PATH.getPreferredName() + + " must be a multi-bucket aggregation for aggregation [name] found :" + + GlobalAggregationBuilder.class.getName() + + " for buckets path: global>metric;" + ) + ); // Now try to point to a valid multi-bucket agg which is valid assertThat(validate(aggBuilders, new AvgBucketPipelineAggregationBuilder("name", "terms>metric")), nullValue()); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketHelpersTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketHelpersTests.java index abffc1bbcb002..a061af9e95db9 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketHelpersTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketHelpersTests.java @@ -92,11 +92,18 @@ public Object getProperty(String containingAggName, List path) { } }; - AggregationExecutionException e = expectThrows(AggregationExecutionException.class, - () -> BucketHelpers.resolveBucketValue(agg, bucket, "foo>bar", BucketHelpers.GapPolicy.SKIP)); - - assertThat(e.getMessage(), equalTo("buckets_path must reference either a number value or a single value numeric " + - "metric aggregation, got: [Object[]] at aggregation [foo]")); + AggregationExecutionException e = expectThrows( + AggregationExecutionException.class, + () -> BucketHelpers.resolveBucketValue(agg, bucket, "foo>bar", BucketHelpers.GapPolicy.SKIP) + ); + + assertThat( + e.getMessage(), + equalTo( + "buckets_path must reference either a number value or a single value numeric " + + "metric aggregation, got: [Object[]] at aggregation [foo]" + ) + ); } public void testReturnMultiValueObject() { @@ -165,10 +172,17 @@ public Object getProperty(String containingAggName, List path) { } }; - AggregationExecutionException e = expectThrows(AggregationExecutionException.class, - () -> BucketHelpers.resolveBucketValue(agg, bucket, "foo>bar", BucketHelpers.GapPolicy.SKIP)); - - assertThat(e.getMessage(), equalTo("buckets_path must reference either a number value or a single value numeric " + - "metric aggregation, but [foo] contains multiple values. 
Please specify which to use.")); + AggregationExecutionException e = expectThrows( + AggregationExecutionException.class, + () -> BucketHelpers.resolveBucketValue(agg, bucket, "foo>bar", BucketHelpers.GapPolicy.SKIP) + ); + + assertThat( + e.getMessage(), + equalTo( + "buckets_path must reference either a number value or a single value numeric " + + "metric aggregation, but [foo] contains multiple values. Please specify which to use." + ) + ); } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketScriptAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketScriptAggregatorTests.java index c57f59a1d9507..d7a98379547af 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketScriptAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketScriptAggregatorTests.java @@ -19,8 +19,8 @@ import org.apache.lucene.search.Query; import org.apache.lucene.store.Directory; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.index.mapper.KeywordFieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.NumberFieldMapper; @@ -50,26 +50,32 @@ public class BucketScriptAggregatorTests extends AggregatorTestCase { @Override protected ScriptService getMockScriptService() { - MockScriptEngine scriptEngine = new MockScriptEngine(MockScriptEngine.NAME, + MockScriptEngine scriptEngine = new MockScriptEngine( + MockScriptEngine.NAME, Collections.singletonMap(SCRIPT_NAME, script -> script.get("the_avg")), - Collections.emptyMap()); + Collections.emptyMap() + ); Map engines = Collections.singletonMap(scriptEngine.getType(), scriptEngine); return new ScriptService(Settings.EMPTY, engines, ScriptModule.CORE_CONTEXTS); } public void testScript() throws IOException { - MappedFieldType fieldType - = new NumberFieldMapper.NumberFieldType("number_field", NumberFieldMapper.NumberType.INTEGER); + MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("number_field", NumberFieldMapper.NumberType.INTEGER); MappedFieldType fieldType1 = new KeywordFieldMapper.KeywordFieldType("the_field"); - FiltersAggregationBuilder filters = new FiltersAggregationBuilder("placeholder", new MatchAllQueryBuilder()) - .subAggregation(new TermsAggregationBuilder("the_terms").userValueTypeHint(ValueType.STRING).field("the_field") - .subAggregation(new AvgAggregationBuilder("the_avg").field("number_field"))) - .subAggregation(new BucketScriptPipelineAggregationBuilder("bucket_script", - Collections.singletonMap("the_avg", "the_terms['test1']>the_avg.value"), - new Script(ScriptType.INLINE, MockScriptEngine.NAME, SCRIPT_NAME, Collections.emptyMap()))); - + FiltersAggregationBuilder filters = new FiltersAggregationBuilder("placeholder", new MatchAllQueryBuilder()).subAggregation( + new TermsAggregationBuilder("the_terms").userValueTypeHint(ValueType.STRING) + .field("the_field") + .subAggregation(new AvgAggregationBuilder("the_avg").field("number_field")) + ) + .subAggregation( + new BucketScriptPipelineAggregationBuilder( + "bucket_script", + Collections.singletonMap("the_avg", "the_terms['test1']>the_avg.value"), + new Script(ScriptType.INLINE, MockScriptEngine.NAME, SCRIPT_NAME, Collections.emptyMap()) + ) + ); testCase(filters, new MatchAllDocsQuery(), iw -> { Document doc = new 
Document(); @@ -81,15 +87,22 @@ public void testScript() throws IOException { doc.add(new SortedSetDocValuesField("the_field", new BytesRef("test2"))); doc.add(new SortedNumericDocValuesField("number_field", 55)); iw.addDocument(doc); - }, f -> { - assertThat(((InternalSimpleValue)(f.getBuckets().get(0).getAggregations().get("bucket_script"))).value, - equalTo(19.0)); - }, fieldType, fieldType1); + }, + f -> { + assertThat(((InternalSimpleValue) (f.getBuckets().get(0).getAggregations().get("bucket_script"))).value, equalTo(19.0)); + }, + fieldType, + fieldType1 + ); } - private void testCase(FiltersAggregationBuilder aggregationBuilder, Query query, - CheckedConsumer buildIndex, - Consumer verify, MappedFieldType... fieldType) throws IOException { + private void testCase( + FiltersAggregationBuilder aggregationBuilder, + Query query, + CheckedConsumer buildIndex, + Consumer verify, + MappedFieldType... fieldType + ) throws IOException { try (Directory directory = newDirectory()) { RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketScriptPipelineAggregationBuilderTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketScriptPipelineAggregationBuilderTests.java index 6d99210f3ec23..cf021390ab8d6 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketScriptPipelineAggregationBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketScriptPipelineAggregationBuilderTests.java @@ -31,7 +31,9 @@ protected BucketScriptPipelineAggregationBuilder createTestAggregatorFactory() { } public void testNoParent() { - assertThat(validate(emptyList(), new BucketScriptPipelineAggregationBuilder("foo", emptyMap(), new Script("foo"))), - equalTo("Validation Failed: 1: bucket_script aggregation [foo] must be declared inside of another aggregation;")); + assertThat( + validate(emptyList(), new BucketScriptPipelineAggregationBuilder("foo", emptyMap(), new Script("foo"))), + equalTo("Validation Failed: 1: bucket_script aggregation [foo] must be declared inside of another aggregation;") + ); } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketScriptTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketScriptTests.java index f70cf2a87b9df..408b6c84519e6 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketScriptTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketScriptTests.java @@ -38,8 +38,12 @@ protected BucketScriptPipelineAggregationBuilder createTestAggregatorFactory() { params.put("foo", "bar"); } ScriptType type = randomFrom(ScriptType.values()); - script = new Script(type, type == ScriptType.STORED ? null : randomFrom("my_lang", Script.DEFAULT_SCRIPT_LANG), - "script", params); + script = new Script( + type, + type == ScriptType.STORED ? 
null : randomFrom("my_lang", Script.DEFAULT_SCRIPT_LANG), + "script", + params + ); } BucketScriptPipelineAggregationBuilder factory = new BucketScriptPipelineAggregationBuilder(name, bucketsPaths, script); if (randomBoolean()) { @@ -51,48 +55,54 @@ protected BucketScriptPipelineAggregationBuilder createTestAggregatorFactory() { return factory; } - public void testParseBucketPath() throws IOException { + public void testParseBucketPath() throws IOException { XContentBuilder content = XContentFactory.jsonBuilder() .startObject() - .field("buckets_path", "_count") - .startObject("script") - .field("source", "value") - .field("lang", "expression") - .endObject() + .field("buckets_path", "_count") + .startObject("script") + .field("source", "value") + .field("lang", "expression") + .endObject() .endObject(); BucketScriptPipelineAggregationBuilder builder1 = BucketScriptPipelineAggregationBuilder.PARSER.parse( - createParser(content), "count"); - assertEquals(builder1.getBucketsPaths().length , 1); + createParser(content), + "count" + ); + assertEquals(builder1.getBucketsPaths().length, 1); assertEquals(builder1.getBucketsPaths()[0], "_count"); content = XContentFactory.jsonBuilder() .startObject() - .startObject("buckets_path") - .field("path1", "_count1") - .field("path2", "_count2") - .endObject() - .startObject("script") - .field("source", "value") - .field("lang", "expression") - .endObject() + .startObject("buckets_path") + .field("path1", "_count1") + .field("path2", "_count2") + .endObject() + .startObject("script") + .field("source", "value") + .field("lang", "expression") + .endObject() .endObject(); BucketScriptPipelineAggregationBuilder builder2 = BucketScriptPipelineAggregationBuilder.PARSER.parse( - createParser(content), "count"); - assertEquals(builder2.getBucketsPaths().length , 2); + createParser(content), + "count" + ); + assertEquals(builder2.getBucketsPaths().length, 2); assertEquals(builder2.getBucketsPaths()[0], "_count1"); assertEquals(builder2.getBucketsPaths()[1], "_count2"); content = XContentFactory.jsonBuilder() .startObject() - .array("buckets_path","_count1", "_count2") - .startObject("script") - .field("source", "value") - .field("lang", "expression") - .endObject() + .array("buckets_path", "_count1", "_count2") + .startObject("script") + .field("source", "value") + .field("lang", "expression") + .endObject() .endObject(); BucketScriptPipelineAggregationBuilder builder3 = BucketScriptPipelineAggregationBuilder.PARSER.parse( - createParser(content), "count"); - assertEquals(builder3.getBucketsPaths().length , 2); + createParser(content), + "count" + ); + assertEquals(builder3.getBucketsPaths().length, 2); assertEquals(builder3.getBucketsPaths()[0], "_count1"); assertEquals(builder3.getBucketsPaths()[1], "_count2"); } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketSelectorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketSelectorTests.java index 81c419557b157..29949f24c87fa 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketSelectorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketSelectorTests.java @@ -39,8 +39,12 @@ protected BucketSelectorPipelineAggregationBuilder createTestAggregatorFactory() params.put("foo", "bar"); } ScriptType type = randomFrom(ScriptType.values()); - script = - new Script(type, type == ScriptType.STORED ? 
null : randomFrom("my_lang", Script.DEFAULT_SCRIPT_LANG), "script", params); + script = new Script( + type, + type == ScriptType.STORED ? null : randomFrom("my_lang", Script.DEFAULT_SCRIPT_LANG), + "script", + params + ); } BucketSelectorPipelineAggregationBuilder factory = new BucketSelectorPipelineAggregationBuilder(name, bucketsPaths, script); if (randomBoolean()) { @@ -50,7 +54,9 @@ protected BucketSelectorPipelineAggregationBuilder createTestAggregatorFactory() } public void testNoParent() { - assertThat(validate(emptyList(), new BucketSelectorPipelineAggregationBuilder("foo", emptyMap(), new Script("foo"))), - equalTo("Validation Failed: 1: bucket_selector aggregation [foo] must be declared inside of another aggregation;")); + assertThat( + validate(emptyList(), new BucketSelectorPipelineAggregationBuilder("foo", emptyMap(), new Script("foo"))), + equalTo("Validation Failed: 1: bucket_selector aggregation [foo] must be declared inside of another aggregation;") + ); } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketSortTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketSortTests.java index fbde38d091aa8..289898416744f 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketSortTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketSortTests.java @@ -52,32 +52,42 @@ protected BucketSortPipelineAggregationBuilder createTestAggregatorFactory() { } public void testNegativeFrom() { - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, - () -> new BucketSortPipelineAggregationBuilder("foo", Collections.emptyList()).from(-1)); + IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, + () -> new BucketSortPipelineAggregationBuilder("foo", Collections.emptyList()).from(-1) + ); assertThat(e.getMessage(), equalTo("[from] must be a non-negative integer: [-1]")); } public void testNegativeSize() { - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, - () -> new BucketSortPipelineAggregationBuilder("foo", Collections.emptyList()).size(-1)); + IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, + () -> new BucketSortPipelineAggregationBuilder("foo", Collections.emptyList()).size(-1) + ); assertThat(e.getMessage(), equalTo("[size] must be a positive integer: [-1]")); } public void testZeroSize() { - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, - () -> new BucketSortPipelineAggregationBuilder("foo", Collections.emptyList()).size(0)); + IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, + () -> new BucketSortPipelineAggregationBuilder("foo", Collections.emptyList()).size(0) + ); assertThat(e.getMessage(), equalTo("[size] must be a positive integer: [0]")); } public void testNullGapPolicy() { - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, - () -> new BucketSortPipelineAggregationBuilder("foo", Collections.emptyList()).gapPolicy(null)); + IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, + () -> new BucketSortPipelineAggregationBuilder("foo", Collections.emptyList()).gapPolicy(null) + ); assertThat(e.getMessage(), equalTo("[gap_policy] must not be null: [foo]")); } public void testNoParent() { List sorts = singletonList(new FieldSortBuilder("bar")); - assertThat(validate(emptyList(), new BucketSortPipelineAggregationBuilder("foo", sorts)), - equalTo("Validation 
Failed: 1: bucket_sort aggregation [foo] must be declared inside of another aggregation;")); + assertThat( + validate(emptyList(), new BucketSortPipelineAggregationBuilder("foo", sorts)), + equalTo("Validation Failed: 1: bucket_sort aggregation [foo] must be declared inside of another aggregation;") + ); } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/CumulativeSumAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/CumulativeSumAggregatorTests.java index 819a3cb6345fc..214768de71acd 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/CumulativeSumAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/CumulativeSumAggregatorTests.java @@ -19,8 +19,8 @@ import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.Query; import org.apache.lucene.store.Directory; -import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.common.time.DateFormatters; +import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.index.mapper.DateFieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.NumberFieldMapper; @@ -60,9 +60,10 @@ public class CumulativeSumAggregatorTests extends AggregatorTestCase { "2017-01-07T13:47:43", "2017-01-08T16:14:34", "2017-01-09T17:09:50", - "2017-01-10T22:55:46"); + "2017-01-10T22:55:46" + ); - private static final List datasetValues = Arrays.asList(1,2,3,4,5,6,7,8,9,10); + private static final List datasetValues = Arrays.asList(1, 2, 3, 4, 5, 6, 7, 8, 9, 10); public void testSimple() throws IOException { Query query = new MatchAllDocsQuery(); @@ -73,8 +74,8 @@ public void testSimple() throws IOException { aggBuilder.subAggregation(new CumulativeSumPipelineAggregationBuilder("cusum", "the_avg")); executeTestCase(query, aggBuilder, histogram -> { - assertEquals(10, ((Histogram)histogram).getBuckets().size()); - List buckets = ((Histogram)histogram).getBuckets(); + assertEquals(10, ((Histogram) histogram).getBuckets().size()); + List buckets = ((Histogram) histogram).getBuckets(); double sum = 0.0; for (Histogram.Bucket bucket : buckets) { sum += ((InternalAvg) (bucket.getAggregations().get("the_avg"))).value(); @@ -97,19 +98,21 @@ public void testDerivative() throws IOException { aggBuilder.subAggregation(new CumulativeSumPipelineAggregationBuilder("cusum", "the_deriv")); executeTestCase(query, aggBuilder, histogram -> { - assertEquals(10, ((Histogram)histogram).getBuckets().size()); - List buckets = ((Histogram)histogram).getBuckets(); + assertEquals(10, ((Histogram) histogram).getBuckets().size()); + List buckets = ((Histogram) histogram).getBuckets(); double sum = 0.0; for (int i = 0; i < buckets.size(); i++) { if (i == 0) { - assertThat(((InternalSimpleValue)(buckets.get(i).getAggregations().get("cusum"))).value(), equalTo(0.0)); - assertTrue(AggregationInspectionHelper.hasValue(((InternalSimpleValue) (buckets.get(i) - .getAggregations().get("cusum"))))); + assertThat(((InternalSimpleValue) (buckets.get(i).getAggregations().get("cusum"))).value(), equalTo(0.0)); + assertTrue( + AggregationInspectionHelper.hasValue(((InternalSimpleValue) (buckets.get(i).getAggregations().get("cusum")))) + ); } else { sum += 1.0; - assertThat(((InternalSimpleValue)(buckets.get(i).getAggregations().get("cusum"))).value(), equalTo(sum)); - assertTrue(AggregationInspectionHelper.hasValue(((InternalSimpleValue) (buckets.get(i) - 
.getAggregations().get("cusum"))))); + assertThat(((InternalSimpleValue) (buckets.get(i).getAggregations().get("cusum"))).value(), equalTo(sum)); + assertTrue( + AggregationInspectionHelper.hasValue(((InternalSimpleValue) (buckets.get(i).getAggregations().get("cusum")))) + ); } } }); @@ -123,8 +126,8 @@ public void testCount() throws IOException { aggBuilder.subAggregation(new CumulativeSumPipelineAggregationBuilder("cusum", "_count")); executeTestCase(query, aggBuilder, histogram -> { - assertEquals(10, ((Histogram)histogram).getBuckets().size()); - List buckets = ((Histogram)histogram).getBuckets(); + assertEquals(10, ((Histogram) histogram).getBuckets().size()); + List buckets = ((Histogram) histogram).getBuckets(); double sum = 1.0; for (Histogram.Bucket bucket : buckets) { assertThat(((InternalSimpleValue) (bucket.getAggregations().get("cusum"))).value(), equalTo(sum)); @@ -146,14 +149,13 @@ public void testDocCount() throws IOException { int numValueBuckets = ((maxRandomValue - minRandomValue) / interval) + 1; long[] valueCounts = new long[numValueBuckets]; - HistogramAggregationBuilder aggBuilder = new HistogramAggregationBuilder("histo") - .field(VALUE_FIELD) + HistogramAggregationBuilder aggBuilder = new HistogramAggregationBuilder("histo").field(VALUE_FIELD) .interval(interval) .extendedBounds(minRandomValue, maxRandomValue); aggBuilder.subAggregation(new CumulativeSumPipelineAggregationBuilder("cusum", "_count")); executeTestCase(query, aggBuilder, histogram -> { - List buckets = ((Histogram)histogram).getBuckets(); + List buckets = ((Histogram) histogram).getBuckets(); assertThat(buckets.size(), equalTo(numValueBuckets)); @@ -196,15 +198,14 @@ public void testMetric() throws IOException { int numValueBuckets = ((maxRandomValue - minRandomValue) / interval) + 1; long[] valueCounts = new long[numValueBuckets]; - HistogramAggregationBuilder aggBuilder = new HistogramAggregationBuilder("histo") - .field(VALUE_FIELD) + HistogramAggregationBuilder aggBuilder = new HistogramAggregationBuilder("histo").field(VALUE_FIELD) .interval(interval) .extendedBounds(minRandomValue, maxRandomValue); aggBuilder.subAggregation(new SumAggregationBuilder("sum").field(VALUE_FIELD)); aggBuilder.subAggregation(new CumulativeSumPipelineAggregationBuilder("cusum", "sum")); executeTestCase(query, aggBuilder, histogram -> { - List buckets = ((Histogram)histogram).getBuckets(); + List buckets = ((Histogram) histogram).getBuckets(); assertThat(buckets.size(), equalTo(numValueBuckets)); @@ -249,14 +250,12 @@ public void testNoBuckets() throws IOException { Query query = new MatchNoDocsQuery(); - HistogramAggregationBuilder aggBuilder = new HistogramAggregationBuilder("histo") - .field(VALUE_FIELD) - .interval(interval); + HistogramAggregationBuilder aggBuilder = new HistogramAggregationBuilder("histo").field(VALUE_FIELD).interval(interval); aggBuilder.subAggregation(new SumAggregationBuilder("sum").field(VALUE_FIELD)); aggBuilder.subAggregation(new CumulativeSumPipelineAggregationBuilder("cusum", "sum")); executeTestCase(query, aggBuilder, histogram -> { - List buckets = ((Histogram)histogram).getBuckets(); + List buckets = ((Histogram) histogram).getBuckets(); assertThat(buckets.size(), equalTo(0)); @@ -294,23 +293,26 @@ private void executeTestCase(Query query, AggregationBuilder aggBuilder, Consume }); } - private void executeTestCase(Query query, AggregationBuilder aggBuilder, Consumer verify, - CheckedConsumer setup) throws IOException { + private void executeTestCase( + Query query, + AggregationBuilder 
aggBuilder, + Consumer verify, + CheckedConsumer setup + ) throws IOException { try (Directory directory = newDirectory()) { try (RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory)) { - setup.accept(indexWriter); + setup.accept(indexWriter); } try (IndexReader indexReader = DirectoryReader.open(directory)) { IndexSearcher indexSearcher = newSearcher(indexReader, true, true); DateFieldMapper.DateFieldType fieldType = new DateFieldMapper.DateFieldType(HISTO_FIELD); - MappedFieldType valueFieldType - = new NumberFieldMapper.NumberFieldType("value_field", NumberFieldMapper.NumberType.LONG); + MappedFieldType valueFieldType = new NumberFieldMapper.NumberFieldType("value_field", NumberFieldMapper.NumberType.LONG); InternalAggregation histogram; - histogram = searchAndReduce(indexSearcher, query, aggBuilder, new MappedFieldType[]{fieldType, valueFieldType}); + histogram = searchAndReduce(indexSearcher, query, aggBuilder, new MappedFieldType[] { fieldType, valueFieldType }); verify.accept(histogram); } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/CumulativeSumTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/CumulativeSumTests.java index 6517b37ef02bd..f9272a60a94d6 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/CumulativeSumTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/CumulativeSumTests.java @@ -32,22 +32,35 @@ protected CumulativeSumPipelineAggregationBuilder createTestAggregatorFactory() } public void testValidate() throws IOException { - assertThat(validate(PipelineAggregationHelperTests.getRandomSequentiallyOrderedParentAgg(), - new CumulativeSumPipelineAggregationBuilder("name", "valid")), nullValue()); + assertThat( + validate( + PipelineAggregationHelperTests.getRandomSequentiallyOrderedParentAgg(), + new CumulativeSumPipelineAggregationBuilder("name", "valid") + ), + nullValue() + ); } public void testInvalidParent() throws IOException { AggregationBuilder parent = mock(AggregationBuilder.class); when(parent.getName()).thenReturn("name"); - assertThat(validate(parent, new CumulativeSumPipelineAggregationBuilder("name", "invalid_agg>metric")), equalTo( + assertThat( + validate(parent, new CumulativeSumPipelineAggregationBuilder("name", "invalid_agg>metric")), + equalTo( "Validation Failed: 1: cumulative_sum aggregation [name] must have a histogram, date_histogram " - + "or auto_date_histogram as parent;")); + + "or auto_date_histogram as parent;" + ) + ); } public void testNoParent() throws IOException { - assertThat(validate(emptyList(), new CumulativeSumPipelineAggregationBuilder("name", "invalid_agg>metric")), equalTo( + assertThat( + validate(emptyList(), new CumulativeSumPipelineAggregationBuilder("name", "invalid_agg>metric")), + equalTo( "Validation Failed: 1: cumulative_sum aggregation [name] must have a histogram, date_histogram " - + "or auto_date_histogram as parent but doesn't have a parent;")); + + "or auto_date_histogram as parent but doesn't have a parent;" + ) + ); } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/DerivativeAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/DerivativeAggregatorTests.java index 3c5034756cd00..902ed9729bf33 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/DerivativeAggregatorTests.java +++ 
b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/DerivativeAggregatorTests.java @@ -69,7 +69,6 @@ public class DerivativeAggregatorTests extends AggregatorTestCase { private static Double[] firstDerivValueCounts_empty_rnd; private static long numDocsEmptyIdx_rnd; - private void setupValueCounts() { numDocsEmptyIdx = 0L; numDocsEmptyIdx_rnd = 0L; @@ -122,37 +121,36 @@ private void setupValueCounts() { public void testDocCountDerivative() throws IOException { setupValueCounts(); Query query = new MatchAllDocsQuery(); - HistogramAggregationBuilder aggBuilder = new HistogramAggregationBuilder("histo") - .field(SINGLE_VALUED_FIELD_NAME).interval(interval); + HistogramAggregationBuilder aggBuilder = new HistogramAggregationBuilder("histo").field(SINGLE_VALUED_FIELD_NAME) + .interval(interval); aggBuilder.subAggregation(new DerivativePipelineAggregationBuilder("deriv", "_count")); aggBuilder.subAggregation(new DerivativePipelineAggregationBuilder("2nd_deriv", "deriv")); - executeTestCase(query, aggBuilder, - histogram -> { - assertThat(histogram, notNullValue()); - assertThat(histogram.getName(), equalTo("histo")); - List buckets = ((Histogram)histogram).getBuckets(); - assertThat(buckets.size(), equalTo(numValueBuckets)); - - for (int i = 0; i < numValueBuckets; ++i) { - Histogram.Bucket bucket = buckets.get(i); - checkBucketKeyAndDocCount("InternalBucket " + i, bucket, i * interval, valueCounts[i]); - SimpleValue docCountDeriv = bucket.getAggregations().get("deriv"); - if (i > 0) { - assertThat(docCountDeriv, notNullValue()); - assertThat(docCountDeriv.value(), equalTo((double) firstDerivValueCounts[i - 1])); - } else { - assertThat(docCountDeriv, nullValue()); - } - SimpleValue docCount2ndDeriv = bucket.getAggregations().get("2nd_deriv"); - if (i > 1) { - assertThat(docCount2ndDeriv, notNullValue()); - assertThat(docCount2ndDeriv.value(), equalTo((double) secondDerivValueCounts[i - 2])); - } else { - assertThat(docCount2ndDeriv, nullValue()); - } + executeTestCase(query, aggBuilder, histogram -> { + assertThat(histogram, notNullValue()); + assertThat(histogram.getName(), equalTo("histo")); + List buckets = ((Histogram) histogram).getBuckets(); + assertThat(buckets.size(), equalTo(numValueBuckets)); + + for (int i = 0; i < numValueBuckets; ++i) { + Histogram.Bucket bucket = buckets.get(i); + checkBucketKeyAndDocCount("InternalBucket " + i, bucket, i * interval, valueCounts[i]); + SimpleValue docCountDeriv = bucket.getAggregations().get("deriv"); + if (i > 0) { + assertThat(docCountDeriv, notNullValue()); + assertThat(docCountDeriv.value(), equalTo((double) firstDerivValueCounts[i - 1])); + } else { + assertThat(docCountDeriv, nullValue()); + } + SimpleValue docCount2ndDeriv = bucket.getAggregations().get("2nd_deriv"); + if (i > 1) { + assertThat(docCount2ndDeriv, notNullValue()); + assertThat(docCount2ndDeriv.value(), equalTo((double) secondDerivValueCounts[i - 2])); + } else { + assertThat(docCount2ndDeriv, nullValue()); } - }); + } + }); } /** @@ -161,489 +159,463 @@ public void testDocCountDerivative() throws IOException { public void testSingleValuedField_normalised() throws IOException { setupValueCounts(); Query query = new MatchAllDocsQuery(); - HistogramAggregationBuilder aggBuilder = new HistogramAggregationBuilder("histo") - .field(SINGLE_VALUED_FIELD_NAME).interval(interval).minDocCount(0); + HistogramAggregationBuilder aggBuilder = new HistogramAggregationBuilder("histo").field(SINGLE_VALUED_FIELD_NAME) + .interval(interval) + .minDocCount(0); 
aggBuilder.subAggregation(new DerivativePipelineAggregationBuilder("deriv", "_count").unit("1ms")); aggBuilder.subAggregation(new DerivativePipelineAggregationBuilder("2nd_deriv", "deriv").unit("10ms")); - executeTestCase(query, aggBuilder, - histogram -> { - assertThat(histogram, notNullValue()); - assertThat(histogram.getName(), equalTo("histo")); - List buckets = ((Histogram)histogram).getBuckets(); - assertThat(buckets.size(), equalTo(numValueBuckets)); - - for (int i = 0; i < numValueBuckets; ++i) { - Histogram.Bucket bucket = buckets.get(i); - checkBucketKeyAndDocCount("InternalBucket " + i, bucket, i * interval, valueCounts[i]); - Derivative docCountDeriv = bucket.getAggregations().get("deriv"); - if (i > 0) { - assertThat(docCountDeriv, notNullValue()); - assertThat(docCountDeriv.value(), closeTo((firstDerivValueCounts[i - 1]), 0.00001)); - assertThat(docCountDeriv.normalizedValue(), closeTo((double) (firstDerivValueCounts[i - 1]) / 5, 0.00001)); - } else { - assertThat(docCountDeriv, nullValue()); - } - Derivative docCount2ndDeriv = bucket.getAggregations().get("2nd_deriv"); - if (i > 1) { - assertThat(docCount2ndDeriv, notNullValue()); - assertThat(docCount2ndDeriv.value(), closeTo((secondDerivValueCounts[i - 2]), 0.00001)); - assertThat(docCount2ndDeriv.normalizedValue(), closeTo((double) (secondDerivValueCounts[i - 2]) * 2, 0.00001)); - } else { - assertThat(docCount2ndDeriv, nullValue()); - } + executeTestCase(query, aggBuilder, histogram -> { + assertThat(histogram, notNullValue()); + assertThat(histogram.getName(), equalTo("histo")); + List buckets = ((Histogram) histogram).getBuckets(); + assertThat(buckets.size(), equalTo(numValueBuckets)); + + for (int i = 0; i < numValueBuckets; ++i) { + Histogram.Bucket bucket = buckets.get(i); + checkBucketKeyAndDocCount("InternalBucket " + i, bucket, i * interval, valueCounts[i]); + Derivative docCountDeriv = bucket.getAggregations().get("deriv"); + if (i > 0) { + assertThat(docCountDeriv, notNullValue()); + assertThat(docCountDeriv.value(), closeTo((firstDerivValueCounts[i - 1]), 0.00001)); + assertThat(docCountDeriv.normalizedValue(), closeTo((double) (firstDerivValueCounts[i - 1]) / 5, 0.00001)); + } else { + assertThat(docCountDeriv, nullValue()); } - }); + Derivative docCount2ndDeriv = bucket.getAggregations().get("2nd_deriv"); + if (i > 1) { + assertThat(docCount2ndDeriv, notNullValue()); + assertThat(docCount2ndDeriv.value(), closeTo((secondDerivValueCounts[i - 2]), 0.00001)); + assertThat(docCount2ndDeriv.normalizedValue(), closeTo((double) (secondDerivValueCounts[i - 2]) * 2, 0.00001)); + } else { + assertThat(docCount2ndDeriv, nullValue()); + } + } + }); } - public void testSingleValueAggDerivative() throws IOException { setupValueCounts(); Query query = new MatchAllDocsQuery(); - HistogramAggregationBuilder aggBuilder = new HistogramAggregationBuilder("histo") - .field(SINGLE_VALUED_FIELD_NAME).interval(interval); + HistogramAggregationBuilder aggBuilder = new HistogramAggregationBuilder("histo").field(SINGLE_VALUED_FIELD_NAME) + .interval(interval); aggBuilder.subAggregation(new SumAggregationBuilder("sum").field(SINGLE_VALUED_FIELD_NAME)); aggBuilder.subAggregation(new DerivativePipelineAggregationBuilder("deriv", "sum")); - executeTestCase(query, aggBuilder, - histogram -> { - assertThat(histogram, notNullValue()); - assertThat(histogram.getName(), equalTo("histo")); - List buckets = ((Histogram) histogram).getBuckets(); - assertThat(buckets.size(), equalTo(numValueBuckets)); - - Object[] propertiesKeys = (Object[]) 
histogram.getProperty("_key"); - Object[] propertiesDocCounts = (Object[]) histogram.getProperty("_count"); - Object[] propertiesSumCounts = (Object[]) histogram.getProperty("sum.value"); - - Long expectedSumPreviousBucket = Long.MIN_VALUE; // start value, gets - // overwritten - for (int i = 0; i < numValueBuckets; ++i) { - Histogram.Bucket bucket = buckets.get(i); - checkBucketKeyAndDocCount("InternalBucket " + i, bucket, i * interval, valueCounts[i]); - Sum sum = bucket.getAggregations().get("sum"); - assertThat(sum, notNullValue()); - long expectedSum = valueCounts[i] * (i * interval); - assertThat(sum.getValue(), equalTo((double) expectedSum)); - SimpleValue sumDeriv = bucket.getAggregations().get("deriv"); - if (i > 0) { - assertThat(sumDeriv, notNullValue()); - long sumDerivValue = expectedSum - expectedSumPreviousBucket; - assertThat(sumDeriv.value(), equalTo((double) sumDerivValue)); - assertThat(((InternalMultiBucketAggregation.InternalBucket)bucket).getProperty("histo", - AggregationPath.parse("deriv.value").getPathElementsAsStringList()), - equalTo((double) sumDerivValue)); - } else { - assertThat(sumDeriv, nullValue()); - } - expectedSumPreviousBucket = expectedSum; - assertThat(propertiesKeys[i], equalTo((double) i * interval)); - assertThat((long) propertiesDocCounts[i], equalTo(valueCounts[i])); - assertThat((double) propertiesSumCounts[i], equalTo((double) expectedSum)); + executeTestCase(query, aggBuilder, histogram -> { + assertThat(histogram, notNullValue()); + assertThat(histogram.getName(), equalTo("histo")); + List buckets = ((Histogram) histogram).getBuckets(); + assertThat(buckets.size(), equalTo(numValueBuckets)); + + Object[] propertiesKeys = (Object[]) histogram.getProperty("_key"); + Object[] propertiesDocCounts = (Object[]) histogram.getProperty("_count"); + Object[] propertiesSumCounts = (Object[]) histogram.getProperty("sum.value"); + + Long expectedSumPreviousBucket = Long.MIN_VALUE; // start value, gets + // overwritten + for (int i = 0; i < numValueBuckets; ++i) { + Histogram.Bucket bucket = buckets.get(i); + checkBucketKeyAndDocCount("InternalBucket " + i, bucket, i * interval, valueCounts[i]); + Sum sum = bucket.getAggregations().get("sum"); + assertThat(sum, notNullValue()); + long expectedSum = valueCounts[i] * (i * interval); + assertThat(sum.getValue(), equalTo((double) expectedSum)); + SimpleValue sumDeriv = bucket.getAggregations().get("deriv"); + if (i > 0) { + assertThat(sumDeriv, notNullValue()); + long sumDerivValue = expectedSum - expectedSumPreviousBucket; + assertThat(sumDeriv.value(), equalTo((double) sumDerivValue)); + assertThat( + ((InternalMultiBucketAggregation.InternalBucket) bucket).getProperty( + "histo", + AggregationPath.parse("deriv.value").getPathElementsAsStringList() + ), + equalTo((double) sumDerivValue) + ); + } else { + assertThat(sumDeriv, nullValue()); } - }); + expectedSumPreviousBucket = expectedSum; + assertThat(propertiesKeys[i], equalTo((double) i * interval)); + assertThat((long) propertiesDocCounts[i], equalTo(valueCounts[i])); + assertThat((double) propertiesSumCounts[i], equalTo((double) expectedSum)); + } + }); } public void testMultiValueAggDerivative() throws IOException { setupValueCounts(); Query query = new MatchAllDocsQuery(); - HistogramAggregationBuilder aggBuilder = new HistogramAggregationBuilder("histo") - .field(SINGLE_VALUED_FIELD_NAME).interval(interval); + HistogramAggregationBuilder aggBuilder = new HistogramAggregationBuilder("histo").field(SINGLE_VALUED_FIELD_NAME) + .interval(interval); 
aggBuilder.subAggregation(new StatsAggregationBuilder("stats").field(SINGLE_VALUED_FIELD_NAME)); aggBuilder.subAggregation(new DerivativePipelineAggregationBuilder("deriv", "stats.sum")); - executeTestCase(query, aggBuilder, - histogram -> { - assertThat(histogram, notNullValue()); - assertThat(histogram.getName(), equalTo("histo")); - List buckets = ((Histogram) histogram).getBuckets(); - assertThat(buckets.size(), equalTo(numValueBuckets)); - - assertThat(histogram, notNullValue()); - assertThat(histogram.getName(), equalTo("histo")); - Object[] propertiesKeys = (Object[]) histogram.getProperty("_key"); - Object[] propertiesDocCounts = (Object[]) histogram.getProperty("_count"); - Object[] propertiesSumCounts = (Object[]) histogram.getProperty("stats.sum"); - - Long expectedSumPreviousBucket = Long.MIN_VALUE; // start value, gets - // overwritten - for (int i = 0; i < numValueBuckets; ++i) { - Histogram.Bucket bucket = buckets.get(i); - checkBucketKeyAndDocCount("InternalBucket " + i, bucket, i * interval, valueCounts[i]); - Stats stats = bucket.getAggregations().get("stats"); - assertThat(stats, notNullValue()); - long expectedSum = valueCounts[i] * (i * interval); - assertThat(stats.getSum(), equalTo((double) expectedSum)); - SimpleValue sumDeriv = bucket.getAggregations().get("deriv"); - if (i > 0) { - assertThat(sumDeriv, notNullValue()); - long sumDerivValue = expectedSum - expectedSumPreviousBucket; - assertThat(sumDeriv.value(), equalTo((double) sumDerivValue)); - assertThat(((InternalMultiBucketAggregation.InternalBucket)bucket).getProperty("histo", - AggregationPath.parse("deriv.value").getPathElementsAsStringList()), - equalTo((double) sumDerivValue)); - } else { - assertThat(sumDeriv, nullValue()); - } - expectedSumPreviousBucket = expectedSum; - assertThat(propertiesKeys[i], equalTo((double) i * interval)); - assertThat((long) propertiesDocCounts[i], equalTo(valueCounts[i])); - assertThat((double) propertiesSumCounts[i], equalTo((double) expectedSum)); + executeTestCase(query, aggBuilder, histogram -> { + assertThat(histogram, notNullValue()); + assertThat(histogram.getName(), equalTo("histo")); + List buckets = ((Histogram) histogram).getBuckets(); + assertThat(buckets.size(), equalTo(numValueBuckets)); + + assertThat(histogram, notNullValue()); + assertThat(histogram.getName(), equalTo("histo")); + Object[] propertiesKeys = (Object[]) histogram.getProperty("_key"); + Object[] propertiesDocCounts = (Object[]) histogram.getProperty("_count"); + Object[] propertiesSumCounts = (Object[]) histogram.getProperty("stats.sum"); + + Long expectedSumPreviousBucket = Long.MIN_VALUE; // start value, gets + // overwritten + for (int i = 0; i < numValueBuckets; ++i) { + Histogram.Bucket bucket = buckets.get(i); + checkBucketKeyAndDocCount("InternalBucket " + i, bucket, i * interval, valueCounts[i]); + Stats stats = bucket.getAggregations().get("stats"); + assertThat(stats, notNullValue()); + long expectedSum = valueCounts[i] * (i * interval); + assertThat(stats.getSum(), equalTo((double) expectedSum)); + SimpleValue sumDeriv = bucket.getAggregations().get("deriv"); + if (i > 0) { + assertThat(sumDeriv, notNullValue()); + long sumDerivValue = expectedSum - expectedSumPreviousBucket; + assertThat(sumDeriv.value(), equalTo((double) sumDerivValue)); + assertThat( + ((InternalMultiBucketAggregation.InternalBucket) bucket).getProperty( + "histo", + AggregationPath.parse("deriv.value").getPathElementsAsStringList() + ), + equalTo((double) sumDerivValue) + ); + } else { + assertThat(sumDeriv, 
nullValue()); } - }); + expectedSumPreviousBucket = expectedSum; + assertThat(propertiesKeys[i], equalTo((double) i * interval)); + assertThat((long) propertiesDocCounts[i], equalTo(valueCounts[i])); + assertThat((double) propertiesSumCounts[i], equalTo((double) expectedSum)); + } + }); } public void testUnmapped() throws IOException { setupValueCounts(); Query query = new MatchAllDocsQuery(); - HistogramAggregationBuilder aggBuilder = new HistogramAggregationBuilder("histo") - .field(SINGLE_VALUED_FIELD_NAME).interval(interval); + HistogramAggregationBuilder aggBuilder = new HistogramAggregationBuilder("histo").field(SINGLE_VALUED_FIELD_NAME) + .interval(interval); aggBuilder.subAggregation(new DerivativePipelineAggregationBuilder("deriv", "_count")); - executeTestCase(query, aggBuilder, - histogram -> { - assertThat(histogram, notNullValue()); - assertThat(histogram.getName(), equalTo("histo")); - List buckets = ((Histogram) histogram).getBuckets(); - assertThat(buckets.size(), equalTo(0)); - }, - indexWriter -> { - Document document = new Document(); - indexWriter.addDocument(document); - indexWriter.commit(); - }); + executeTestCase(query, aggBuilder, histogram -> { + assertThat(histogram, notNullValue()); + assertThat(histogram.getName(), equalTo("histo")); + List buckets = ((Histogram) histogram).getBuckets(); + assertThat(buckets.size(), equalTo(0)); + }, indexWriter -> { + Document document = new Document(); + indexWriter.addDocument(document); + indexWriter.commit(); + }); } public void testDocCountDerivativeWithGaps() throws IOException { setupValueCounts(); Query query = new MatchAllDocsQuery(); - HistogramAggregationBuilder aggBuilder = new HistogramAggregationBuilder("histo") - .field(SINGLE_VALUED_FIELD_NAME).interval(1); + HistogramAggregationBuilder aggBuilder = new HistogramAggregationBuilder("histo").field(SINGLE_VALUED_FIELD_NAME).interval(1); aggBuilder.subAggregation(new DerivativePipelineAggregationBuilder("deriv", "_count")); - executeTestCase(query, aggBuilder, - histogram -> { - assertThat(histogram, notNullValue()); - assertThat(histogram.getName(), equalTo("histo")); - List buckets = ((Histogram) histogram).getBuckets(); - assertThat(buckets.size(), equalTo(valueCounts_empty.length)); - assertThat(getTotalDocCountAcrossBuckets(buckets), equalTo(numDocsEmptyIdx)); - - for (int i = 0; i < valueCounts_empty.length; i++) { - Histogram.Bucket bucket = buckets.get(i); - checkBucketKeyAndDocCount("InternalBucket " + i, bucket, i, valueCounts_empty[i]); - SimpleValue docCountDeriv = bucket.getAggregations().get("deriv"); - if (firstDerivValueCounts_empty[i] == null) { - assertThat(docCountDeriv, nullValue()); - } else { - assertThat(docCountDeriv.value(), equalTo(firstDerivValueCounts_empty[i])); - } + executeTestCase(query, aggBuilder, histogram -> { + assertThat(histogram, notNullValue()); + assertThat(histogram.getName(), equalTo("histo")); + List buckets = ((Histogram) histogram).getBuckets(); + assertThat(buckets.size(), equalTo(valueCounts_empty.length)); + assertThat(getTotalDocCountAcrossBuckets(buckets), equalTo(numDocsEmptyIdx)); + + for (int i = 0; i < valueCounts_empty.length; i++) { + Histogram.Bucket bucket = buckets.get(i); + checkBucketKeyAndDocCount("InternalBucket " + i, bucket, i, valueCounts_empty[i]); + SimpleValue docCountDeriv = bucket.getAggregations().get("deriv"); + if (firstDerivValueCounts_empty[i] == null) { + assertThat(docCountDeriv, nullValue()); + } else { + assertThat(docCountDeriv.value(), equalTo(firstDerivValueCounts_empty[i])); } - 
}, - indexWriter -> { - Document document = new Document(); - for (int i = 0; i < valueCounts_empty.length; i++) { - for (int docs = 0; docs < valueCounts_empty[i]; docs++) { - document.add(new NumericDocValuesField(SINGLE_VALUED_FIELD_NAME, i)); - indexWriter.addDocument(document); - document.clear(); - numDocsEmptyIdx++; - } + } + }, indexWriter -> { + Document document = new Document(); + for (int i = 0; i < valueCounts_empty.length; i++) { + for (int docs = 0; docs < valueCounts_empty[i]; docs++) { + document.add(new NumericDocValuesField(SINGLE_VALUED_FIELD_NAME, i)); + indexWriter.addDocument(document); + document.clear(); + numDocsEmptyIdx++; } - indexWriter.commit(); - }); + } + indexWriter.commit(); + }); } public void testDocCountDerivativeWithGaps_random() throws IOException { setupValueCounts(); Query query = new MatchAllDocsQuery(); - HistogramAggregationBuilder aggBuilder = new HistogramAggregationBuilder("histo") - .field(SINGLE_VALUED_FIELD_NAME) + HistogramAggregationBuilder aggBuilder = new HistogramAggregationBuilder("histo").field(SINGLE_VALUED_FIELD_NAME) .interval(1) .extendedBounds(0L, numBuckets_empty_rnd - 1); aggBuilder.subAggregation( - new DerivativePipelineAggregationBuilder("deriv", "_count") - .gapPolicy(randomFrom(BucketHelpers.GapPolicy.values())) + new DerivativePipelineAggregationBuilder("deriv", "_count").gapPolicy(randomFrom(BucketHelpers.GapPolicy.values())) ); - executeTestCase(query, aggBuilder, - histogram -> { - assertThat(histogram, notNullValue()); - assertThat(histogram.getName(), equalTo("histo")); - List buckets = ((Histogram) histogram).getBuckets(); - assertThat(buckets.size(), equalTo(numBuckets_empty_rnd)); - assertThat(getTotalDocCountAcrossBuckets(buckets), equalTo(numDocsEmptyIdx_rnd)); - - for (int i = 0; i < valueCounts_empty_rnd.length; i++) { - Histogram.Bucket bucket = buckets.get(i); - checkBucketKeyAndDocCount("InternalBucket " + i, bucket, i, valueCounts_empty_rnd[i]); - SimpleValue docCountDeriv = bucket.getAggregations().get("deriv"); - if (firstDerivValueCounts_empty_rnd[i] == null) { - assertThat(docCountDeriv, nullValue()); - } else { - assertThat(docCountDeriv.value(), equalTo(firstDerivValueCounts_empty_rnd[i])); - } + executeTestCase(query, aggBuilder, histogram -> { + assertThat(histogram, notNullValue()); + assertThat(histogram.getName(), equalTo("histo")); + List buckets = ((Histogram) histogram).getBuckets(); + assertThat(buckets.size(), equalTo(numBuckets_empty_rnd)); + assertThat(getTotalDocCountAcrossBuckets(buckets), equalTo(numDocsEmptyIdx_rnd)); + + for (int i = 0; i < valueCounts_empty_rnd.length; i++) { + Histogram.Bucket bucket = buckets.get(i); + checkBucketKeyAndDocCount("InternalBucket " + i, bucket, i, valueCounts_empty_rnd[i]); + SimpleValue docCountDeriv = bucket.getAggregations().get("deriv"); + if (firstDerivValueCounts_empty_rnd[i] == null) { + assertThat(docCountDeriv, nullValue()); + } else { + assertThat(docCountDeriv.value(), equalTo(firstDerivValueCounts_empty_rnd[i])); } - }, - indexWriter -> { - Document document = new Document(); - for (int i = 0; i < numBuckets_empty_rnd; i++) { - valueCounts_empty_rnd[i] = (long) randomIntBetween(1, 10); - // make approximately half of the buckets empty - if (randomBoolean()) - valueCounts_empty_rnd[i] = 0L; - for (int docs = 0; docs < valueCounts_empty_rnd[i]; docs++) { - document.add(new NumericDocValuesField(SINGLE_VALUED_FIELD_NAME, i)); - indexWriter.addDocument(document); - document.clear(); - numDocsEmptyIdx_rnd++; - } - if (i > 0) { - 
firstDerivValueCounts_empty_rnd[i] = (double) valueCounts_empty_rnd[i] - valueCounts_empty_rnd[i - 1]; - } - indexWriter.commit(); + } + }, indexWriter -> { + Document document = new Document(); + for (int i = 0; i < numBuckets_empty_rnd; i++) { + valueCounts_empty_rnd[i] = (long) randomIntBetween(1, 10); + // make approximately half of the buckets empty + if (randomBoolean()) valueCounts_empty_rnd[i] = 0L; + for (int docs = 0; docs < valueCounts_empty_rnd[i]; docs++) { + document.add(new NumericDocValuesField(SINGLE_VALUED_FIELD_NAME, i)); + indexWriter.addDocument(document); + document.clear(); + numDocsEmptyIdx_rnd++; } - }); + if (i > 0) { + firstDerivValueCounts_empty_rnd[i] = (double) valueCounts_empty_rnd[i] - valueCounts_empty_rnd[i - 1]; + } + indexWriter.commit(); + } + }); } public void testDocCountDerivativeWithGaps_insertZeros() throws IOException { setupValueCounts(); Query query = new MatchAllDocsQuery(); - HistogramAggregationBuilder aggBuilder = new HistogramAggregationBuilder("histo") - .field(SINGLE_VALUED_FIELD_NAME) - .interval(1); + HistogramAggregationBuilder aggBuilder = new HistogramAggregationBuilder("histo").field(SINGLE_VALUED_FIELD_NAME).interval(1); aggBuilder.subAggregation( - new DerivativePipelineAggregationBuilder("deriv", "_count") - .gapPolicy(BucketHelpers.GapPolicy.INSERT_ZEROS) + new DerivativePipelineAggregationBuilder("deriv", "_count").gapPolicy(BucketHelpers.GapPolicy.INSERT_ZEROS) ); - executeTestCase(query, aggBuilder, - histogram -> { - assertThat(histogram, notNullValue()); - assertThat(histogram.getName(), equalTo("histo")); - List buckets = ((Histogram) histogram).getBuckets(); - assertThat(buckets.size(), equalTo(valueCounts_empty.length)); - assertThat(getTotalDocCountAcrossBuckets(buckets), equalTo(numDocsEmptyIdx)); - - for (int i = 0; i < valueCounts_empty.length; i++) { - Histogram.Bucket bucket = buckets.get(i); - checkBucketKeyAndDocCount("InternalBucket " + i + ": ", bucket, i, valueCounts_empty[i]); - SimpleValue docCountDeriv = bucket.getAggregations().get("deriv"); - if (firstDerivValueCounts_empty[i] == null) { - assertThat(docCountDeriv, nullValue()); - } else { - assertThat(docCountDeriv.value(), equalTo(firstDerivValueCounts_empty[i])); - } + executeTestCase(query, aggBuilder, histogram -> { + assertThat(histogram, notNullValue()); + assertThat(histogram.getName(), equalTo("histo")); + List buckets = ((Histogram) histogram).getBuckets(); + assertThat(buckets.size(), equalTo(valueCounts_empty.length)); + assertThat(getTotalDocCountAcrossBuckets(buckets), equalTo(numDocsEmptyIdx)); + + for (int i = 0; i < valueCounts_empty.length; i++) { + Histogram.Bucket bucket = buckets.get(i); + checkBucketKeyAndDocCount("InternalBucket " + i + ": ", bucket, i, valueCounts_empty[i]); + SimpleValue docCountDeriv = bucket.getAggregations().get("deriv"); + if (firstDerivValueCounts_empty[i] == null) { + assertThat(docCountDeriv, nullValue()); + } else { + assertThat(docCountDeriv.value(), equalTo(firstDerivValueCounts_empty[i])); } - }, - indexWriter -> { - Document document = new Document(); - for (int i = 0; i < valueCounts_empty.length; i++) { - for (int docs = 0; docs < valueCounts_empty[i]; docs++) { - document.add(new NumericDocValuesField(SINGLE_VALUED_FIELD_NAME, i)); - indexWriter.addDocument(document); - document.clear(); - numDocsEmptyIdx++; - } + } + }, indexWriter -> { + Document document = new Document(); + for (int i = 0; i < valueCounts_empty.length; i++) { + for (int docs = 0; docs < valueCounts_empty[i]; docs++) { + 
document.add(new NumericDocValuesField(SINGLE_VALUED_FIELD_NAME, i)); + indexWriter.addDocument(document); + document.clear(); + numDocsEmptyIdx++; } - indexWriter.commit(); - }); + } + indexWriter.commit(); + }); } public void testSingleValueAggDerivativeWithGaps() throws Exception { setupValueCounts(); Query query = new MatchAllDocsQuery(); - HistogramAggregationBuilder aggBuilder = new HistogramAggregationBuilder("histo") - .field(SINGLE_VALUED_FIELD_NAME).interval(1); + HistogramAggregationBuilder aggBuilder = new HistogramAggregationBuilder("histo").field(SINGLE_VALUED_FIELD_NAME).interval(1); aggBuilder.subAggregation(new SumAggregationBuilder("sum").field(SINGLE_VALUED_FIELD_NAME)); aggBuilder.subAggregation(new DerivativePipelineAggregationBuilder("deriv", "sum")); - executeTestCase(query, aggBuilder, - histogram -> { - assertThat(histogram, notNullValue()); - assertThat(histogram.getName(), equalTo("histo")); - List buckets = ((Histogram) histogram).getBuckets(); - assertThat(buckets.size(), equalTo(valueCounts_empty.length)); - assertThat(getTotalDocCountAcrossBuckets(buckets), equalTo(numDocsEmptyIdx)); - - double lastSumValue = Double.NaN; - for (int i = 0; i < valueCounts_empty.length; i++) { - Histogram.Bucket bucket = buckets.get(i); - checkBucketKeyAndDocCount("InternalBucket " + i, bucket, i, valueCounts_empty[i]); - Sum sum = bucket.getAggregations().get("sum"); - double thisSumValue = sum.value(); - if (bucket.getDocCount() == 0) { - thisSumValue = Double.NaN; - } - SimpleValue sumDeriv = bucket.getAggregations().get("deriv"); - if (i == 0) { - assertThat(sumDeriv, nullValue()); + executeTestCase(query, aggBuilder, histogram -> { + assertThat(histogram, notNullValue()); + assertThat(histogram.getName(), equalTo("histo")); + List buckets = ((Histogram) histogram).getBuckets(); + assertThat(buckets.size(), equalTo(valueCounts_empty.length)); + assertThat(getTotalDocCountAcrossBuckets(buckets), equalTo(numDocsEmptyIdx)); + + double lastSumValue = Double.NaN; + for (int i = 0; i < valueCounts_empty.length; i++) { + Histogram.Bucket bucket = buckets.get(i); + checkBucketKeyAndDocCount("InternalBucket " + i, bucket, i, valueCounts_empty[i]); + Sum sum = bucket.getAggregations().get("sum"); + double thisSumValue = sum.value(); + if (bucket.getDocCount() == 0) { + thisSumValue = Double.NaN; + } + SimpleValue sumDeriv = bucket.getAggregations().get("deriv"); + if (i == 0) { + assertThat(sumDeriv, nullValue()); + } else { + double expectedDerivative = thisSumValue - lastSumValue; + if (Double.isNaN(expectedDerivative)) { + assertThat(sumDeriv.value(), equalTo(expectedDerivative)); } else { - double expectedDerivative = thisSumValue - lastSumValue; - if (Double.isNaN(expectedDerivative)) { - assertThat(sumDeriv.value(), equalTo(expectedDerivative)); - } else { - assertThat(sumDeriv.value(), closeTo(expectedDerivative, 0.00001)); - } + assertThat(sumDeriv.value(), closeTo(expectedDerivative, 0.00001)); } - lastSumValue = thisSumValue; } - }, - indexWriter -> { - Document document = new Document(); - for (int i = 0; i < valueCounts_empty.length; i++) { - for (int docs = 0; docs < valueCounts_empty[i]; docs++) { - document.add(new NumericDocValuesField(SINGLE_VALUED_FIELD_NAME, i)); - indexWriter.addDocument(document); - document.clear(); - numDocsEmptyIdx++; - } + lastSumValue = thisSumValue; + } + }, indexWriter -> { + Document document = new Document(); + for (int i = 0; i < valueCounts_empty.length; i++) { + for (int docs = 0; docs < valueCounts_empty[i]; docs++) { + 
document.add(new NumericDocValuesField(SINGLE_VALUED_FIELD_NAME, i)); + indexWriter.addDocument(document); + document.clear(); + numDocsEmptyIdx++; } - indexWriter.commit(); - }); + } + indexWriter.commit(); + }); } public void testSingleValueAggDerivativeWithGaps_insertZeros() throws IOException { setupValueCounts(); Query query = new MatchAllDocsQuery(); - HistogramAggregationBuilder aggBuilder = new HistogramAggregationBuilder("histo") - .field(SINGLE_VALUED_FIELD_NAME).interval(1); + HistogramAggregationBuilder aggBuilder = new HistogramAggregationBuilder("histo").field(SINGLE_VALUED_FIELD_NAME).interval(1); aggBuilder.subAggregation(new SumAggregationBuilder("sum").field(SINGLE_VALUED_FIELD_NAME)); - aggBuilder.subAggregation( - new DerivativePipelineAggregationBuilder("deriv", "sum") - .gapPolicy(GapPolicy.INSERT_ZEROS) - ); - - executeTestCase(query, aggBuilder, - histogram -> { - assertThat(histogram, notNullValue()); - assertThat(histogram.getName(), equalTo("histo")); - List buckets = ((Histogram) histogram).getBuckets(); - assertThat(buckets.size(), equalTo(valueCounts_empty.length)); - assertThat(getTotalDocCountAcrossBuckets(buckets), equalTo(numDocsEmptyIdx)); - - double lastSumValue = Double.NaN; - for (int i = 0; i < valueCounts_empty.length; i++) { - Histogram.Bucket bucket = buckets.get(i); - checkBucketKeyAndDocCount("InternalBucket " + i, bucket, i, valueCounts_empty[i]); - Sum sum = bucket.getAggregations().get("sum"); - double thisSumValue = sum.value(); - if (bucket.getDocCount() == 0) { - thisSumValue = 0; - } - SimpleValue sumDeriv = bucket.getAggregations().get("deriv"); - if (i == 0) { - assertThat(sumDeriv, nullValue()); - } else { - double expectedDerivative = thisSumValue - lastSumValue; - assertThat(sumDeriv.value(), closeTo(expectedDerivative, 0.00001)); - } - lastSumValue = thisSumValue; + aggBuilder.subAggregation(new DerivativePipelineAggregationBuilder("deriv", "sum").gapPolicy(GapPolicy.INSERT_ZEROS)); + + executeTestCase(query, aggBuilder, histogram -> { + assertThat(histogram, notNullValue()); + assertThat(histogram.getName(), equalTo("histo")); + List buckets = ((Histogram) histogram).getBuckets(); + assertThat(buckets.size(), equalTo(valueCounts_empty.length)); + assertThat(getTotalDocCountAcrossBuckets(buckets), equalTo(numDocsEmptyIdx)); + + double lastSumValue = Double.NaN; + for (int i = 0; i < valueCounts_empty.length; i++) { + Histogram.Bucket bucket = buckets.get(i); + checkBucketKeyAndDocCount("InternalBucket " + i, bucket, i, valueCounts_empty[i]); + Sum sum = bucket.getAggregations().get("sum"); + double thisSumValue = sum.value(); + if (bucket.getDocCount() == 0) { + thisSumValue = 0; } - }, - indexWriter -> { - Document document = new Document(); - for (int i = 0; i < valueCounts_empty.length; i++) { - if (frequently()) { - indexWriter.commit(); - } - for (int docs = 0; docs < valueCounts_empty[i]; docs++) { - document.add(new NumericDocValuesField(SINGLE_VALUED_FIELD_NAME, i)); - indexWriter.addDocument(document); - document.clear(); - numDocsEmptyIdx++; - } + SimpleValue sumDeriv = bucket.getAggregations().get("deriv"); + if (i == 0) { + assertThat(sumDeriv, nullValue()); + } else { + double expectedDerivative = thisSumValue - lastSumValue; + assertThat(sumDeriv.value(), closeTo(expectedDerivative, 0.00001)); } - indexWriter.commit(); - }); + lastSumValue = thisSumValue; + } + }, indexWriter -> { + Document document = new Document(); + for (int i = 0; i < valueCounts_empty.length; i++) { + if (frequently()) { + indexWriter.commit(); + 
} + for (int docs = 0; docs < valueCounts_empty[i]; docs++) { + document.add(new NumericDocValuesField(SINGLE_VALUED_FIELD_NAME, i)); + indexWriter.addDocument(document); + document.clear(); + numDocsEmptyIdx++; + } + } + indexWriter.commit(); + }); } public void testSingleValueAggDerivativeWithGaps_random() throws IOException { setupValueCounts(); BucketHelpers.GapPolicy gapPolicy = randomFrom(GapPolicy.values()); Query query = new MatchAllDocsQuery(); - HistogramAggregationBuilder aggBuilder = new HistogramAggregationBuilder("histo") - .field(SINGLE_VALUED_FIELD_NAME).interval(1) + HistogramAggregationBuilder aggBuilder = new HistogramAggregationBuilder("histo").field(SINGLE_VALUED_FIELD_NAME) + .interval(1) .extendedBounds(0L, (long) numBuckets_empty_rnd - 1); aggBuilder.subAggregation(new SumAggregationBuilder("sum").field(SINGLE_VALUED_FIELD_NAME)); - aggBuilder.subAggregation( - new DerivativePipelineAggregationBuilder("deriv", "sum").gapPolicy(gapPolicy) - ); - executeTestCase(query, aggBuilder, - histogram -> { - assertThat(histogram, notNullValue()); - assertThat(histogram.getName(), equalTo("histo")); - List buckets = ((Histogram) histogram).getBuckets(); - assertThat(buckets.size(), equalTo(valueCounts_empty_rnd.length)); - assertThat(getTotalDocCountAcrossBuckets(buckets), equalTo(numDocsEmptyIdx_rnd)); - - double lastSumValue = Double.NaN; - for (int i = 0; i < valueCounts_empty_rnd.length; i++) { - Histogram.Bucket bucket = buckets.get(i); - checkBucketKeyAndDocCount("InternalBucket " + i, bucket, i, valueCounts_empty_rnd[i]); - Sum sum = bucket.getAggregations().get("sum"); - double thisSumValue = sum.value(); - if (bucket.getDocCount() == 0) { - switch (gapPolicy) { - case INSERT_ZEROS: - thisSumValue = 0; - break; - case KEEP_VALUES: - break; - default: - thisSumValue = Double.NaN; - } + aggBuilder.subAggregation(new DerivativePipelineAggregationBuilder("deriv", "sum").gapPolicy(gapPolicy)); + executeTestCase(query, aggBuilder, histogram -> { + assertThat(histogram, notNullValue()); + assertThat(histogram.getName(), equalTo("histo")); + List buckets = ((Histogram) histogram).getBuckets(); + assertThat(buckets.size(), equalTo(valueCounts_empty_rnd.length)); + assertThat(getTotalDocCountAcrossBuckets(buckets), equalTo(numDocsEmptyIdx_rnd)); + + double lastSumValue = Double.NaN; + for (int i = 0; i < valueCounts_empty_rnd.length; i++) { + Histogram.Bucket bucket = buckets.get(i); + checkBucketKeyAndDocCount("InternalBucket " + i, bucket, i, valueCounts_empty_rnd[i]); + Sum sum = bucket.getAggregations().get("sum"); + double thisSumValue = sum.value(); + if (bucket.getDocCount() == 0) { + switch (gapPolicy) { + case INSERT_ZEROS: + thisSumValue = 0; + break; + case KEEP_VALUES: + break; + default: + thisSumValue = Double.NaN; } - SimpleValue sumDeriv = bucket.getAggregations().get("deriv"); - if (i == 0) { - assertThat(sumDeriv, nullValue()); + } + SimpleValue sumDeriv = bucket.getAggregations().get("deriv"); + if (i == 0) { + assertThat(sumDeriv, nullValue()); + } else { + double expectedDerivative = thisSumValue - lastSumValue; + if (Double.isNaN(expectedDerivative)) { + assertThat(sumDeriv.value(), equalTo(expectedDerivative)); } else { - double expectedDerivative = thisSumValue - lastSumValue; - if (Double.isNaN(expectedDerivative)) { - assertThat(sumDeriv.value(), equalTo(expectedDerivative)); - } else { - assertThat(sumDeriv.value(), closeTo(expectedDerivative, 0.00001)); - } + assertThat(sumDeriv.value(), closeTo(expectedDerivative, 0.00001)); } - lastSumValue = 
thisSumValue; } - }, - indexWriter -> { - Document document = new Document(); - for (int i = 0; i < numBuckets_empty_rnd; i++) { - valueCounts_empty_rnd[i] = (long) randomIntBetween(1, 10); - // make approximately half of the buckets empty - if (randomBoolean()) - valueCounts_empty_rnd[i] = 0L; - for (int docs = 0; docs < valueCounts_empty_rnd[i]; docs++) { - document.add(new NumericDocValuesField(SINGLE_VALUED_FIELD_NAME, i)); - indexWriter.addDocument(document); - document.clear(); - numDocsEmptyIdx_rnd++; - } - if (i > 0) { - firstDerivValueCounts_empty_rnd[i] = (double) valueCounts_empty_rnd[i] - valueCounts_empty_rnd[i - 1]; - } - indexWriter.commit(); + lastSumValue = thisSumValue; + } + }, indexWriter -> { + Document document = new Document(); + for (int i = 0; i < numBuckets_empty_rnd; i++) { + valueCounts_empty_rnd[i] = (long) randomIntBetween(1, 10); + // make approximately half of the buckets empty + if (randomBoolean()) valueCounts_empty_rnd[i] = 0L; + for (int docs = 0; docs < valueCounts_empty_rnd[i]; docs++) { + document.add(new NumericDocValuesField(SINGLE_VALUED_FIELD_NAME, i)); + indexWriter.addDocument(document); + document.clear(); + numDocsEmptyIdx_rnd++; + } + if (i > 0) { + firstDerivValueCounts_empty_rnd[i] = (double) valueCounts_empty_rnd[i] - valueCounts_empty_rnd[i - 1]; } - }); + indexWriter.commit(); + } + }); } public void testSingleValueAggDerivative_invalidPath() throws IOException { try { Query query = new MatchAllDocsQuery(); - HistogramAggregationBuilder aggBuilder = new HistogramAggregationBuilder("histo") - .field(SINGLE_VALUED_FIELD_NAME).interval(1); + HistogramAggregationBuilder aggBuilder = new HistogramAggregationBuilder("histo").field(SINGLE_VALUED_FIELD_NAME).interval(1); aggBuilder.subAggregation( - new FiltersAggregationBuilder("filters", QueryBuilders.termQuery("tag", "foo")) - .subAggregation(new SumAggregationBuilder("sum").field(SINGLE_VALUED_FIELD_NAME)) + new FiltersAggregationBuilder("filters", QueryBuilders.termQuery("tag", "foo")).subAggregation( + new SumAggregationBuilder("sum").field(SINGLE_VALUED_FIELD_NAME) + ) ); aggBuilder.subAggregation(new SumAggregationBuilder("sum").field(SINGLE_VALUED_FIELD_NAME)); - aggBuilder.subAggregation( - new DerivativePipelineAggregationBuilder("deriv", "filters>get>sum") - ); + aggBuilder.subAggregation(new DerivativePipelineAggregationBuilder("deriv", "filters>get>sum")); executeTestCase(query, aggBuilder, history -> {}); fail("Expected an Exception but didn't get one"); } catch (Exception e) { @@ -665,8 +637,7 @@ public void testSingleValueAggDerivative_invalidPath() throws IOException { public void testDerivDerivNPE() throws IOException { try (Directory directory = newDirectory()) { Query query = new MatchAllDocsQuery(); - HistogramAggregationBuilder aggBuilder = new HistogramAggregationBuilder("histo") - .field("tick").interval(1); + HistogramAggregationBuilder aggBuilder = new HistogramAggregationBuilder("histo").field("tick").interval(1); aggBuilder.subAggregation(new AvgAggregationBuilder("avg").field("value")); aggBuilder.subAggregation(new DerivativePipelineAggregationBuilder("deriv1", "avg")); aggBuilder.subAggregation(new DerivativePipelineAggregationBuilder("deriv2", "deriv1")); @@ -702,8 +673,12 @@ private Long getTotalDocCountAcrossBuckets(List buck return count; } - private void checkBucketKeyAndDocCount(final String msg, final Histogram.Bucket bucket, final long expectedKey, - final long expectedDocCount) { + private void checkBucketKeyAndDocCount( + final String msg, + final 
Histogram.Bucket bucket, + final long expectedKey, + final long expectedDocCount + ) { assertThat(msg, bucket, notNullValue()); assertThat(msg + " key", ((Number) bucket.getKey()).longValue(), equalTo(expectedKey)); assertThat(msg + " docCount", bucket.getDocCount(), equalTo(expectedDocCount)); @@ -723,8 +698,12 @@ private void executeTestCase(Query query, AggregationBuilder aggBuilder, Consume }); } - private void executeTestCase(Query query, AggregationBuilder aggBuilder, Consumer verify, - CheckedConsumer setup) throws IOException { + private void executeTestCase( + Query query, + AggregationBuilder aggBuilder, + Consumer verify, + CheckedConsumer setup + ) throws IOException { try (Directory directory = newDirectory()) { try (RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory)) { setup.accept(indexWriter); @@ -734,8 +713,7 @@ private void executeTestCase(Query query, AggregationBuilder aggBuilder, Consume IndexSearcher indexSearcher = newSearcher(indexReader, true, true); DateFieldMapper.DateFieldType fieldType = new DateFieldMapper.DateFieldType(SINGLE_VALUED_FIELD_NAME); - MappedFieldType valueFieldType - = new NumberFieldMapper.NumberFieldType("value_field", NumberFieldMapper.NumberType.LONG); + MappedFieldType valueFieldType = new NumberFieldMapper.NumberFieldType("value_field", NumberFieldMapper.NumberType.LONG); InternalAggregation histogram = searchAndReduce(indexSearcher, query, aggBuilder, fieldType, valueFieldType); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/DerivativeTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/DerivativeTests.java index 42ab1d7883d36..a29ba8f5e8227 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/DerivativeTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/DerivativeTests.java @@ -49,8 +49,13 @@ protected DerivativePipelineAggregationBuilder createTestAggregatorFactory() { * The validation should verify the parent aggregation is allowed. 
*/ public void testValidate() throws IOException { - assertThat(validate(PipelineAggregationHelperTests.getRandomSequentiallyOrderedParentAgg(), - new DerivativePipelineAggregationBuilder("name", "valid")), nullValue()); + assertThat( + validate( + PipelineAggregationHelperTests.getRandomSequentiallyOrderedParentAgg(), + new DerivativePipelineAggregationBuilder("name", "valid") + ), + nullValue() + ); } /** @@ -64,8 +69,12 @@ public void testValidateException() throws IOException { AggregationBuilder parent = mock(AggregationBuilder.class); when(parent.getName()).thenReturn("name"); - assertThat(validate(parent, new DerivativePipelineAggregationBuilder("name", "invalid_agg>metric")), equalTo( + assertThat( + validate(parent, new DerivativePipelineAggregationBuilder("name", "invalid_agg>metric")), + equalTo( "Validation Failed: 1: derivative aggregation [name] must have a histogram, " - + "date_histogram or auto_date_histogram as parent;")); + + "date_histogram or auto_date_histogram as parent;" + ) + ); } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/ExtendedStatsBucketTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/ExtendedStatsBucketTests.java index 1bb590ed58ccd..54c18dfb1052a 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/ExtendedStatsBucketTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/ExtendedStatsBucketTests.java @@ -35,16 +35,17 @@ protected ExtendedStatsBucketPipelineAggregationBuilder doCreateTestAggregatorFa public void testSigmaFromInt() throws Exception { XContentBuilder content = XContentFactory.jsonBuilder() .startObject() - .startObject("name") - .startObject("extended_stats_bucket") - .field("sigma", 5) - .field("buckets_path", "test") - .endObject() - .endObject() + .startObject("name") + .startObject("extended_stats_bucket") + .field("sigma", 5) + .field("buckets_path", "test") + .endObject() + .endObject() .endObject(); ExtendedStatsBucketPipelineAggregationBuilder builder = (ExtendedStatsBucketPipelineAggregationBuilder) parse( - createParser(content)); + createParser(content) + ); assertThat(builder.sigma(), equalTo(5.0)); } @@ -57,15 +58,26 @@ public void testValidate() { aggBuilders.add(multiBucketAgg); // First try to point to a non-existent agg - assertThat(validate(aggBuilders, new ExtendedStatsBucketPipelineAggregationBuilder("name", "invalid_agg>metric")), equalTo( - "Validation Failed: 1: " + PipelineAggregator.Parser.BUCKETS_PATH.getPreferredName() - + " aggregation does not exist for aggregation [name]: invalid_agg>metric;")); + assertThat( + validate(aggBuilders, new ExtendedStatsBucketPipelineAggregationBuilder("name", "invalid_agg>metric")), + equalTo( + "Validation Failed: 1: " + + PipelineAggregator.Parser.BUCKETS_PATH.getPreferredName() + + " aggregation does not exist for aggregation [name]: invalid_agg>metric;" + ) + ); // Now try to point to a single bucket agg - assertThat(validate(aggBuilders, new ExtendedStatsBucketPipelineAggregationBuilder("name", "global>metric")), equalTo( - "Validation Failed: 1: The first aggregation in " + PipelineAggregator.Parser.BUCKETS_PATH.getPreferredName() - + " must be a multi-bucket aggregation for aggregation [name] found :" + GlobalAggregationBuilder.class.getName() - + " for buckets path: global>metric;")); + assertThat( + validate(aggBuilders, new ExtendedStatsBucketPipelineAggregationBuilder("name", "global>metric")), + equalTo( + "Validation Failed: 1: The first 
aggregation in " + + PipelineAggregator.Parser.BUCKETS_PATH.getPreferredName() + + " must be a multi-bucket aggregation for aggregation [name] found :" + + GlobalAggregationBuilder.class.getName() + + " for buckets path: global>metric;" + ) + ); // Now try to point to a valid multi-bucket agg assertThat(validate(aggBuilders, new ExtendedStatsBucketPipelineAggregationBuilder("name", "terms>metric")), nullValue()); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/GapPolicyTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/GapPolicyTests.java index f7e2e006035a3..beefd9787b017 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/GapPolicyTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/GapPolicyTests.java @@ -34,8 +34,7 @@ public void testFromString() { assertThat(BucketHelpers.GapPolicy.parse("skip", null), equalTo(BucketHelpers.GapPolicy.SKIP)); assertThat(BucketHelpers.GapPolicy.parse("keep_values", null), equalTo(BucketHelpers.GapPolicy.KEEP_VALUES)); ParsingException e = expectThrows(ParsingException.class, () -> BucketHelpers.GapPolicy.parse("does_not_exist", null)); - assertThat(e.getMessage(), - equalTo("Invalid gap policy: [does_not_exist], accepted values: [insert_zeros, skip, keep_values]")); + assertThat(e.getMessage(), equalTo("Invalid gap policy: [does_not_exist], accepted values: [insert_zeros, skip, keep_values]")); } @Override diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/InternalBucketMetricValueTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/InternalBucketMetricValueTests.java index 79e43f33458f1..06caaa5630a6a 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/InternalBucketMetricValueTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/InternalBucketMetricValueTests.java @@ -21,8 +21,9 @@ public class InternalBucketMetricValueTests extends InternalAggregationTestCase< @Override protected InternalBucketMetricValue createTestInstance(String name, Map metadata) { - double value = frequently() ? randomDoubleBetween(-10000, 100000, true) - : randomFrom(new Double[] { Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY, Double.NaN }); + double value = frequently() + ? 
randomDoubleBetween(-10000, 100000, true) + : randomFrom(new Double[] { Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY, Double.NaN }); String[] keys = new String[randomIntBetween(0, 5)]; for (int i = 0; i < keys.length; i++) { keys[i] = randomAlphaOfLength(10); @@ -62,30 +63,30 @@ protected InternalBucketMetricValue mutateInstance(InternalBucketMetricValue ins DocValueFormat formatter = instance.formatter(); Map metadata = instance.getMetadata(); switch (between(0, 3)) { - case 0: - name += randomAlphaOfLength(5); - break; - case 1: - if (Double.isFinite(value)) { - value += between(1, 100); - } else { - value = randomDoubleBetween(0, 100000, true); - } - break; - case 2: - keys = Arrays.copyOf(keys, keys.length + 1); - keys[keys.length - 1] = randomAlphaOfLengthBetween(1, 20); - break; - case 3: - if (metadata == null) { - metadata = new HashMap<>(1); - } else { - metadata = new HashMap<>(instance.getMetadata()); - } - metadata.put(randomAlphaOfLength(15), randomInt()); - break; - default: - throw new AssertionError("Illegal randomisation branch"); + case 0: + name += randomAlphaOfLength(5); + break; + case 1: + if (Double.isFinite(value)) { + value += between(1, 100); + } else { + value = randomDoubleBetween(0, 100000, true); + } + break; + case 2: + keys = Arrays.copyOf(keys, keys.length + 1); + keys[keys.length - 1] = randomAlphaOfLengthBetween(1, 20); + break; + case 3: + if (metadata == null) { + metadata = new HashMap<>(1); + } else { + metadata = new HashMap<>(instance.getMetadata()); + } + metadata.put(randomAlphaOfLength(15), randomInt()); + break; + default: + throw new AssertionError("Illegal randomisation branch"); } return new InternalBucketMetricValue(name, keys, value, formatter, metadata); } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/InternalDerivativeTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/InternalDerivativeTests.java index 5ffa65d41bef0..4565dc0fef996 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/InternalDerivativeTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/InternalDerivativeTests.java @@ -21,8 +21,9 @@ public class InternalDerivativeTests extends InternalAggregationTestCase metadata) { DocValueFormat formatter = randomNumericDocValueFormat(); - double value = frequently() ? randomDoubleBetween(-100000, 100000, true) - : randomFrom(new Double[] { Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY, Double.NaN }); + double value = frequently() + ? randomDoubleBetween(-100000, 100000, true) + : randomFrom(new Double[] { Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY, Double.NaN }); double normalizationFactor = frequently() ? 
randomDoubleBetween(0, 100000, true) : 0; return new InternalDerivative(name, value, normalizationFactor, formatter, metadata); } @@ -58,29 +59,29 @@ protected InternalDerivative mutateInstance(InternalDerivative instance) { DocValueFormat formatter = instance.formatter(); Map metadata = instance.getMetadata(); switch (between(0, 2)) { - case 0: - name += randomAlphaOfLength(5); - break; - case 1: - if (Double.isFinite(value)) { - value += between(1, 100); - } else { - value = randomDoubleBetween(0, 100000, true); - } - break; - case 2: - normalizationFactor += between(1, 100); - break; - case 3: - if (metadata == null) { - metadata = new HashMap<>(1); - } else { - metadata = new HashMap<>(instance.getMetadata()); - } - metadata.put(randomAlphaOfLength(15), randomInt()); - break; - default: - throw new AssertionError("Illegal randomisation branch"); + case 0: + name += randomAlphaOfLength(5); + break; + case 1: + if (Double.isFinite(value)) { + value += between(1, 100); + } else { + value = randomDoubleBetween(0, 100000, true); + } + break; + case 2: + normalizationFactor += between(1, 100); + break; + case 3: + if (metadata == null) { + metadata = new HashMap<>(1); + } else { + metadata = new HashMap<>(instance.getMetadata()); + } + metadata.put(randomAlphaOfLength(15), randomInt()); + break; + default: + throw new AssertionError("Illegal randomisation branch"); } return new InternalDerivative(name, value, normalizationFactor, formatter, metadata); } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/InternalExtendedStatsBucketTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/InternalExtendedStatsBucketTests.java index 083b89b396cf0..030c34ff920c1 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/InternalExtendedStatsBucketTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/InternalExtendedStatsBucketTests.java @@ -19,10 +19,17 @@ public class InternalExtendedStatsBucketTests extends InternalExtendedStatsTests { @Override - protected InternalExtendedStatsBucket createInstance(String name, long count, double sum, double min, - double max, double sumOfSqrs, - double sigma, DocValueFormat formatter, - Map metadata) { + protected InternalExtendedStatsBucket createInstance( + String name, + long count, + double sum, + double min, + double max, + double sumOfSqrs, + double sigma, + DocValueFormat formatter, + Map metadata + ) { return new InternalExtendedStatsBucket(name, count, sum, min, max, sumOfSqrs, sigma, formatter, metadata); } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/InternalPercentilesBucketTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/InternalPercentilesBucketTests.java index 5c7868e05b4a8..a5d33f16d477d 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/InternalPercentilesBucketTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/InternalPercentilesBucketTests.java @@ -38,8 +38,12 @@ protected InternalPercentilesBucket createTestInstance(String name, Map metadata, - double[] percents, boolean keyed) { + private static InternalPercentilesBucket createTestInstance( + String name, + Map metadata, + double[] percents, + boolean keyed + ) { final double[] percentiles = new double[percents.length]; for (int i = 0; i < percents.length; ++i) { percentiles[i] = frequently() ? 
randomDouble() : Double.NaN; @@ -47,8 +51,13 @@ private static InternalPercentilesBucket createTestInstance(String name, Map metadata, - double[] percents, double[] percentiles, boolean keyed) { + private static InternalPercentilesBucket createTestInstance( + String name, + Map metadata, + double[] percents, + double[] percentiles, + boolean keyed + ) { DocValueFormat format = randomNumericDocValueFormat(); return new InternalPercentilesBucket(name, percents, percentiles, keyed, format, metadata); } @@ -83,7 +92,7 @@ protected final void assertFromXContent(InternalPercentilesBucket aggregation, P * check that we don't rely on the percent array order and that the iterator returns the values in the original order */ public void testPercentOrder() { - final double[] percents = new double[]{ 0.50, 0.25, 0.01, 0.99, 0.60 }; + final double[] percents = new double[] { 0.50, 0.25, 0.01, 0.99, 0.60 }; InternalPercentilesBucket aggregation = createTestInstance("test", Collections.emptyMap(), percents, randomBoolean()); Iterator iterator = aggregation.iterator(); Iterator nameIterator = aggregation.valueNames().iterator(); @@ -104,12 +113,16 @@ public void testPercentOrder() { } public void testErrorOnDifferentArgumentSize() { - final double[] percents = new double[]{ 0.1, 0.2, 0.3}; - final double[] percentiles = new double[]{ 0.10, 0.2}; - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> new InternalPercentilesBucket("test", percents, - percentiles, randomBoolean(), DocValueFormat.RAW, Collections.emptyMap())); - assertEquals("The number of provided percents and percentiles didn't match. percents: [0.1, 0.2, 0.3], percentiles: [0.1, 0.2]", - e.getMessage()); + final double[] percents = new double[] { 0.1, 0.2, 0.3 }; + final double[] percentiles = new double[] { 0.10, 0.2 }; + IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, + () -> new InternalPercentilesBucket("test", percents, percentiles, randomBoolean(), DocValueFormat.RAW, Collections.emptyMap()) + ); + assertEquals( + "The number of provided percents and percentiles didn't match. percents: [0.1, 0.2, 0.3], percentiles: [0.1, 0.2]", + e.getMessage() + ); } public void testParsedAggregationIteratorOrder() throws IOException { @@ -123,7 +136,7 @@ public void testParsedAggregationIteratorOrder() throws IOException { } public void testEmptyRanksXContent() throws IOException { - double[] percents = new double[]{1,2,3}; + double[] percents = new double[] { 1, 2, 3 }; double[] percentiles = new double[3]; for (int i = 0; i < 3; ++i) { percentiles[i] = randomBoolean() ? 
Double.NaN : Double.POSITIVE_INFINITY; @@ -138,30 +151,30 @@ public void testEmptyRanksXContent() throws IOException { builder.endObject(); String expected; if (keyed) { - expected = "{\n" + - " \"values\" : {\n" + - " \"1.0\" : null,\n" + - " \"2.0\" : null,\n" + - " \"3.0\" : null\n" + - " }\n" + - "}"; + expected = "{\n" + + " \"values\" : {\n" + + " \"1.0\" : null,\n" + + " \"2.0\" : null,\n" + + " \"3.0\" : null\n" + + " }\n" + + "}"; } else { - expected = "{\n" + - " \"values\" : [\n" + - " {\n" + - " \"key\" : 1.0,\n" + - " \"value\" : null\n" + - " },\n" + - " {\n" + - " \"key\" : 2.0,\n" + - " \"value\" : null\n" + - " },\n" + - " {\n" + - " \"key\" : 3.0,\n" + - " \"value\" : null\n" + - " }\n" + - " ]\n" + - "}"; + expected = "{\n" + + " \"values\" : [\n" + + " {\n" + + " \"key\" : 1.0,\n" + + " \"value\" : null\n" + + " },\n" + + " {\n" + + " \"key\" : 2.0,\n" + + " \"value\" : null\n" + + " },\n" + + " {\n" + + " \"key\" : 3.0,\n" + + " \"value\" : null\n" + + " }\n" + + " ]\n" + + "}"; } assertThat(Strings.toString(builder), equalTo(expected)); @@ -180,27 +193,27 @@ protected InternalPercentilesBucket mutateInstance(InternalPercentilesBucket ins DocValueFormat formatter = instance.formatter(); Map metadata = instance.getMetadata(); switch (between(0, 3)) { - case 0: - name += randomAlphaOfLength(5); - break; - case 1: - percents = Arrays.copyOf(percents, percents.length); - percents[percents.length - 1] = randomDouble(); - break; - case 2: - percentiles = Arrays.copyOf(percentiles, percentiles.length); - percentiles[percentiles.length - 1] = randomDouble(); - break; - case 3: - if (metadata == null) { - metadata = new HashMap<>(1); - } else { - metadata = new HashMap<>(instance.getMetadata()); - } - metadata.put(randomAlphaOfLength(15), randomInt()); - break; - default: - throw new AssertionError("Illegal randomisation branch"); + case 0: + name += randomAlphaOfLength(5); + break; + case 1: + percents = Arrays.copyOf(percents, percents.length); + percents[percents.length - 1] = randomDouble(); + break; + case 2: + percentiles = Arrays.copyOf(percentiles, percentiles.length); + percentiles[percentiles.length - 1] = randomDouble(); + break; + case 3: + if (metadata == null) { + metadata = new HashMap<>(1); + } else { + metadata = new HashMap<>(instance.getMetadata()); + } + metadata.put(randomAlphaOfLength(15), randomInt()); + break; + default: + throw new AssertionError("Illegal randomisation branch"); } return new InternalPercentilesBucket(name, percents, percentiles, randomBoolean(), formatter, metadata); } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/InternalSimpleValueTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/InternalSimpleValueTests.java index 234e0b161a549..a73ebb20f3d49 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/InternalSimpleValueTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/InternalSimpleValueTests.java @@ -16,13 +16,14 @@ import java.util.List; import java.util.Map; -public class InternalSimpleValueTests extends InternalAggregationTestCase{ +public class InternalSimpleValueTests extends InternalAggregationTestCase { @Override protected InternalSimpleValue createTestInstance(String name, Map metadata) { DocValueFormat formatter = randomNumericDocValueFormat(); - double value = frequently() ? 
randomDoubleBetween(0, 100000, true) - : randomFrom(new Double[] { Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY, Double.NaN }); + double value = frequently() + ? randomDoubleBetween(0, 100000, true) + : randomFrom(new Double[] { Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY, Double.NaN }); return new InternalSimpleValue(name, value, formatter, metadata); } @@ -56,26 +57,26 @@ protected InternalSimpleValue mutateInstance(InternalSimpleValue instance) { DocValueFormat formatter = instance.formatter(); Map metadata = instance.getMetadata(); switch (between(0, 2)) { - case 0: - name += randomAlphaOfLength(5); - break; - case 1: - if (Double.isFinite(value)) { - value += between(1, 100); - } else { - value = randomDoubleBetween(0, 100000, true); - } - break; - case 2: - if (metadata == null) { - metadata = new HashMap<>(1); - } else { - metadata = new HashMap<>(instance.getMetadata()); - } - metadata.put(randomAlphaOfLength(15), randomInt()); - break; - default: - throw new AssertionError("Illegal randomisation branch"); + case 0: + name += randomAlphaOfLength(5); + break; + case 1: + if (Double.isFinite(value)) { + value += between(1, 100); + } else { + value = randomDoubleBetween(0, 100000, true); + } + break; + case 2: + if (metadata == null) { + metadata = new HashMap<>(1); + } else { + metadata = new HashMap<>(instance.getMetadata()); + } + metadata.put(randomAlphaOfLength(15), randomInt()); + break; + default: + throw new AssertionError("Illegal randomisation branch"); } return new InternalSimpleValue(name, value, formatter, metadata); } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/MaxBucketTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/MaxBucketTests.java index 4ecebade78bdb..f98d9c1106527 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/MaxBucketTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/MaxBucketTests.java @@ -34,15 +34,26 @@ public void testValidate() { aggBuilders.add(multiBucketAgg); // First try to point to a non-existent agg - assertThat(validate(aggBuilders, new MaxBucketPipelineAggregationBuilder("name", "invalid_agg>metric")), equalTo( - "Validation Failed: 1: " + PipelineAggregator.Parser.BUCKETS_PATH.getPreferredName() - + " aggregation does not exist for aggregation [name]: invalid_agg>metric;")); + assertThat( + validate(aggBuilders, new MaxBucketPipelineAggregationBuilder("name", "invalid_agg>metric")), + equalTo( + "Validation Failed: 1: " + + PipelineAggregator.Parser.BUCKETS_PATH.getPreferredName() + + " aggregation does not exist for aggregation [name]: invalid_agg>metric;" + ) + ); // Now try to point to a single bucket agg - assertThat(validate(aggBuilders, new MaxBucketPipelineAggregationBuilder("name", "global>metric")), equalTo( - "Validation Failed: 1: The first aggregation in " + PipelineAggregator.Parser.BUCKETS_PATH.getPreferredName() - + " must be a multi-bucket aggregation for aggregation [name] found :" + GlobalAggregationBuilder.class.getName() - + " for buckets path: global>metric;")); + assertThat( + validate(aggBuilders, new MaxBucketPipelineAggregationBuilder("name", "global>metric")), + equalTo( + "Validation Failed: 1: The first aggregation in " + + PipelineAggregator.Parser.BUCKETS_PATH.getPreferredName() + + " must be a multi-bucket aggregation for aggregation [name] found :" + + GlobalAggregationBuilder.class.getName() + + " for buckets path: global>metric;" + ) + ); // Now try to point to a 
valid multi-bucket agg assertThat(validate(aggBuilders, new MaxBucketPipelineAggregationBuilder("name", "terms>metric")), nullValue()); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/MinBucketTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/MinBucketTests.java index 6349a21be80dd..ffbf6156c2734 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/MinBucketTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/MinBucketTests.java @@ -34,15 +34,26 @@ public void testValidate() { aggBuilders.add(multiBucketAgg); // First try to point to a non-existent agg - assertThat(validate(aggBuilders, new MinBucketPipelineAggregationBuilder("name", "invalid_agg>metric")), equalTo( - "Validation Failed: 1: " + PipelineAggregator.Parser.BUCKETS_PATH.getPreferredName() - + " aggregation does not exist for aggregation [name]: invalid_agg>metric;")); + assertThat( + validate(aggBuilders, new MinBucketPipelineAggregationBuilder("name", "invalid_agg>metric")), + equalTo( + "Validation Failed: 1: " + + PipelineAggregator.Parser.BUCKETS_PATH.getPreferredName() + + " aggregation does not exist for aggregation [name]: invalid_agg>metric;" + ) + ); // Now try to point to a single bucket agg - assertThat(validate(aggBuilders, new MinBucketPipelineAggregationBuilder("name", "global>metric")), equalTo( - "Validation Failed: 1: The first aggregation in " + PipelineAggregator.Parser.BUCKETS_PATH.getPreferredName() - + " must be a multi-bucket aggregation for aggregation [name] found :" + GlobalAggregationBuilder.class.getName() - + " for buckets path: global>metric;")); + assertThat( + validate(aggBuilders, new MinBucketPipelineAggregationBuilder("name", "global>metric")), + equalTo( + "Validation Failed: 1: The first aggregation in " + + PipelineAggregator.Parser.BUCKETS_PATH.getPreferredName() + + " must be a multi-bucket aggregation for aggregation [name] found :" + + GlobalAggregationBuilder.class.getName() + + " for buckets path: global>metric;" + ) + ); // Now try to point to a valid multi-bucket agg assertThat(validate(aggBuilders, new MinBucketPipelineAggregationBuilder("name", "terms>metric")), nullValue()); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/MovFnAggrgatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/MovFnAggrgatorTests.java index ef1d60b4bc70f..541ef1cf36000 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/MovFnAggrgatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/MovFnAggrgatorTests.java @@ -63,15 +63,18 @@ public class MovFnAggrgatorTests extends AggregatorTestCase { "2017-01-07T13:47:43", "2017-01-08T16:14:34", "2017-01-09T17:09:50", - "2017-01-10T22:55:46"); + "2017-01-10T22:55:46" + ); - private static final List datasetValues = Arrays.asList(1,2,3,4,5,6,7,8,9,10); + private static final List datasetValues = Arrays.asList(1, 2, 3, 4, 5, 6, 7, 8, 9, 10); @Override protected ScriptService getMockScriptService() { - MockScriptEngine scriptEngine = new MockScriptEngine(MockScriptEngine.NAME, + MockScriptEngine scriptEngine = new MockScriptEngine( + MockScriptEngine.NAME, Collections.singletonMap("test", script -> MovingFunctions.max((double[]) script.get("_values"))), - Collections.emptyMap()); + Collections.emptyMap() + ); Map engines = Collections.singletonMap(scriptEngine.getType(), scriptEngine); return new 
ScriptService(Settings.EMPTY, engines, ScriptModule.CORE_CONTEXTS); @@ -111,9 +114,7 @@ private void check(int shift, int window, List expected) throws IOExcept }); } - private void executeTestCase(Query query, - DateHistogramAggregationBuilder aggBuilder, - Consumer verify) throws IOException { + private void executeTestCase(Query query, DateHistogramAggregationBuilder aggBuilder, Consumer verify) throws IOException { try (Directory directory = newDirectory()) { try (RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory)) { @@ -134,12 +135,10 @@ private void executeTestCase(Query query, IndexSearcher indexSearcher = newSearcher(indexReader, true, true); DateFieldMapper.DateFieldType fieldType = new DateFieldMapper.DateFieldType(aggBuilder.field()); - MappedFieldType valueFieldType - = new NumberFieldMapper.NumberFieldType("value_field", NumberFieldMapper.NumberType.LONG); + MappedFieldType valueFieldType = new NumberFieldMapper.NumberFieldType("value_field", NumberFieldMapper.NumberType.LONG); InternalDateHistogram histogram; - histogram = searchAndReduce(indexSearcher, query, aggBuilder, 1000, - new MappedFieldType[]{fieldType, valueFieldType}); + histogram = searchAndReduce(indexSearcher, query, aggBuilder, 1000, new MappedFieldType[] { fieldType, valueFieldType }); verify.accept(histogram); } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/MovFnPipelineAggregationBuilderSerializationTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/MovFnPipelineAggregationBuilderSerializationTests.java index daf04e2dc6dea..ea90342f9d103 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/MovFnPipelineAggregationBuilderSerializationTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/MovFnPipelineAggregationBuilderSerializationTests.java @@ -37,8 +37,13 @@ protected MovFnPipelineAggregationBuilder createTestAggregatorFactory() { public void testValidParent() throws IOException { Script script = new Script(Script.DEFAULT_SCRIPT_TYPE, "painless", "test", emptyMap()); - assertThat(validate(PipelineAggregationHelperTests.getRandomSequentiallyOrderedParentAgg(), - new MovFnPipelineAggregationBuilder("mov_fn", "avg", script, 3)), nullValue()); + assertThat( + validate( + PipelineAggregationHelperTests.getRandomSequentiallyOrderedParentAgg(), + new MovFnPipelineAggregationBuilder("mov_fn", "avg", script, 3) + ), + nullValue() + ); } public void testInvalidParent() throws IOException { @@ -46,16 +51,23 @@ public void testInvalidParent() throws IOException { AggregationBuilder parent = mock(AggregationBuilder.class); when(parent.getName()).thenReturn("name"); - assertThat(validate(parent, new MovFnPipelineAggregationBuilder("name", "invalid_agg>metric", script, 1)), equalTo( + assertThat( + validate(parent, new MovFnPipelineAggregationBuilder("name", "invalid_agg>metric", script, 1)), + equalTo( "Validation Failed: 1: moving_fn aggregation [name] must have a histogram, date_histogram" - + " or auto_date_histogram as parent;")); + + " or auto_date_histogram as parent;" + ) + ); } public void testNoParent() throws IOException { Script script = new Script(Script.DEFAULT_SCRIPT_TYPE, "painless", "test", Collections.emptyMap()); - assertThat(validate(emptyList(), new MovFnPipelineAggregationBuilder("name", "invalid_agg>metric", script, 1)), equalTo( + assertThat( + validate(emptyList(), new MovFnPipelineAggregationBuilder("name", "invalid_agg>metric", script, 1)), + 
equalTo( "Validation Failed: 1: moving_fn aggregation [name] must have a histogram, date_histogram" - + " or auto_date_histogram as parent but doesn't have a parent;")); + + " or auto_date_histogram as parent but doesn't have a parent;" + ) + ); } } - diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/MovFnWhitelistedFunctionTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/MovFnWhitelistedFunctionTests.java index a8a3de9450700..4abc87419faee 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/MovFnWhitelistedFunctionTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/MovFnWhitelistedFunctionTests.java @@ -286,8 +286,10 @@ public void testNullSimpleStdDev() { continue; } - double actual = MovingFunctions.stdDev(window.stream().mapToDouble(Double::doubleValue).toArray(), - MovingFunctions.unweightedAvg(window.stream().mapToDouble(Double::doubleValue).toArray())); + double actual = MovingFunctions.stdDev( + window.stream().mapToDouble(Double::doubleValue).toArray(), + MovingFunctions.unweightedAvg(window.stream().mapToDouble(Double::doubleValue).toArray()) + ); assertThat(actual, equalTo(Double.NaN)); if (randValue != null) { window.offer(randValue); @@ -297,8 +299,10 @@ public void testNullSimpleStdDev() { public void testEmptySimpleStdDev() { EvictingQueue window = new EvictingQueue<>(0); - double actual = MovingFunctions.stdDev(window.stream().mapToDouble(Double::doubleValue).toArray(), - MovingFunctions.unweightedAvg(window.stream().mapToDouble(Double::doubleValue).toArray())); + double actual = MovingFunctions.stdDev( + window.stream().mapToDouble(Double::doubleValue).toArray(), + MovingFunctions.unweightedAvg(window.stream().mapToDouble(Double::doubleValue).toArray()) + ); assertThat(actual, equalTo(Double.NaN)); } @@ -471,7 +475,7 @@ public void testHoltLinearMovAvg() { last_b = b; } - double expected = s + (0 * b) ; + double expected = s + (0 * b); double actual = MovingFunctions.holt(window.stream().mapToDouble(Double::doubleValue).toArray(), alpha, beta); assertEquals(expected, actual, 0.01 * Math.abs(expected)); window.offer(randValue); @@ -516,7 +520,7 @@ public void testHoltWintersMultiplicative() { double alpha = randomDouble(); double beta = randomDouble(); double gamma = randomDouble(); - int period = randomIntBetween(1,10); + int period = randomIntBetween(1, 10); int windowSize = randomIntBetween(period * 2, 50); // HW requires at least two periods of data EvictingQueue window = new EvictingQueue<>(windowSize); @@ -565,15 +569,21 @@ public void testHoltWintersMultiplicative() { s = alpha * (vs[i] / seasonal[i - period]) + (1.0d - alpha) * (last_s + last_b); b = beta * (s - last_s) + (1 - beta) * last_b; - seasonal[i] = gamma * (vs[i] / (last_s + last_b )) + (1 - gamma) * seasonal[i - period]; + seasonal[i] = gamma * (vs[i] / (last_s + last_b)) + (1 - gamma) * seasonal[i - period]; last_s = s; last_b = b; } int idx = window.size() - period + (0 % period); double expected = (s + (1 * b)) * seasonal[idx]; - double actual = MovingFunctions.holtWinters(window.stream().mapToDouble(Double::doubleValue).toArray(), - alpha, beta, gamma, period, true); + double actual = MovingFunctions.holtWinters( + window.stream().mapToDouble(Double::doubleValue).toArray(), + alpha, + beta, + gamma, + period, + true + ); assertEquals(expected, actual, 0.01 * Math.abs(expected)); } @@ -581,7 +591,7 @@ public void testNullHoltWintersMovAvg() { double alpha = randomDouble(); double 
beta = randomDouble(); double gamma = randomDouble(); - int period = randomIntBetween(1,10); + int period = randomIntBetween(1, 10); int numValues = randomIntBetween(1, 100); int windowSize = randomIntBetween(period * 2, 50); // HW requires at least two periods of data @@ -591,8 +601,14 @@ public void testNullHoltWintersMovAvg() { } for (int i = 0; i < numValues; i++) { - double actual = MovingFunctions.holtWinters(window.stream().mapToDouble(Double::doubleValue).toArray(), - alpha, beta, gamma, period, false); + double actual = MovingFunctions.holtWinters( + window.stream().mapToDouble(Double::doubleValue).toArray(), + alpha, + beta, + gamma, + period, + false + ); assertThat(actual, equalTo(Double.NaN)); } } @@ -601,10 +617,16 @@ public void testEmptyHoltWintersMovAvg() { double alpha = randomDouble(); double beta = randomDouble(); double gamma = randomDouble(); - int period = randomIntBetween(1,10); + int period = randomIntBetween(1, 10); EvictingQueue window = new EvictingQueue<>(0); - double actual = MovingFunctions.holtWinters(window.stream().mapToDouble(Double::doubleValue).toArray(), - alpha, beta, gamma, period, false); + double actual = MovingFunctions.holtWinters( + window.stream().mapToDouble(Double::doubleValue).toArray(), + alpha, + beta, + gamma, + period, + false + ); assertThat(actual, equalTo(Double.NaN)); } @@ -612,7 +634,7 @@ public void testHoltWintersAdditive() { double alpha = randomDouble(); double beta = randomDouble(); double gamma = randomDouble(); - int period = randomIntBetween(1,10); + int period = randomIntBetween(1, 10); int windowSize = randomIntBetween(period * 2, 50); // HW requires at least two periods of data @@ -662,15 +684,21 @@ public void testHoltWintersAdditive() { s = alpha * (vs[i] - seasonal[i - period]) + (1.0d - alpha) * (last_s + last_b); b = beta * (s - last_s) + (1 - beta) * last_b; - seasonal[i] = gamma * (vs[i] - (last_s - last_b )) + (1 - gamma) * seasonal[i - period]; + seasonal[i] = gamma * (vs[i] - (last_s - last_b)) + (1 - gamma) * seasonal[i - period]; last_s = s; last_b = b; } int idx = window.size() - period + (0 % period); double expected = s + (1 * b) + seasonal[idx]; - double actual = MovingFunctions.holtWinters(window.stream().mapToDouble(Double::doubleValue).toArray(), - alpha, beta, gamma, period, false); + double actual = MovingFunctions.holtWinters( + window.stream().mapToDouble(Double::doubleValue).toArray(), + alpha, + beta, + gamma, + period, + false + ); assertEquals(expected, actual, 0.01 * Math.abs(expected)); } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/PercentilesBucketTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/PercentilesBucketTests.java index 7604f97db9308..a0914e8367133 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/PercentilesBucketTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/PercentilesBucketTests.java @@ -40,17 +40,17 @@ protected PercentilesBucketPipelineAggregationBuilder doCreateTestAggregatorFact public void testPercentsFromMixedArray() throws Exception { XContentBuilder content = XContentFactory.jsonBuilder() .startObject() - .startObject("name") - .startObject("percentiles_bucket") - .field("buckets_path", "test") - .array("percents", 0, 20.0, 50, 75.99) - .endObject() - .endObject() + .startObject("name") + .startObject("percentiles_bucket") + .field("buckets_path", "test") + .array("percents", 0, 20.0, 50, 75.99) + .endObject() + .endObject() 
.endObject(); PercentilesBucketPipelineAggregationBuilder builder = (PercentilesBucketPipelineAggregationBuilder) parse(createParser(content)); - assertThat(builder.getPercents(), equalTo(new double[]{0.0, 20.0, 50.0, 75.99})); + assertThat(builder.getPercents(), equalTo(new double[] { 0.0, 20.0, 50.0, 75.99 })); } public void testValidate() { @@ -61,15 +61,26 @@ public void testValidate() { aggBuilders.add(multiBucketAgg); // First try to point to a non-existent agg - assertThat(validate(aggBuilders, new PercentilesBucketPipelineAggregationBuilder("name", "invalid_agg>metric")), equalTo( - "Validation Failed: 1: " + PipelineAggregator.Parser.BUCKETS_PATH.getPreferredName() - + " aggregation does not exist for aggregation [name]: invalid_agg>metric;")); + assertThat( + validate(aggBuilders, new PercentilesBucketPipelineAggregationBuilder("name", "invalid_agg>metric")), + equalTo( + "Validation Failed: 1: " + + PipelineAggregator.Parser.BUCKETS_PATH.getPreferredName() + + " aggregation does not exist for aggregation [name]: invalid_agg>metric;" + ) + ); // Now try to point to a single bucket agg - assertThat(validate(aggBuilders, new PercentilesBucketPipelineAggregationBuilder("name", "global>metric")), equalTo( - "Validation Failed: 1: The first aggregation in " + PipelineAggregator.Parser.BUCKETS_PATH.getPreferredName() - + " must be a multi-bucket aggregation for aggregation [name] found :" + GlobalAggregationBuilder.class.getName() - + " for buckets path: global>metric;")); + assertThat( + validate(aggBuilders, new PercentilesBucketPipelineAggregationBuilder("name", "global>metric")), + equalTo( + "Validation Failed: 1: The first aggregation in " + + PipelineAggregator.Parser.BUCKETS_PATH.getPreferredName() + + " must be a multi-bucket aggregation for aggregation [name] found :" + + GlobalAggregationBuilder.class.getName() + + " for buckets path: global>metric;" + ) + ); // Now try to point to a valid multi-bucket agg assertThat(validate(aggBuilders, new PercentilesBucketPipelineAggregationBuilder("name", "terms>metric")), nullValue()); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/PipelineAggregationHelperTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/PipelineAggregationHelperTests.java index 0e0eaebdcd3d9..0bc85cb175b2f 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/PipelineAggregationHelperTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/PipelineAggregationHelperTests.java @@ -8,7 +8,6 @@ package org.elasticsearch.search.aggregations.pipeline; - import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.bucket.histogram.AutoDateHistogramAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder; @@ -76,7 +75,7 @@ public static ArrayList generateHistogram(int interval, int size, do } if (emptyHisto) { - int idx = randomIntBetween(0, values.size()-1); + int idx = randomIntBetween(0, values.size() - 1); MockBucket bucket = values.get(idx); bucket.count = randomIntBetween(1, 50); bucket.docValues = new double[bucket.count]; @@ -129,7 +128,7 @@ public static double calculateMetric(double[] values, ValuesSourceAggregationBui for (double value : values) { accumulator += value; } - return values.length == 0 ? Double.NaN : accumulator / values.length ; + return values.length == 0 ? 
Double.NaN : accumulator / values.length; } return 0.0; @@ -138,9 +137,10 @@ public static double calculateMetric(double[] values, ValuesSourceAggregationBui static AggregationBuilder getRandomSequentiallyOrderedParentAgg() throws IOException { @SuppressWarnings("unchecked") Function builder = randomFrom( - HistogramAggregationBuilder::new, - DateHistogramAggregationBuilder::new, - AutoDateHistogramAggregationBuilder::new); + HistogramAggregationBuilder::new, + DateHistogramAggregationBuilder::new, + AutoDateHistogramAggregationBuilder::new + ); return builder.apply("name"); } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/SerialDifferenceTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/SerialDifferenceTests.java index 3b580eaf94668..36fa17461f6e2 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/SerialDifferenceTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/SerialDifferenceTests.java @@ -46,8 +46,13 @@ protected SerialDiffPipelineAggregationBuilder createTestAggregatorFactory() { * The validation should verify the parent aggregation is allowed. */ public void testValidate() throws IOException { - assertThat(validate(PipelineAggregationHelperTests.getRandomSequentiallyOrderedParentAgg(), - new SerialDiffPipelineAggregationBuilder("name", "valid")), nullValue()); + assertThat( + validate( + PipelineAggregationHelperTests.getRandomSequentiallyOrderedParentAgg(), + new SerialDiffPipelineAggregationBuilder("name", "valid") + ), + nullValue() + ); } public void testInvalidParent() throws IOException { @@ -55,14 +60,22 @@ public void testInvalidParent() throws IOException { aggBuilders.add(createTestAggregatorFactory()); AggregationBuilder parent = mock(AggregationBuilder.class); when(parent.getName()).thenReturn("name"); - assertThat(validate(parent, new SerialDiffPipelineAggregationBuilder("name", "invalid_agg>metric")), equalTo( + assertThat( + validate(parent, new SerialDiffPipelineAggregationBuilder("name", "invalid_agg>metric")), + equalTo( "Validation Failed: 1: serial_diff aggregation [name] must have a histogram, " - + "date_histogram or auto_date_histogram as parent;")); + + "date_histogram or auto_date_histogram as parent;" + ) + ); } public void testNoParent() { - assertThat(validate(emptyList(), new SerialDiffPipelineAggregationBuilder("name", "invalid_agg>metric")), equalTo( + assertThat( + validate(emptyList(), new SerialDiffPipelineAggregationBuilder("name", "invalid_agg>metric")), + equalTo( "Validation Failed: 1: serial_diff aggregation [name] must have a histogram, " - + "date_histogram or auto_date_histogram as parent but doesn't have a parent;")); + + "date_histogram or auto_date_histogram as parent but doesn't have a parent;" + ) + ); } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/StatsBucketTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/StatsBucketTests.java index 96e5947703ee3..e027dbc9eb024 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/StatsBucketTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/StatsBucketTests.java @@ -22,8 +22,7 @@ public class StatsBucketTests extends AbstractBucketMetricsTestCase { @Override - protected StatsBucketPipelineAggregationBuilder doCreateTestAggregatorFactory(String name, - String bucketsPath) { + protected StatsBucketPipelineAggregationBuilder 
doCreateTestAggregatorFactory(String name, String bucketsPath) { return new StatsBucketPipelineAggregationBuilder(name, bucketsPath); } @@ -35,15 +34,26 @@ public void testValidate() { aggBuilders.add(multiBucketAgg); // First try to point to a non-existent agg - assertThat(validate(aggBuilders, new StatsBucketPipelineAggregationBuilder("name", "invalid_agg>metric")), equalTo( - "Validation Failed: 1: " + PipelineAggregator.Parser.BUCKETS_PATH.getPreferredName() - + " aggregation does not exist for aggregation [name]: invalid_agg>metric;")); + assertThat( + validate(aggBuilders, new StatsBucketPipelineAggregationBuilder("name", "invalid_agg>metric")), + equalTo( + "Validation Failed: 1: " + + PipelineAggregator.Parser.BUCKETS_PATH.getPreferredName() + + " aggregation does not exist for aggregation [name]: invalid_agg>metric;" + ) + ); // Now try to point to a single bucket agg - assertThat(validate(aggBuilders, new StatsBucketPipelineAggregationBuilder("name", "global>metric")), equalTo( - "Validation Failed: 1: The first aggregation in " + PipelineAggregator.Parser.BUCKETS_PATH.getPreferredName() - + " must be a multi-bucket aggregation for aggregation [name] found :" + GlobalAggregationBuilder.class.getName() - + " for buckets path: global>metric;")); + assertThat( + validate(aggBuilders, new StatsBucketPipelineAggregationBuilder("name", "global>metric")), + equalTo( + "Validation Failed: 1: The first aggregation in " + + PipelineAggregator.Parser.BUCKETS_PATH.getPreferredName() + + " must be a multi-bucket aggregation for aggregation [name] found :" + + GlobalAggregationBuilder.class.getName() + + " for buckets path: global>metric;" + ) + ); // Now try to point to a valid multi-bucket agg assertThat(validate(aggBuilders, new StatsBucketPipelineAggregationBuilder("name", "terms>metric")), nullValue()); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/SumBucketTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/SumBucketTests.java index 14671a3c08340..017110719a17b 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/SumBucketTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/SumBucketTests.java @@ -34,15 +34,26 @@ public void testValidate() { aggBuilders.add(multiBucketAgg); // First try to point to a non-existent agg - assertThat(validate(aggBuilders, new SumBucketPipelineAggregationBuilder("name", "invalid_agg>metric")), equalTo( - "Validation Failed: 1: " + PipelineAggregator.Parser.BUCKETS_PATH.getPreferredName() - + " aggregation does not exist for aggregation [name]: invalid_agg>metric;")); + assertThat( + validate(aggBuilders, new SumBucketPipelineAggregationBuilder("name", "invalid_agg>metric")), + equalTo( + "Validation Failed: 1: " + + PipelineAggregator.Parser.BUCKETS_PATH.getPreferredName() + + " aggregation does not exist for aggregation [name]: invalid_agg>metric;" + ) + ); // Now try to point to a single bucket agg - assertThat(validate(aggBuilders, new SumBucketPipelineAggregationBuilder("name", "global>metric")), equalTo( - "Validation Failed: 1: The first aggregation in " + PipelineAggregator.Parser.BUCKETS_PATH.getPreferredName() - + " must be a multi-bucket aggregation for aggregation [name] found :" + GlobalAggregationBuilder.class.getName() - + " for buckets path: global>metric;")); + assertThat( + validate(aggBuilders, new SumBucketPipelineAggregationBuilder("name", "global>metric")), + equalTo( + "Validation Failed: 1: The first aggregation in 
" + + PipelineAggregator.Parser.BUCKETS_PATH.getPreferredName() + + " must be a multi-bucket aggregation for aggregation [name] found :" + + GlobalAggregationBuilder.class.getName() + + " for buckets path: global>metric;" + ) + ); // Now try to point to a valid multi-bucket agg assertThat(validate(aggBuilders, new SumBucketPipelineAggregationBuilder("name", "terms>metric")), nullValue()); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/support/CoreValuesSourceTypeTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/support/CoreValuesSourceTypeTests.java index 1433b6025e1a1..9655a353da8e3 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/support/CoreValuesSourceTypeTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/support/CoreValuesSourceTypeTests.java @@ -29,8 +29,7 @@ public void testFromString() { assertThat(CoreValuesSourceType.fromString("keyword"), equalTo(CoreValuesSourceType.KEYWORD)); assertThat(CoreValuesSourceType.fromString("geopoint"), equalTo(CoreValuesSourceType.GEOPOINT)); assertThat(CoreValuesSourceType.fromString("range"), equalTo(CoreValuesSourceType.RANGE)); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, - () -> CoreValuesSourceType.fromString("does_not_exist")); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> CoreValuesSourceType.fromString("does_not_exist")); assertThat( e.getMessage(), equalTo("No enum constant org.elasticsearch.search.aggregations.support.CoreValuesSourceType.DOES_NOT_EXIST") @@ -98,7 +97,6 @@ public void testDatePrepareRoundingWithDocAndQuery() throws IOException { }); } - private MapperService dateMapperService() throws IOException { return createMapperService(fieldMapping(b -> b.field("type", "date"))); } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/support/IncludeExcludeTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/support/IncludeExcludeTests.java index 875d9008bb098..70697d07b91e5 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/support/IncludeExcludeTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/support/IncludeExcludeTests.java @@ -31,16 +31,12 @@ public class IncludeExcludeTests extends ESTestCase { public void testEmptyTermsWithOrds() throws IOException { - IncludeExclude inexcl = new IncludeExclude( - new TreeSet<>(Collections.singleton(new BytesRef("foo"))), - null); + IncludeExclude inexcl = new IncludeExclude(new TreeSet<>(Collections.singleton(new BytesRef("foo"))), null); OrdinalsFilter filter = inexcl.convertToOrdinalsFilter(DocValueFormat.RAW); LongBitSet acceptedOrds = filter.acceptedGlobalOrdinals(DocValues.emptySortedSet()); assertEquals(0, acceptedOrds.length()); - inexcl = new IncludeExclude( - null, - new TreeSet<>(Collections.singleton(new BytesRef("foo")))); + inexcl = new IncludeExclude(null, new TreeSet<>(Collections.singleton(new BytesRef("foo")))); filter = inexcl.convertToOrdinalsFilter(DocValueFormat.RAW); acceptedOrds = filter.acceptedGlobalOrdinals(DocValues.emptySortedSet()); assertEquals(0, acceptedOrds.length()); @@ -91,8 +87,8 @@ public long getValueCount() { } public void testTermAccept() throws IOException { - String[] fooSet = {"foo"}; - String[] barSet = {"bar"}; + String[] fooSet = { "foo" }; + String[] barSet = { "bar" }; String fooRgx = "f.*"; String barRgx = "b.*"; @@ -242,8 +238,7 @@ private IncludeExclude serialize(IncludeExclude incExc, ParseField 
field) throws } else if (field.getPreferredName().equalsIgnoreCase("exclude")) { return IncludeExclude.parseExclude(parser); } else { - throw new IllegalArgumentException( - "Unexpected field name serialized in test: " + field.getPreferredName()); + throw new IllegalArgumentException("Unexpected field name serialized in test: " + field.getPreferredName()); } } } @@ -348,7 +343,7 @@ private IncludeExclude serializeMixedRegex(IncludeExclude incExc) throws IOExcep } public void testInvalidIncludeExcludeCombination() { - String[] values = {"foo"}; + String[] values = { "foo" }; String regex = "foo"; expectThrows(IllegalArgumentException.class, () -> new IncludeExclude((String) null, null, null, null)); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/support/MissingValuesTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/support/MissingValuesTests.java index 4f3076a76ef02..5e5f5bb72c238 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/support/MissingValuesTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/support/MissingValuesTests.java @@ -97,7 +97,7 @@ public void testMissingOrds() throws IOException { ords[i][j] = j; } for (int j = ords[i].length - 1; j >= 0; --j) { - final int maxOrd = j == ords[i].length - 1 ? numOrds : ords[i][j+1]; + final int maxOrd = j == ords[i].length - 1 ? numOrds : ords[i][j + 1]; ords[i][j] = TestUtil.nextInt(random(), ords[i][j], maxOrd - 1); } } @@ -147,8 +147,7 @@ public long nextOrd() { assertTrue(withMissingReplaced.advanceExact(i)); if (ords[i].length > 0) { for (int ord : ords[i]) { - assertEquals(values[ord], - withMissingReplaced.lookupOrd(withMissingReplaced.nextOrd())); + assertEquals(values[ord], withMissingReplaced.lookupOrd(withMissingReplaced.nextOrd())); } assertEquals(SortedSetDocValues.NO_MORE_ORDS, withMissingReplaced.nextOrd()); } else { @@ -202,7 +201,11 @@ private void doTestGlobalMapping(BytesRef[] values, BytesRef[] globalValues, Byt SortedSetDocValues sortedGlobalValues = asOrds(globalValues); LongUnaryOperator withMissingSegmentToGlobalOrd = MissingValues.getGlobalMapping( - sortedValues, sortedGlobalValues, segmentToGlobalOrd, missing); + sortedValues, + sortedGlobalValues, + segmentToGlobalOrd, + missing + ); SortedSetDocValues withMissingValues = MissingValues.replaceMissing(sortedValues, missing); SortedSetDocValues withMissingGlobalValues = MissingValues.replaceMissing(sortedGlobalValues, missing); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/support/MultiValuesSourceFieldConfigTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/support/MultiValuesSourceFieldConfigTests.java index c4bf8a9a02e6b..e74ad00f277b1 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/support/MultiValuesSourceFieldConfigTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/support/MultiValuesSourceFieldConfigTests.java @@ -64,20 +64,20 @@ public void testMissingFieldScript() { } public void testBothFieldScript() { - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, - () -> new MultiValuesSourceFieldConfig.Builder().setFieldName("foo").setScript(new Script("foo")).build()); + IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, + () -> new MultiValuesSourceFieldConfig.Builder().setFieldName("foo").setScript(new Script("foo")).build() + ); assertThat(e.getMessage(), equalTo("[field] and [script] cannot both be configured. 
Please specify one or the other.")); } @Override protected NamedWriteableRegistry getNamedWriteableRegistry() { - return new NamedWriteableRegistry(new SearchModule(Settings.EMPTY, Collections.emptyList()) - .getNamedWriteables()); + return new NamedWriteableRegistry(new SearchModule(Settings.EMPTY, Collections.emptyList()).getNamedWriteables()); } @Override protected NamedXContentRegistry xContentRegistry() { - return new NamedXContentRegistry(new SearchModule(Settings.EMPTY, Collections.emptyList()) - .getNamedXContents()); + return new NamedXContentRegistry(new SearchModule(Settings.EMPTY, Collections.emptyList()).getNamedXContents()); } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/support/ScriptValuesTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/support/ScriptValuesTests.java index 17b3012c8fdd8..cd9d8dfb5840b 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/support/ScriptValuesTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/support/ScriptValuesTests.java @@ -60,8 +60,7 @@ public Object execute() { } @Override - public void setScorer(Scorable scorer) { - } + public void setScorer(Scorable scorer) {} @Override public void setDocument(int doc) { diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/support/ValueTypeTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/support/ValueTypeTests.java index 146f22ba5e28a..1f244e51024ad 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/support/ValueTypeTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/support/ValueTypeTests.java @@ -33,7 +33,6 @@ public void testCompatibility() { assertTrue(ValueType.DOUBLE.isA(ValueType.DATE)); assertTrue(ValueType.DOUBLE.isA(ValueType.DOUBLE)); - assertTrue(ValueType.LONG.isA(ValueType.NUMERIC)); assertTrue(ValueType.LONG.isA(ValueType.NUMBER)); assertTrue(ValueType.LONG.isA(ValueType.LONG)); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/support/ValuesSourceRegistryTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/support/ValuesSourceRegistryTests.java index 31716f1f074c6..5bbee314d5f22 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/support/ValuesSourceRegistryTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/support/ValuesSourceRegistryTests.java @@ -52,10 +52,7 @@ public void testAggregatorNotFoundException() { "bogus", HistogramAggregatorSupplier.class ); - ValuesSourceRegistry registry = new ValuesSourceRegistry( - Map.of(key, List.of()), - null - ); + ValuesSourceRegistry registry = new ValuesSourceRegistry(Map.of(key, List.of()), null); expectThrows(IllegalArgumentException.class, () -> registry.getAggregator(key, fieldOnly)); expectThrows(IllegalArgumentException.class, () -> registry.getAggregator(key, scriptOnly)); } diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/AnalyticsPlugin.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/AnalyticsPlugin.java index d36289638bf79..86728ca2bcbb5 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/AnalyticsPlugin.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/AnalyticsPlugin.java @@ -68,26 +68,32 @@ public class AnalyticsPlugin extends Plugin implements SearchPlugin, ActionPlugin, MapperPlugin { private final AnalyticsUsage usage = new 
AnalyticsUsage(); - public AnalyticsPlugin() { } + public AnalyticsPlugin() {} @Override public List getPipelineAggregations() { List pipelineAggs = new ArrayList<>(); - pipelineAggs.add(new PipelineAggregationSpec( - CumulativeCardinalityPipelineAggregationBuilder.NAME, - CumulativeCardinalityPipelineAggregationBuilder::new, - usage.track(AnalyticsStatsAction.Item.CUMULATIVE_CARDINALITY, - CumulativeCardinalityPipelineAggregationBuilder.PARSER))); - pipelineAggs.add(new PipelineAggregationSpec( - MovingPercentilesPipelineAggregationBuilder.NAME, - MovingPercentilesPipelineAggregationBuilder::new, - usage.track(AnalyticsStatsAction.Item.MOVING_PERCENTILES, - MovingPercentilesPipelineAggregationBuilder.PARSER))); - pipelineAggs.add(new PipelineAggregationSpec( - NormalizePipelineAggregationBuilder.NAME, - NormalizePipelineAggregationBuilder::new, - usage.track(AnalyticsStatsAction.Item.NORMALIZE, - NormalizePipelineAggregationBuilder.PARSER))); + pipelineAggs.add( + new PipelineAggregationSpec( + CumulativeCardinalityPipelineAggregationBuilder.NAME, + CumulativeCardinalityPipelineAggregationBuilder::new, + usage.track(AnalyticsStatsAction.Item.CUMULATIVE_CARDINALITY, CumulativeCardinalityPipelineAggregationBuilder.PARSER) + ) + ); + pipelineAggs.add( + new PipelineAggregationSpec( + MovingPercentilesPipelineAggregationBuilder.NAME, + MovingPercentilesPipelineAggregationBuilder::new, + usage.track(AnalyticsStatsAction.Item.MOVING_PERCENTILES, MovingPercentilesPipelineAggregationBuilder.PARSER) + ) + ); + pipelineAggs.add( + new PipelineAggregationSpec( + NormalizePipelineAggregationBuilder.NAME, + NormalizePipelineAggregationBuilder::new, + usage.track(AnalyticsStatsAction.Item.NORMALIZE, NormalizePipelineAggregationBuilder.PARSER) + ) + ); return pipelineAggs; } @@ -97,39 +103,33 @@ public List getAggregations() { new AggregationSpec( StringStatsAggregationBuilder.NAME, StringStatsAggregationBuilder::new, - usage.track(AnalyticsStatsAction.Item.STRING_STATS, StringStatsAggregationBuilder.PARSER)) - .addResultReader(InternalStringStats::new) - .setAggregatorRegistrar(StringStatsAggregationBuilder::registerAggregators), + usage.track(AnalyticsStatsAction.Item.STRING_STATS, StringStatsAggregationBuilder.PARSER) + ).addResultReader(InternalStringStats::new).setAggregatorRegistrar(StringStatsAggregationBuilder::registerAggregators), new AggregationSpec( BoxplotAggregationBuilder.NAME, BoxplotAggregationBuilder::new, - usage.track(AnalyticsStatsAction.Item.BOXPLOT, BoxplotAggregationBuilder.PARSER)) - .addResultReader(InternalBoxplot::new) - .setAggregatorRegistrar(BoxplotAggregationBuilder::registerAggregators), + usage.track(AnalyticsStatsAction.Item.BOXPLOT, BoxplotAggregationBuilder.PARSER) + ).addResultReader(InternalBoxplot::new).setAggregatorRegistrar(BoxplotAggregationBuilder::registerAggregators), new AggregationSpec( TopMetricsAggregationBuilder.NAME, TopMetricsAggregationBuilder::new, - usage.track(AnalyticsStatsAction.Item.TOP_METRICS, TopMetricsAggregationBuilder.PARSER)) - .addResultReader(InternalTopMetrics::new) - .setAggregatorRegistrar(TopMetricsAggregationBuilder::registerAggregators), + usage.track(AnalyticsStatsAction.Item.TOP_METRICS, TopMetricsAggregationBuilder.PARSER) + ).addResultReader(InternalTopMetrics::new).setAggregatorRegistrar(TopMetricsAggregationBuilder::registerAggregators), new AggregationSpec( TTestAggregationBuilder.NAME, TTestAggregationBuilder::new, - usage.track(AnalyticsStatsAction.Item.T_TEST, TTestAggregationBuilder.PARSER)) - 
.addResultReader(InternalTTest::new) - .setAggregatorRegistrar(TTestAggregationBuilder::registerUsage), + usage.track(AnalyticsStatsAction.Item.T_TEST, TTestAggregationBuilder.PARSER) + ).addResultReader(InternalTTest::new).setAggregatorRegistrar(TTestAggregationBuilder::registerUsage), new AggregationSpec( RateAggregationBuilder.NAME, RateAggregationBuilder::new, - usage.track(AnalyticsStatsAction.Item.RATE, RateAggregationBuilder.PARSER)) - .addResultReader(InternalRate::new) - .setAggregatorRegistrar(RateAggregationBuilder::registerAggregators), + usage.track(AnalyticsStatsAction.Item.RATE, RateAggregationBuilder.PARSER) + ).addResultReader(InternalRate::new).setAggregatorRegistrar(RateAggregationBuilder::registerAggregators), new AggregationSpec( MultiTermsAggregationBuilder.NAME, MultiTermsAggregationBuilder::new, - usage.track(AnalyticsStatsAction.Item.MULTI_TERMS, MultiTermsAggregationBuilder.PARSER)) - .addResultReader(InternalMultiTerms::new) - .setAggregatorRegistrar(MultiTermsAggregationBuilder::registerAggregators) + usage.track(AnalyticsStatsAction.Item.MULTI_TERMS, MultiTermsAggregationBuilder.PARSER) + ).addResultReader(InternalMultiTerms::new).setAggregatorRegistrar(MultiTermsAggregationBuilder::registerAggregators) ); } @@ -138,7 +138,8 @@ public List getAggregations() { return Arrays.asList( new ActionHandler<>(XPackUsageFeatureAction.ANALYTICS, AnalyticsUsageTransportAction.class), new ActionHandler<>(XPackInfoFeatureAction.ANALYTICS, AnalyticsInfoTransportAction.class), - new ActionHandler<>(AnalyticsStatsAction.INSTANCE, TransportAnalyticsStatsAction.class)); + new ActionHandler<>(AnalyticsStatsAction.INSTANCE, TransportAnalyticsStatsAction.class) + ); } @Override @@ -167,10 +168,19 @@ public List> getAggregationExtentions() { } @Override - public Collection createComponents(Client client, ClusterService clusterService, ThreadPool threadPool, - ResourceWatcherService resourceWatcherService, ScriptService scriptService, NamedXContentRegistry xContentRegistry, - Environment environment, NodeEnvironment nodeEnvironment, NamedWriteableRegistry namedWriteableRegistry, - IndexNameExpressionResolver indexNameExpressionResolver, Supplier repositoriesServiceSupplier) { + public Collection createComponents( + Client client, + ClusterService clusterService, + ThreadPool threadPool, + ResourceWatcherService resourceWatcherService, + ScriptService scriptService, + NamedXContentRegistry xContentRegistry, + Environment environment, + NodeEnvironment nodeEnvironment, + NamedWriteableRegistry namedWriteableRegistry, + IndexNameExpressionResolver indexNameExpressionResolver, + Supplier repositoriesServiceSupplier + ) { return singletonList(usage); } diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/AnalyticsUsage.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/AnalyticsUsage.java index 1611ef619b2a3..5c7a90ff04ec7 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/AnalyticsUsage.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/AnalyticsUsage.java @@ -9,8 +9,8 @@ import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.xcontent.ContextParser; -import org.elasticsearch.xpack.core.common.stats.EnumCounters; import org.elasticsearch.xpack.core.analytics.action.AnalyticsStatsAction; +import org.elasticsearch.xpack.core.common.stats.EnumCounters; /** * Tracks usage of the Analytics aggregations. 
@@ -19,8 +19,7 @@ public class AnalyticsUsage { private final EnumCounters counters = new EnumCounters<>(AnalyticsStatsAction.Item.class); - public AnalyticsUsage() { - } + public AnalyticsUsage() {} /** * Track successful parsing. diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/action/AnalyticsUsageTransportAction.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/action/AnalyticsUsageTransportAction.java index a01151616b9d1..2a82649638138 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/action/AnalyticsUsageTransportAction.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/action/AnalyticsUsageTransportAction.java @@ -27,21 +27,41 @@ public class AnalyticsUsageTransportAction extends XPackUsageFeatureTransportAct private final Client client; @Inject - public AnalyticsUsageTransportAction(TransportService transportService, ClusterService clusterService, ThreadPool threadPool, - ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, - Client client) { - super(XPackUsageFeatureAction.ANALYTICS.name(), transportService, clusterService, - threadPool, actionFilters, indexNameExpressionResolver); + public AnalyticsUsageTransportAction( + TransportService transportService, + ClusterService clusterService, + ThreadPool threadPool, + ActionFilters actionFilters, + IndexNameExpressionResolver indexNameExpressionResolver, + Client client + ) { + super( + XPackUsageFeatureAction.ANALYTICS.name(), + transportService, + clusterService, + threadPool, + actionFilters, + indexNameExpressionResolver + ); this.client = client; } @Override - protected void masterOperation(Task task, XPackUsageRequest request, ClusterState state, - ActionListener listener) { + protected void masterOperation( + Task task, + XPackUsageRequest request, + ClusterState state, + ActionListener listener + ) { AnalyticsStatsAction.Request statsRequest = new AnalyticsStatsAction.Request(); statsRequest.setParentTask(clusterService.localNode().getId(), task.getId()); - client.execute(AnalyticsStatsAction.INSTANCE, statsRequest, ActionListener.wrap(r -> - listener.onResponse(new XPackUsageFeatureResponse(new AnalyticsFeatureSetUsage(true, true, r))), - listener::onFailure)); + client.execute( + AnalyticsStatsAction.INSTANCE, + statsRequest, + ActionListener.wrap( + r -> listener.onResponse(new XPackUsageFeatureResponse(new AnalyticsFeatureSetUsage(true, true, r))), + listener::onFailure + ) + ); } } diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/action/TransportAnalyticsStatsAction.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/action/TransportAnalyticsStatsAction.java index 0cd33be8411c3..0d01ce2fab32b 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/action/TransportAnalyticsStatsAction.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/action/TransportAnalyticsStatsAction.java @@ -21,23 +21,41 @@ import java.io.IOException; import java.util.List; -public class TransportAnalyticsStatsAction extends TransportNodesAction { +public class TransportAnalyticsStatsAction extends TransportNodesAction< + AnalyticsStatsAction.Request, + AnalyticsStatsAction.Response, + AnalyticsStatsAction.NodeRequest, + AnalyticsStatsAction.NodeResponse> { private final AnalyticsUsage usage; @Inject - public TransportAnalyticsStatsAction(TransportService 
transportService, ClusterService clusterService, - ThreadPool threadPool, ActionFilters actionFilters, AnalyticsUsage usage) { - super(AnalyticsStatsAction.NAME, threadPool, clusterService, transportService, actionFilters, - AnalyticsStatsAction.Request::new, AnalyticsStatsAction.NodeRequest::new, ThreadPool.Names.MANAGEMENT, - AnalyticsStatsAction.NodeResponse.class); + public TransportAnalyticsStatsAction( + TransportService transportService, + ClusterService clusterService, + ThreadPool threadPool, + ActionFilters actionFilters, + AnalyticsUsage usage + ) { + super( + AnalyticsStatsAction.NAME, + threadPool, + clusterService, + transportService, + actionFilters, + AnalyticsStatsAction.Request::new, + AnalyticsStatsAction.NodeRequest::new, + ThreadPool.Names.MANAGEMENT, + AnalyticsStatsAction.NodeResponse.class + ); this.usage = usage; } @Override - protected AnalyticsStatsAction.Response newResponse(AnalyticsStatsAction.Request request, - List nodes, - List failures) { + protected AnalyticsStatsAction.Response newResponse( + AnalyticsStatsAction.Request request, + List nodes, + List failures + ) { return new AnalyticsStatsAction.Response(clusterService.getClusterName(), nodes, failures); } diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/aggregations/AnalyticsAggregatorFactory.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/aggregations/AnalyticsAggregatorFactory.java index 5b6396f6e2255..0945d05c132a5 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/aggregations/AnalyticsAggregatorFactory.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/aggregations/AnalyticsAggregatorFactory.java @@ -34,43 +34,87 @@ public class AnalyticsAggregatorFactory { public static void registerPercentilesAggregator(ValuesSourceRegistry.Builder builder) { - builder.register(PercentilesAggregationBuilder.REGISTRY_KEY, + builder.register( + PercentilesAggregationBuilder.REGISTRY_KEY, AnalyticsValuesSourceType.HISTOGRAM, (name, valuesSource, context, parent, percents, percentilesConfig, keyed, formatter, metadata) -> { if (percentilesConfig.getMethod().equals(PercentilesMethod.TDIGEST)) { - double compression = ((PercentilesConfig.TDigest)percentilesConfig).getCompression(); - return new HistoBackedTDigestPercentilesAggregator(name, valuesSource, context, parent, - percents, compression, keyed, formatter, metadata); + double compression = ((PercentilesConfig.TDigest) percentilesConfig).getCompression(); + return new HistoBackedTDigestPercentilesAggregator( + name, + valuesSource, + context, + parent, + percents, + compression, + keyed, + formatter, + metadata + ); } else if (percentilesConfig.getMethod().equals(PercentilesMethod.HDR)) { - int numSigFig = ((PercentilesConfig.Hdr)percentilesConfig).getNumberOfSignificantValueDigits(); - return new HistoBackedHDRPercentilesAggregator(name, valuesSource, context, parent, - percents, numSigFig, keyed, formatter, metadata); + int numSigFig = ((PercentilesConfig.Hdr) percentilesConfig).getNumberOfSignificantValueDigits(); + return new HistoBackedHDRPercentilesAggregator( + name, + valuesSource, + context, + parent, + percents, + numSigFig, + keyed, + formatter, + metadata + ); } - throw new IllegalArgumentException("Percentiles algorithm: [" + percentilesConfig.getMethod().toString() + "] " + - "is not compatible with Histogram field"); - }, true); + throw new IllegalArgumentException( + "Percentiles algorithm: [" + 
percentilesConfig.getMethod().toString() + "] " + "is not compatible with Histogram field" + ); + }, + true + ); } public static void registerPercentileRanksAggregator(ValuesSourceRegistry.Builder builder) { - builder.register(PercentileRanksAggregationBuilder.REGISTRY_KEY, + builder.register( + PercentileRanksAggregationBuilder.REGISTRY_KEY, AnalyticsValuesSourceType.HISTOGRAM, (name, valuesSource, context, parent, percents, percentilesConfig, keyed, formatter, metadata) -> { if (percentilesConfig.getMethod().equals(PercentilesMethod.TDIGEST)) { - double compression = ((PercentilesConfig.TDigest)percentilesConfig).getCompression(); - return new HistoBackedTDigestPercentileRanksAggregator(name, valuesSource, context, parent, - percents, compression, keyed, formatter, metadata); + double compression = ((PercentilesConfig.TDigest) percentilesConfig).getCompression(); + return new HistoBackedTDigestPercentileRanksAggregator( + name, + valuesSource, + context, + parent, + percents, + compression, + keyed, + formatter, + metadata + ); } else if (percentilesConfig.getMethod().equals(PercentilesMethod.HDR)) { - int numSigFig = ((PercentilesConfig.Hdr)percentilesConfig).getNumberOfSignificantValueDigits(); - return new HistoBackedHDRPercentileRanksAggregator(name, valuesSource, context, parent, - percents, numSigFig, keyed, formatter, metadata); + int numSigFig = ((PercentilesConfig.Hdr) percentilesConfig).getNumberOfSignificantValueDigits(); + return new HistoBackedHDRPercentileRanksAggregator( + name, + valuesSource, + context, + parent, + percents, + numSigFig, + keyed, + formatter, + metadata + ); } - throw new IllegalArgumentException("Percentiles algorithm: [" + percentilesConfig.getMethod().toString() + "] " + - "is not compatible with Histogram field"); - }, true); + throw new IllegalArgumentException( + "Percentiles algorithm: [" + percentilesConfig.getMethod().toString() + "] " + "is not compatible with Histogram field" + ); + }, + true + ); } public static void registerHistoBackedSumAggregator(ValuesSourceRegistry.Builder builder) { @@ -78,10 +122,12 @@ public static void registerHistoBackedSumAggregator(ValuesSourceRegistry.Builder } public static void registerHistoBackedValueCountAggregator(ValuesSourceRegistry.Builder builder) { - builder.register(ValueCountAggregationBuilder.REGISTRY_KEY, + builder.register( + ValueCountAggregationBuilder.REGISTRY_KEY, AnalyticsValuesSourceType.HISTOGRAM, HistoBackedValueCountAggregator::new, - true); + true + ); } public static void registerHistoBackedAverageAggregator(ValuesSourceRegistry.Builder builder) { @@ -89,10 +135,12 @@ public static void registerHistoBackedAverageAggregator(ValuesSourceRegistry.Bui } public static void registerHistoBackedHistogramAggregator(ValuesSourceRegistry.Builder builder) { - builder.register(HistogramAggregationBuilder.REGISTRY_KEY, + builder.register( + HistogramAggregationBuilder.REGISTRY_KEY, AnalyticsValuesSourceType.HISTOGRAM, HistoBackedHistogramAggregator::new, - true); + true + ); } public static void registerHistoBackedMinggregator(ValuesSourceRegistry.Builder builder) { diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/aggregations/bucket/histogram/HistoBackedHistogramAggregator.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/aggregations/bucket/histogram/HistoBackedHistogramAggregator.java index 11ce9fabebcd0..b83d59e91db16 100644 --- 
a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/aggregations/bucket/histogram/HistoBackedHistogramAggregator.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/aggregations/bucket/histogram/HistoBackedHistogramAggregator.java @@ -45,8 +45,22 @@ public HistoBackedHistogramAggregator( CardinalityUpperBound cardinalityUpperBound, Map metadata ) throws IOException { - super(name, factories, interval, offset, order, keyed, minDocCount, extendedBounds, hardBounds, - valuesSourceConfig.format(), context, parent, cardinalityUpperBound, metadata); + super( + name, + factories, + interval, + offset, + order, + keyed, + minDocCount, + extendedBounds, + hardBounds, + valuesSourceConfig.format(), + context, + parent, + cardinalityUpperBound, + metadata + ); // TODO: Stop using null here this.valuesSource = valuesSourceConfig.hasValues() ? (HistogramValuesSource.Histogram) valuesSourceConfig.getValuesSource() : null; @@ -58,8 +72,7 @@ public HistoBackedHistogramAggregator( } @Override - public LeafBucketCollector getLeafCollector(LeafReaderContext ctx, - final LeafBucketCollector sub) throws IOException { + public LeafBucketCollector getLeafCollector(LeafReaderContext ctx, final LeafBucketCollector sub) throws IOException { if (valuesSource == null) { return LeafBucketCollector.NO_OP_COLLECTOR; } diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/aggregations/bucket/range/HistoBackedRangeAggregator.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/aggregations/bucket/range/HistoBackedRangeAggregator.java index 8d1bed14aca90..0179f9498e429 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/aggregations/bucket/range/HistoBackedRangeAggregator.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/aggregations/bucket/range/HistoBackedRangeAggregator.java @@ -117,7 +117,7 @@ public LeafBucketCollector getLeafCollector(LeafReaderContext ctx, LeafBucketCol if ((valuesSource instanceof HistogramValuesSource.Histogram) == false) { return LeafBucketCollector.NO_OP_COLLECTOR; } - final HistogramValuesSource.Histogram valuesSource = (HistogramValuesSource.Histogram)this.valuesSource; + final HistogramValuesSource.Histogram valuesSource = (HistogramValuesSource.Histogram) this.valuesSource; final HistogramValues values = valuesSource.getHistogramValues(ctx); return new LeafBucketCollectorBase(sub, values) { @Override @@ -141,14 +141,8 @@ public void collect(int doc, long bucket) throws IOException { }; } - abstract int collect( - LeafBucketCollector sub, - int doc, - double value, - long owningBucketOrdinal, - int lowBound, - int count - ) throws IOException; + abstract int collect(LeafBucketCollector sub, int doc, double value, long owningBucketOrdinal, int lowBound, int count) + throws IOException; private static class NoOverlap extends HistoBackedRangeAggregator { @@ -183,14 +177,8 @@ private NoOverlap( } @Override - public int collect( - LeafBucketCollector sub, - int doc, - double value, - long owningBucketOrdinal, - int lowBound, - int count - ) throws IOException { + public int collect(LeafBucketCollector sub, int doc, double value, long owningBucketOrdinal, int lowBound, int count) + throws IOException { int lo = lowBound, hi = ranges.length - 1; while (lo <= hi) { final int mid = (lo + hi) >>> 1; @@ -214,6 +202,7 @@ public int collect( private static class Overlap extends HistoBackedRangeAggregator { private final double[] 
maxTo; + Overlap( String name, AggregatorFactories factories, @@ -250,14 +239,8 @@ private static class Overlap extends HistoBackedRangeAggregator { } @Override - public int collect( - LeafBucketCollector sub, - int doc, - double value, - long owningBucketOrdinal, - int lowBound, - int count - ) throws IOException { + public int collect(LeafBucketCollector sub, int doc, double value, long owningBucketOrdinal, int lowBound, int count) + throws IOException { int lo = lowBound, hi = ranges.length - 1; // all candidates are between these indexes int mid = (lo + hi) >>> 1; while (lo <= hi) { diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/aggregations/metrics/AbstractHistoBackedHDRPercentilesAggregator.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/aggregations/metrics/AbstractHistoBackedHDRPercentilesAggregator.java index b082bda51e851..fb37ace576c3a 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/aggregations/metrics/AbstractHistoBackedHDRPercentilesAggregator.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/aggregations/metrics/AbstractHistoBackedHDRPercentilesAggregator.java @@ -10,10 +10,10 @@ import org.HdrHistogram.DoubleHistogram; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.ScoreMode; -import org.elasticsearch.core.Releasables; import org.elasticsearch.common.util.ArrayUtils; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.ObjectArray; +import org.elasticsearch.core.Releasables; import org.elasticsearch.index.fielddata.HistogramValue; import org.elasticsearch.index.fielddata.HistogramValues; import org.elasticsearch.search.DocValueFormat; @@ -41,9 +41,17 @@ private static int indexOfKey(double[] keys, double key) { protected final int numberOfSignificantValueDigits; protected final boolean keyed; - AbstractHistoBackedHDRPercentilesAggregator(String name, ValuesSource valuesSource, AggregationContext context, Aggregator parent, - double[] keys, int numberOfSignificantValueDigits, boolean keyed, DocValueFormat formatter, - Map metadata) throws IOException { + AbstractHistoBackedHDRPercentilesAggregator( + String name, + ValuesSource valuesSource, + AggregationContext context, + Aggregator parent, + double[] keys, + int numberOfSignificantValueDigits, + boolean keyed, + DocValueFormat formatter, + Map metadata + ) throws IOException { super(name, context, parent, metadata); this.valuesSource = valuesSource; this.keyed = keyed; @@ -59,12 +67,11 @@ public ScoreMode scoreMode() { } @Override - public LeafBucketCollector getLeafCollector(LeafReaderContext ctx, - final LeafBucketCollector sub) throws IOException { + public LeafBucketCollector getLeafCollector(LeafReaderContext ctx, final LeafBucketCollector sub) throws IOException { if (valuesSource == null) { return LeafBucketCollector.NO_OP_COLLECTOR; } - final HistogramValues values = ((HistogramValuesSource.Histogram)valuesSource).getHistogramValues(ctx); + final HistogramValues values = ((HistogramValuesSource.Histogram) valuesSource).getHistogramValues(ctx); return new LeafBucketCollectorBase(sub, values) { @Override diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/aggregations/metrics/AbstractHistoBackedTDigestPercentilesAggregator.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/aggregations/metrics/AbstractHistoBackedTDigestPercentilesAggregator.java index 
e3dbf5f620b38..ecc051f855c84 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/aggregations/metrics/AbstractHistoBackedTDigestPercentilesAggregator.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/aggregations/metrics/AbstractHistoBackedTDigestPercentilesAggregator.java @@ -9,10 +9,10 @@ import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.ScoreMode; -import org.elasticsearch.core.Releasables; import org.elasticsearch.common.util.ArrayUtils; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.ObjectArray; +import org.elasticsearch.core.Releasables; import org.elasticsearch.index.fielddata.HistogramValue; import org.elasticsearch.index.fielddata.HistogramValues; import org.elasticsearch.search.DocValueFormat; @@ -41,9 +41,17 @@ private static int indexOfKey(double[] keys, double key) { protected final double compression; protected final boolean keyed; - AbstractHistoBackedTDigestPercentilesAggregator(String name, ValuesSource valuesSource, AggregationContext context, Aggregator parent, - double[] keys, double compression, boolean keyed, DocValueFormat formatter, - Map metadata) throws IOException { + AbstractHistoBackedTDigestPercentilesAggregator( + String name, + ValuesSource valuesSource, + AggregationContext context, + Aggregator parent, + double[] keys, + double compression, + boolean keyed, + DocValueFormat formatter, + Map metadata + ) throws IOException { super(name, context, parent, metadata); this.valuesSource = valuesSource; this.keyed = keyed; @@ -59,12 +67,11 @@ public ScoreMode scoreMode() { } @Override - public LeafBucketCollector getLeafCollector(LeafReaderContext ctx, - final LeafBucketCollector sub) throws IOException { + public LeafBucketCollector getLeafCollector(LeafReaderContext ctx, final LeafBucketCollector sub) throws IOException { if (valuesSource == null) { return LeafBucketCollector.NO_OP_COLLECTOR; } - final HistogramValues values = ((HistogramValuesSource.Histogram)valuesSource).getHistogramValues(ctx); + final HistogramValues values = ((HistogramValuesSource.Histogram) valuesSource).getHistogramValues(ctx); return new LeafBucketCollectorBase(sub, values) { @Override @@ -72,7 +79,7 @@ public void collect(int doc, long bucket) throws IOException { TDigestState state = getExistingOrNewHistogram(bigArrays(), bucket); if (values.advanceExact(doc)) { final HistogramValue sketch = values.histogram(); - while(sketch.next()) { + while (sketch.next()) { state.add(sketch.value(), sketch.count()); } } diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/aggregations/metrics/HistoBackedAvgAggregator.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/aggregations/metrics/HistoBackedAvgAggregator.java index ce668f39c81f0..ab844e6a0255c 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/aggregations/metrics/HistoBackedAvgAggregator.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/aggregations/metrics/HistoBackedAvgAggregator.java @@ -8,9 +8,9 @@ import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.ScoreMode; -import org.elasticsearch.core.Releasables; import org.elasticsearch.common.util.DoubleArray; import org.elasticsearch.common.util.LongArray; +import org.elasticsearch.core.Releasables; import org.elasticsearch.index.fielddata.HistogramValue; import 
org.elasticsearch.index.fielddata.HistogramValues; import org.elasticsearch.search.DocValueFormat; @@ -65,8 +65,7 @@ public ScoreMode scoreMode() { } @Override - public LeafBucketCollector getLeafCollector(LeafReaderContext ctx, - final LeafBucketCollector sub) throws IOException { + public LeafBucketCollector getLeafCollector(LeafReaderContext ctx, final LeafBucketCollector sub) throws IOException { if (valuesSource == null) { return LeafBucketCollector.NO_OP_COLLECTOR; } diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/aggregations/metrics/HistoBackedHDRPercentileRanksAggregator.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/aggregations/metrics/HistoBackedHDRPercentileRanksAggregator.java index 848d337b3a92e..d211f98ac5e24 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/aggregations/metrics/HistoBackedHDRPercentileRanksAggregator.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/aggregations/metrics/HistoBackedHDRPercentileRanksAggregator.java @@ -20,9 +20,17 @@ public class HistoBackedHDRPercentileRanksAggregator extends AbstractHistoBackedHDRPercentilesAggregator { - public HistoBackedHDRPercentileRanksAggregator(String name, ValuesSource valuesSource, AggregationContext context, Aggregator parent, - double[] percents, int numberOfSignificantValueDigits, boolean keyed, DocValueFormat format, - Map metadata) throws IOException { + public HistoBackedHDRPercentileRanksAggregator( + String name, + ValuesSource valuesSource, + AggregationContext context, + Aggregator parent, + double[] percents, + int numberOfSignificantValueDigits, + boolean keyed, + DocValueFormat format, + Map metadata + ) throws IOException { super(name, valuesSource, context, parent, percents, numberOfSignificantValueDigits, keyed, format, metadata); } diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/aggregations/metrics/HistoBackedHDRPercentilesAggregator.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/aggregations/metrics/HistoBackedHDRPercentilesAggregator.java index aefa0550375fe..448073a556dcd 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/aggregations/metrics/HistoBackedHDRPercentilesAggregator.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/aggregations/metrics/HistoBackedHDRPercentilesAggregator.java @@ -20,9 +20,17 @@ public class HistoBackedHDRPercentilesAggregator extends AbstractHistoBackedHDRPercentilesAggregator { - public HistoBackedHDRPercentilesAggregator(String name, ValuesSource valuesSource, AggregationContext context, Aggregator parent, - double[] percents, int numberOfSignificantValueDigits, boolean keyed, DocValueFormat formatter, - Map metadata) throws IOException { + public HistoBackedHDRPercentilesAggregator( + String name, + ValuesSource valuesSource, + AggregationContext context, + Aggregator parent, + double[] percents, + int numberOfSignificantValueDigits, + boolean keyed, + DocValueFormat formatter, + Map metadata + ) throws IOException { super(name, valuesSource, context, parent, percents, numberOfSignificantValueDigits, keyed, formatter, metadata); } diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/aggregations/metrics/HistoBackedMaxAggregator.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/aggregations/metrics/HistoBackedMaxAggregator.java index 
0357db8a0f3c9..3f6c6b24a86c1 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/aggregations/metrics/HistoBackedMaxAggregator.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/aggregations/metrics/HistoBackedMaxAggregator.java @@ -8,8 +8,8 @@ import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.ScoreMode; -import org.elasticsearch.core.Releasables; import org.elasticsearch.common.util.DoubleArray; +import org.elasticsearch.core.Releasables; import org.elasticsearch.index.fielddata.HistogramValue; import org.elasticsearch.index.fielddata.HistogramValues; import org.elasticsearch.search.DocValueFormat; diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/aggregations/metrics/HistoBackedMinAggregator.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/aggregations/metrics/HistoBackedMinAggregator.java index 8fbddbe2a5ad3..dec1b4bb087d7 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/aggregations/metrics/HistoBackedMinAggregator.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/aggregations/metrics/HistoBackedMinAggregator.java @@ -8,8 +8,8 @@ import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.ScoreMode; -import org.elasticsearch.core.Releasables; import org.elasticsearch.common.util.DoubleArray; +import org.elasticsearch.core.Releasables; import org.elasticsearch.index.fielddata.HistogramValue; import org.elasticsearch.index.fielddata.HistogramValues; import org.elasticsearch.search.DocValueFormat; diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/aggregations/metrics/HistoBackedSumAggregator.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/aggregations/metrics/HistoBackedSumAggregator.java index 95d63ff819ca9..78c4536e713f4 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/aggregations/metrics/HistoBackedSumAggregator.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/aggregations/metrics/HistoBackedSumAggregator.java @@ -8,8 +8,8 @@ import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.ScoreMode; -import org.elasticsearch.core.Releasables; import org.elasticsearch.common.util.DoubleArray; +import org.elasticsearch.core.Releasables; import org.elasticsearch.index.fielddata.HistogramValue; import org.elasticsearch.index.fielddata.HistogramValues; import org.elasticsearch.search.DocValueFormat; @@ -64,8 +64,7 @@ public ScoreMode scoreMode() { } @Override - public LeafBucketCollector getLeafCollector(LeafReaderContext ctx, - final LeafBucketCollector sub) throws IOException { + public LeafBucketCollector getLeafCollector(LeafReaderContext ctx, final LeafBucketCollector sub) throws IOException { if (valuesSource == null) { return LeafBucketCollector.NO_OP_COLLECTOR; } diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/aggregations/metrics/HistoBackedTDigestPercentileRanksAggregator.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/aggregations/metrics/HistoBackedTDigestPercentileRanksAggregator.java index b6f0f9b3c0633..cfc8a5a4257cd 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/aggregations/metrics/HistoBackedTDigestPercentileRanksAggregator.java +++ 
b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/aggregations/metrics/HistoBackedTDigestPercentileRanksAggregator.java @@ -20,15 +20,17 @@ public class HistoBackedTDigestPercentileRanksAggregator extends AbstractHistoBackedTDigestPercentilesAggregator { - public HistoBackedTDigestPercentileRanksAggregator(String name, - ValuesSource valuesSource, - AggregationContext context, - Aggregator parent, - double[] percents, - double compression, - boolean keyed, - DocValueFormat formatter, - Map metadata) throws IOException { + public HistoBackedTDigestPercentileRanksAggregator( + String name, + ValuesSource valuesSource, + AggregationContext context, + Aggregator parent, + double[] percents, + double compression, + boolean keyed, + DocValueFormat formatter, + Map metadata + ) throws IOException { super(name, valuesSource, context, parent, percents, compression, keyed, formatter, metadata); } diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/aggregations/metrics/HistoBackedTDigestPercentilesAggregator.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/aggregations/metrics/HistoBackedTDigestPercentilesAggregator.java index bdf9919a69f34..d26be737cecac 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/aggregations/metrics/HistoBackedTDigestPercentilesAggregator.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/aggregations/metrics/HistoBackedTDigestPercentilesAggregator.java @@ -20,15 +20,17 @@ public class HistoBackedTDigestPercentilesAggregator extends AbstractHistoBackedTDigestPercentilesAggregator { - public HistoBackedTDigestPercentilesAggregator(String name, - ValuesSource valuesSource, - AggregationContext context, - Aggregator parent, - double[] percents, - double compression, - boolean keyed, - DocValueFormat formatter, - Map metadata) throws IOException { + public HistoBackedTDigestPercentilesAggregator( + String name, + ValuesSource valuesSource, + AggregationContext context, + Aggregator parent, + double[] percents, + double compression, + boolean keyed, + DocValueFormat formatter, + Map metadata + ) throws IOException { super(name, valuesSource, context, parent, percents, compression, keyed, formatter, metadata); } diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/aggregations/metrics/HistoBackedValueCountAggregator.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/aggregations/metrics/HistoBackedValueCountAggregator.java index abe2ddac530ba..4200d95fece60 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/aggregations/metrics/HistoBackedValueCountAggregator.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/aggregations/metrics/HistoBackedValueCountAggregator.java @@ -7,8 +7,8 @@ package org.elasticsearch.xpack.analytics.aggregations.metrics; import org.apache.lucene.index.LeafReaderContext; -import org.elasticsearch.core.Releasables; import org.elasticsearch.common.util.LongArray; +import org.elasticsearch.core.Releasables; import org.elasticsearch.index.fielddata.HistogramValue; import org.elasticsearch.index.fielddata.HistogramValues; import org.elasticsearch.search.aggregations.Aggregator; @@ -37,11 +37,12 @@ public class HistoBackedValueCountAggregator extends NumericMetricsAggregator.Si LongArray counts; public HistoBackedValueCountAggregator( - String name, - ValuesSourceConfig valuesSourceConfig, 
- AggregationContext aggregationContext, - Aggregator parent, - Map metadata) throws IOException { + String name, + ValuesSourceConfig valuesSourceConfig, + AggregationContext aggregationContext, + Aggregator parent, + Map metadata + ) throws IOException { super(name, aggregationContext, parent, metadata); // TODO: stop using nulls here this.valuesSource = valuesSourceConfig.hasValues() ? (HistogramValuesSource.Histogram) valuesSourceConfig.getValuesSource() : null; @@ -51,8 +52,7 @@ public HistoBackedValueCountAggregator( } @Override - public LeafBucketCollector getLeafCollector(LeafReaderContext ctx, - final LeafBucketCollector sub) throws IOException { + public LeafBucketCollector getLeafCollector(LeafReaderContext ctx, final LeafBucketCollector sub) throws IOException { if (valuesSource == null) { return LeafBucketCollector.NO_OP_COLLECTOR; } diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/aggregations/support/AnalyticsValuesSourceType.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/aggregations/support/AnalyticsValuesSourceType.java index 11af5d834b986..aebb2698b0870 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/aggregations/support/AnalyticsValuesSourceType.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/aggregations/support/AnalyticsValuesSourceType.java @@ -37,8 +37,9 @@ public ValuesSource getField(FieldContext fieldContext, AggregationScript.LeafFa final IndexFieldData indexFieldData = fieldContext.indexFieldData(); if ((indexFieldData instanceof IndexHistogramFieldData) == false) { - throw new IllegalArgumentException("Expected histogram type on field [" + fieldContext.field() + - "], but got [" + fieldContext.fieldType().typeName() + "]"); + throw new IllegalArgumentException( + "Expected histogram type on field [" + fieldContext.field() + "], but got [" + fieldContext.fieldType().typeName() + "]" + ); } return new HistogramValuesSource.Histogram.Fielddata((IndexHistogramFieldData) indexFieldData); } @@ -54,7 +55,6 @@ public ValuesSource replaceMissing( } }; - public static ValuesSourceType fromString(String name) { return valueOf(name.trim().toUpperCase(Locale.ROOT)); } diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/boxplot/BoxplotAggregationBuilder.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/boxplot/BoxplotAggregationBuilder.java index 92424374bf98d..a0a34ead60bdf 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/boxplot/BoxplotAggregationBuilder.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/boxplot/BoxplotAggregationBuilder.java @@ -38,8 +38,10 @@ public class BoxplotAggregationBuilder extends ValuesSourceAggregationBuilder.Le BoxplotAggregatorSupplier.class ); - public static final ObjectParser PARSER = - ObjectParser.fromBuilder(NAME, BoxplotAggregationBuilder::new); + public static final ObjectParser PARSER = ObjectParser.fromBuilder( + NAME, + BoxplotAggregationBuilder::new + ); static { ValuesSourceAggregationBuilder.declareFields(PARSER, true, true, false); PARSER.declareDouble(BoxplotAggregationBuilder::compression, COMPRESSION_FIELD); @@ -51,8 +53,11 @@ public BoxplotAggregationBuilder(String name) { super(name); } - protected BoxplotAggregationBuilder(BoxplotAggregationBuilder clone, - AggregatorFactories.Builder factoriesBuilder, Map metadata) { + protected BoxplotAggregationBuilder( 
+ BoxplotAggregationBuilder clone, + AggregatorFactories.Builder factoriesBuilder, + Map metadata + ) { super(clone, factoriesBuilder, metadata); this.compression = clone.compression; } @@ -91,7 +96,8 @@ protected ValuesSourceType defaultValueSourceType() { public BoxplotAggregationBuilder compression(double compression) { if (compression < 0.0) { throw new IllegalArgumentException( - "[compression] must be greater than or equal to 0. Found [" + compression + "] in [" + name + "]"); + "[compression] must be greater than or equal to 0. Found [" + compression + "] in [" + name + "]" + ); } this.compression = compression; return this; @@ -106,15 +112,15 @@ public double compression() { } @Override - protected BoxplotAggregatorFactory innerBuild(AggregationContext context, - ValuesSourceConfig config, - AggregatorFactory parent, - AggregatorFactories.Builder subFactoriesBuilder) throws IOException { - BoxplotAggregatorSupplier aggregatorSupplier = - context.getValuesSourceRegistry().getAggregator(REGISTRY_KEY, config); + protected BoxplotAggregatorFactory innerBuild( + AggregationContext context, + ValuesSourceConfig config, + AggregatorFactory parent, + AggregatorFactories.Builder subFactoriesBuilder + ) throws IOException { + BoxplotAggregatorSupplier aggregatorSupplier = context.getValuesSourceRegistry().getAggregator(REGISTRY_KEY, config); - return new BoxplotAggregatorFactory(name, config, compression, context, parent, subFactoriesBuilder, - metadata, aggregatorSupplier); + return new BoxplotAggregatorFactory(name, config, compression, context, parent, subFactoriesBuilder, metadata, aggregatorSupplier); } @Override @@ -147,10 +153,8 @@ protected ValuesSourceRegistry.RegistryKey getRegistryKey() { return REGISTRY_KEY; } - @Override public Optional> getOutputFieldNames() { return Optional.of(InternalBoxplot.METRIC_NAMES); } } - diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/boxplot/BoxplotAggregator.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/boxplot/BoxplotAggregator.java index b115f68a52a61..71518379f0377 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/boxplot/BoxplotAggregator.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/boxplot/BoxplotAggregator.java @@ -9,9 +9,9 @@ import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.ScoreMode; -import org.elasticsearch.core.Releasables; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.ObjectArray; +import org.elasticsearch.core.Releasables; import org.elasticsearch.index.fielddata.HistogramValue; import org.elasticsearch.index.fielddata.HistogramValues; import org.elasticsearch.index.fielddata.SortedNumericDoubleValues; @@ -36,8 +36,15 @@ public class BoxplotAggregator extends NumericMetricsAggregator.MultiValue { protected ObjectArray states; protected final double compression; - BoxplotAggregator(String name, ValuesSource valuesSource, DocValueFormat formatter, double compression, - AggregationContext context, Aggregator parent, Map metadata) throws IOException { + BoxplotAggregator( + String name, + ValuesSource valuesSource, + DocValueFormat formatter, + double compression, + AggregationContext context, + Aggregator parent, + Map metadata + ) throws IOException { super(name, context, parent, metadata); this.valuesSource = valuesSource; this.format = formatter; @@ -53,27 +60,26 @@ public ScoreMode scoreMode() { } @Override - public 
LeafBucketCollector getLeafCollector(LeafReaderContext ctx, - final LeafBucketCollector sub) throws IOException { + public LeafBucketCollector getLeafCollector(LeafReaderContext ctx, final LeafBucketCollector sub) throws IOException { if (valuesSource == null) { return LeafBucketCollector.NO_OP_COLLECTOR; } if (valuesSource instanceof HistogramValuesSource.Histogram) { - final HistogramValues values = ((HistogramValuesSource.Histogram)valuesSource).getHistogramValues(ctx); + final HistogramValues values = ((HistogramValuesSource.Histogram) valuesSource).getHistogramValues(ctx); return new LeafBucketCollectorBase(sub, values) { @Override public void collect(int doc, long bucket) throws IOException { TDigestState state = getExistingOrNewHistogram(bigArrays(), bucket); if (values.advanceExact(doc)) { final HistogramValue sketch = values.histogram(); - while(sketch.next()) { + while (sketch.next()) { state.add(sketch.value(), sketch.count()); } } } }; } else { - final SortedNumericDoubleValues values = ((ValuesSource.Numeric)valuesSource).doubleValues(ctx); + final SortedNumericDoubleValues values = ((ValuesSource.Numeric) valuesSource).doubleValues(ctx); return new LeafBucketCollectorBase(sub, values) { @Override public void collect(int doc, long bucket) throws IOException { @@ -121,7 +127,6 @@ public double metric(String name, long owningBucketOrd) { return InternalBoxplot.Metrics.resolve(name).value(state); } - @Override public InternalAggregation buildAggregation(long owningBucketOrdinal) { TDigestState state = getState(owningBucketOrdinal); diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/boxplot/BoxplotAggregatorFactory.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/boxplot/BoxplotAggregatorFactory.java index 536cb001545a5..d3bb7f4031836 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/boxplot/BoxplotAggregatorFactory.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/boxplot/BoxplotAggregatorFactory.java @@ -32,17 +32,20 @@ static void registerAggregators(ValuesSourceRegistry.Builder builder) { BoxplotAggregationBuilder.REGISTRY_KEY, List.of(CoreValuesSourceType.NUMERIC, AnalyticsValuesSourceType.HISTOGRAM), BoxplotAggregator::new, - true); + true + ); } - BoxplotAggregatorFactory(String name, - ValuesSourceConfig config, - double compression, - AggregationContext context, - AggregatorFactory parent, - AggregatorFactories.Builder subFactoriesBuilder, - Map metadata, - BoxplotAggregatorSupplier aggregatorSupplier) throws IOException { + BoxplotAggregatorFactory( + String name, + ValuesSourceConfig config, + double compression, + AggregationContext context, + AggregatorFactory parent, + AggregatorFactories.Builder subFactoriesBuilder, + Map metadata, + BoxplotAggregatorSupplier aggregatorSupplier + ) throws IOException { super(name, config, context, parent, subFactoriesBuilder, metadata); this.compression = compression; this.aggregatorSupplier = aggregatorSupplier; @@ -54,12 +57,8 @@ protected Aggregator createUnmapped(Aggregator parent, Map metad } @Override - protected Aggregator doCreateInternal( - Aggregator parent, - CardinalityUpperBound cardinality, - Map metadata - ) throws IOException { - return aggregatorSupplier - .build(name, config.getValuesSource(), config.format(), compression, context, parent, metadata); + protected Aggregator doCreateInternal(Aggregator parent, CardinalityUpperBound cardinality, Map metadata) + throws IOException { + 
return aggregatorSupplier.build(name, config.getValuesSource(), config.format(), compression, context, parent, metadata); } } diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/boxplot/BoxplotAggregatorSupplier.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/boxplot/BoxplotAggregatorSupplier.java index 7faf8e0774284..b1e859f5e5c81 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/boxplot/BoxplotAggregatorSupplier.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/boxplot/BoxplotAggregatorSupplier.java @@ -17,12 +17,14 @@ @FunctionalInterface public interface BoxplotAggregatorSupplier { - Aggregator build(String name, - ValuesSource valuesSource, - DocValueFormat formatter, - double compression, - AggregationContext context, - Aggregator parent, - Map metadata) throws IOException; + Aggregator build( + String name, + ValuesSource valuesSource, + DocValueFormat formatter, + double compression, + AggregationContext context, + Aggregator parent, + Map metadata + ) throws IOException; } diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/boxplot/InternalBoxplot.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/boxplot/InternalBoxplot.java index 6f036cee707b8..8c56829c7ccfd 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/boxplot/InternalBoxplot.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/boxplot/InternalBoxplot.java @@ -319,4 +319,3 @@ public boolean equals(Object obj) { return Objects.equals(state, that.state); } } - diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/cumulativecardinality/CumulativeCardinalityPipelineAggregationBuilder.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/cumulativecardinality/CumulativeCardinalityPipelineAggregationBuilder.java index 76304f27fdb7f..f98b4725463f6 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/cumulativecardinality/CumulativeCardinalityPipelineAggregationBuilder.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/cumulativecardinality/CumulativeCardinalityPipelineAggregationBuilder.java @@ -22,14 +22,16 @@ import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg; import static org.elasticsearch.search.aggregations.pipeline.PipelineAggregator.Parser.FORMAT; -public class CumulativeCardinalityPipelineAggregationBuilder - extends AbstractPipelineAggregationBuilder { +public class CumulativeCardinalityPipelineAggregationBuilder extends AbstractPipelineAggregationBuilder< + CumulativeCardinalityPipelineAggregationBuilder> { public static final String NAME = "cumulative_cardinality"; public static final ConstructingObjectParser PARSER = - new ConstructingObjectParser<>(NAME, false, (args, name) -> { - return new CumulativeCardinalityPipelineAggregationBuilder(name, (String) args[0]); - }); + new ConstructingObjectParser<>( + NAME, + false, + (args, name) -> { return new CumulativeCardinalityPipelineAggregationBuilder(name, (String) args[0]); } + ); static { PARSER.declareString(constructorArg(), BUCKETS_PATH_FIELD); PARSER.declareString(CumulativeCardinalityPipelineAggregationBuilder::format, FORMAT); diff --git 
a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/cumulativecardinality/CumulativeCardinalityPipelineAggregator.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/cumulativecardinality/CumulativeCardinalityPipelineAggregator.java index 9ca72c114ea37..d946df4ac09e6 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/cumulativecardinality/CumulativeCardinalityPipelineAggregator.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/cumulativecardinality/CumulativeCardinalityPipelineAggregator.java @@ -73,14 +73,17 @@ public InternalAggregation reduce(InternalAggregation aggregation, ReduceContext } } - private AbstractHyperLogLogPlusPlus resolveBucketValue(MultiBucketsAggregation agg, - InternalMultiBucketAggregation.InternalBucket bucket, - String aggPath) { + private AbstractHyperLogLogPlusPlus resolveBucketValue( + MultiBucketsAggregation agg, + InternalMultiBucketAggregation.InternalBucket bucket, + String aggPath + ) { List aggPathsList = AggregationPath.parse(aggPath).getPathElementsAsStringList(); Object propertyValue = bucket.getProperty(agg.getName(), aggPathsList); if (propertyValue == null) { - throw new AggregationExecutionException(AbstractPipelineAggregationBuilder.BUCKETS_PATH_FIELD.getPreferredName() - + " must reference a cardinality aggregation"); + throw new AggregationExecutionException( + AbstractPipelineAggregationBuilder.BUCKETS_PATH_FIELD.getPreferredName() + " must reference a cardinality aggregation" + ); } if (propertyValue instanceof InternalCardinality) { @@ -94,9 +97,14 @@ private AbstractHyperLogLogPlusPlus resolveBucketValue(MultiBucketsAggregation a currentAggName = aggPathsList.get(0); } - throw new AggregationExecutionException(AbstractPipelineAggregationBuilder.BUCKETS_PATH_FIELD.getPreferredName() - + " must reference a cardinality aggregation, got: [" - + propertyValue.getClass().getSimpleName() + "] at aggregation [" + currentAggName + "]"); + throw new AggregationExecutionException( + AbstractPipelineAggregationBuilder.BUCKETS_PATH_FIELD.getPreferredName() + + " must reference a cardinality aggregation, got: [" + + propertyValue.getClass().getSimpleName() + + "] at aggregation [" + + currentAggName + + "]" + ); } } diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/mapper/HistogramFieldMapper.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/mapper/HistogramFieldMapper.java index 8224881ddad06..7d91492aeaced 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/mapper/HistogramFieldMapper.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/mapper/HistogramFieldMapper.java @@ -6,7 +6,6 @@ */ package org.elasticsearch.xpack.analytics.mapper; - import com.carrotsearch.hppc.DoubleArrayList; import com.carrotsearch.hppc.IntArrayList; @@ -76,8 +75,12 @@ public static class Builder extends FieldMapper.Builder { public Builder(String name, boolean ignoreMalformedByDefault) { super(name); - this.ignoreMalformed - = Parameter.explicitBoolParam("ignore_malformed", true, m -> toType(m).ignoreMalformed, ignoreMalformedByDefault); + this.ignoreMalformed = Parameter.explicitBoolParam( + "ignore_malformed", + true, + m -> toType(m).ignoreMalformed, + ignoreMalformedByDefault + ); } @Override @@ -87,19 +90,31 @@ protected List> getParameters() { @Override public HistogramFieldMapper build(ContentPath contentPath) { - return new 
HistogramFieldMapper(name, new HistogramFieldType(buildFullName(contentPath), meta.getValue()), - multiFieldsBuilder.build(this, contentPath), copyTo.build(), this); + return new HistogramFieldMapper( + name, + new HistogramFieldType(buildFullName(contentPath), meta.getValue()), + multiFieldsBuilder.build(this, contentPath), + copyTo.build(), + this + ); } } - public static final TypeParser PARSER - = new TypeParser((n, c) -> new Builder(n, IGNORE_MALFORMED_SETTING.get(c.getSettings())), notInMultiFields(CONTENT_TYPE)); + public static final TypeParser PARSER = new TypeParser( + (n, c) -> new Builder(n, IGNORE_MALFORMED_SETTING.get(c.getSettings())), + notInMultiFields(CONTENT_TYPE) + ); private final Explicit ignoreMalformed; private final boolean ignoreMalformedByDefault; - public HistogramFieldMapper(String simpleName, MappedFieldType mappedFieldType, - MultiFields multiFields, CopyTo copyTo, Builder builder) { + public HistogramFieldMapper( + String simpleName, + MappedFieldType mappedFieldType, + MultiFields multiFields, + CopyTo copyTo, + Builder builder + ) { super(simpleName, mappedFieldType, multiFields, copyTo); this.ignoreMalformed = builder.ignoreMalformed.getValue(); this.ignoreMalformedByDefault = builder.ignoreMalformed.getDefaultValue().value(); @@ -177,14 +192,14 @@ public HistogramValue histogram() throws IOException { @Override public ScriptDocValues getScriptValues() { - throw new UnsupportedOperationException("The [" + CONTENT_TYPE + "] field does not " + - "support scripts"); + throw new UnsupportedOperationException("The [" + CONTENT_TYPE + "] field does not " + "support scripts"); } @Override public SortedBinaryDocValues getBytesValues() { - throw new UnsupportedOperationException("String representation of doc values " + - "for [" + CONTENT_TYPE + "] fields is not supported"); + throw new UnsupportedOperationException( + "String representation of doc values " + "for [" + CONTENT_TYPE + "] fields is not supported" + ); } @Override @@ -205,14 +220,21 @@ public LeafHistogramFieldData loadDirect(LeafReaderContext context) { } @Override - public SortField sortField(Object missingValue, MultiValueMode sortMode, - Nested nested, boolean reverse) { + public SortField sortField(Object missingValue, MultiValueMode sortMode, Nested nested, boolean reverse) { throw new UnsupportedOperationException("can't sort on the [" + CONTENT_TYPE + "] field"); } @Override - public BucketedSort newBucketedSort(BigArrays bigArrays, Object missingValue, MultiValueMode sortMode, - Nested nested, SortOrder sortOrder, DocValueFormat format, int bucketSize, BucketedSort.ExtraData extra) { + public BucketedSort newBucketedSort( + BigArrays bigArrays, + Object missingValue, + MultiValueMode sortMode, + Nested nested, + SortOrder sortOrder, + DocValueFormat format, + int bucketSize, + BucketedSort.ExtraData extra + ) { throw new IllegalArgumentException("can't sort on the [" + CONTENT_TYPE + "] field"); } }; @@ -220,8 +242,9 @@ public BucketedSort newBucketedSort(BigArrays bigArrays, Object missingValue, Mu @Override public Query termQuery(Object value, SearchExecutionContext context) { - throw new IllegalArgumentException("[" + CONTENT_TYPE + "] field do not support searching, " + - "use dedicated aggregations instead: [" + name() + "]"); + throw new IllegalArgumentException( + "[" + CONTENT_TYPE + "] field do not support searching, " + "use dedicated aggregations instead: [" + name() + "]" + ); } } @@ -259,9 +282,17 @@ public void parse(DocumentParserContext context) throws IOException { double 
val = subParser.doubleValue(); if (val < previousVal) { // values must be in increasing order - throw new MapperParsingException("error parsing field [" - + name() + "], ["+ VALUES_FIELD + "] values must be in increasing order, got [" + val + - "] but previous value was [" + previousVal +"]"); + throw new MapperParsingException( + "error parsing field [" + + name() + + "], [" + + VALUES_FIELD + + "] values must be in increasing order, got [" + + val + + "] but previous value was [" + + previousVal + + "]" + ); } values.add(val); previousVal = val; @@ -280,30 +311,43 @@ public void parse(DocumentParserContext context) throws IOException { token = subParser.nextToken(); } } else { - throw new MapperParsingException("error parsing field [" + - name() + "], with unknown parameter [" + fieldName + "]"); + throw new MapperParsingException("error parsing field [" + name() + "], with unknown parameter [" + fieldName + "]"); } token = subParser.nextToken(); } if (values == null) { - throw new MapperParsingException("error parsing field [" - + name() + "], expected field called [" + VALUES_FIELD.getPreferredName() + "]"); + throw new MapperParsingException( + "error parsing field [" + name() + "], expected field called [" + VALUES_FIELD.getPreferredName() + "]" + ); } if (counts == null) { - throw new MapperParsingException("error parsing field [" - + name() + "], expected field called [" + COUNTS_FIELD.getPreferredName() + "]"); + throw new MapperParsingException( + "error parsing field [" + name() + "], expected field called [" + COUNTS_FIELD.getPreferredName() + "]" + ); } if (values.size() != counts.size()) { - throw new MapperParsingException("error parsing field [" - + name() + "], expected same length from [" + VALUES_FIELD.getPreferredName() +"] and " + - "[" + COUNTS_FIELD.getPreferredName() +"] but got [" + values.size() + " != " + counts.size() +"]"); + throw new MapperParsingException( + "error parsing field [" + + name() + + "], expected same length from [" + + VALUES_FIELD.getPreferredName() + + "] and " + + "[" + + COUNTS_FIELD.getPreferredName() + + "] but got [" + + values.size() + + " != " + + counts.size() + + "]" + ); } ByteBuffersDataOutput dataOutput = new ByteBuffersDataOutput(); for (int i = 0; i < values.size(); i++) { int count = counts.get(i); if (count < 0) { - throw new MapperParsingException("error parsing field [" - + name() + "], ["+ COUNTS_FIELD + "] elements must be >= 0 but got " + counts.get(i)); + throw new MapperParsingException( + "error parsing field [" + name() + "], [" + COUNTS_FIELD + "] elements must be >= 0 but got " + counts.get(i) + ); } else if (count > 0) { // we do not add elements with count == 0 dataOutput.writeVInt(count); @@ -313,15 +357,19 @@ public void parse(DocumentParserContext context) throws IOException { BytesRef docValue = new BytesRef(dataOutput.toArrayCopy(), 0, Math.toIntExact(dataOutput.size())); Field field = new BinaryDocValuesField(name(), docValue); if (context.doc().getByKey(fieldType().name()) != null) { - throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + - "] doesn't not support indexing multiple values for the same field in the same document"); + throw new IllegalArgumentException( + "Field [" + + name() + + "] of type [" + + typeName() + + "] doesn't not support indexing multiple values for the same field in the same document" + ); } context.doc().addWithKey(fieldType().name(), field); } catch (Exception ex) { if (ignoreMalformed.value() == false) { - throw new 
MapperParsingException("failed to parse field [{}] of type [{}]", - ex, fieldType().name(), fieldType().typeName()); + throw new MapperParsingException("failed to parse field [{}] of type [{}]", ex, fieldType().name(), fieldType().typeName()); } if (subParser != null) { diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/movingPercentiles/MovingPercentilesPipelineAggregationBuilder.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/movingPercentiles/MovingPercentilesPipelineAggregationBuilder.java index 892e7875f1def..7f3603fead159 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/movingPercentiles/MovingPercentilesPipelineAggregationBuilder.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/movingPercentiles/MovingPercentilesPipelineAggregationBuilder.java @@ -6,10 +6,10 @@ */ package org.elasticsearch.xpack.analytics.movingPercentiles; -import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.ConstructingObjectParser; +import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.search.aggregations.pipeline.AbstractPipelineAggregationBuilder; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; @@ -20,16 +20,18 @@ import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg; -public class MovingPercentilesPipelineAggregationBuilder - extends AbstractPipelineAggregationBuilder { +public class MovingPercentilesPipelineAggregationBuilder extends AbstractPipelineAggregationBuilder< + MovingPercentilesPipelineAggregationBuilder> { public static final String NAME = "moving_percentiles"; private static final ParseField WINDOW = new ParseField("window"); private static final ParseField SHIFT = new ParseField("shift"); public static final ConstructingObjectParser PARSER = - new ConstructingObjectParser<>(NAME, false, (args, name) -> { - return new MovingPercentilesPipelineAggregationBuilder(name, (String) args[0], (int) args[1]); - }); + new ConstructingObjectParser<>( + NAME, + false, + (args, name) -> { return new MovingPercentilesPipelineAggregationBuilder(name, (String) args[0], (int) args[1]); } + ); static { PARSER.declareString(constructorArg(), BUCKETS_PATH_FIELD); PARSER.declareInt(constructorArg(), WINDOW); diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/movingPercentiles/MovingPercentilesPipelineAggregator.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/movingPercentiles/MovingPercentilesPipelineAggregator.java index 2dbfc18f0616b..c4859779ba55e 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/movingPercentiles/MovingPercentilesPipelineAggregator.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/movingPercentiles/MovingPercentilesPipelineAggregator.java @@ -34,8 +34,7 @@ public class MovingPercentilesPipelineAggregator extends PipelineAggregator { private final int window; private final int shift; - MovingPercentilesPipelineAggregator(String name, String[] bucketsPaths, int window, int shift, - Map metadata) { + MovingPercentilesPipelineAggregator(String name, String[] bucketsPaths, int window, int shift, Map metadata) { super(name, bucketsPaths, metadata); 
this.window = window; this.shift = shift; @@ -61,17 +60,23 @@ public InternalAggregation reduce(InternalAggregation aggregation, ReduceContext reduceHDR(buckets, histo, newBuckets, factory, config); break; default: - throw new AggregationExecutionException(AbstractPipelineAggregationBuilder.BUCKETS_PATH_FIELD.getPreferredName() - + " references an unknown percentile aggregation method: [" + config.method + "]"); + throw new AggregationExecutionException( + AbstractPipelineAggregationBuilder.BUCKETS_PATH_FIELD.getPreferredName() + + " references an unknown percentile aggregation method: [" + + config.method + + "]" + ); } return factory.createAggregation(newBuckets); } - private void reduceTDigest(List buckets, - MultiBucketsAggregation histo, - List newBuckets, - HistogramFactory factory, - PercentileConfig config) { + private void reduceTDigest( + List buckets, + MultiBucketsAggregation histo, + List newBuckets, + HistogramFactory factory, + PercentileConfig config + ) { List values = buckets.stream() .map(b -> resolveTDigestBucketValue(histo, b, bucketsPaths()[0])) @@ -81,7 +86,7 @@ private void reduceTDigest(List aggs = bucket.getAggregations().asList().stream() + List aggs = bucket.getAggregations() + .asList() + .stream() .map((p) -> (InternalAggregation) p) .collect(Collectors.toList()); aggs.add(new InternalTDigestPercentiles(name(), config.keys, state, config.keyed, config.formatter, metadata())); @@ -113,11 +120,13 @@ private void reduceTDigest(List buckets, - MultiBucketsAggregation histo, - List newBuckets, - HistogramFactory factory, - PercentileConfig config) { + private void reduceHDR( + List buckets, + MultiBucketsAggregation histo, + List newBuckets, + HistogramFactory factory, + PercentileConfig config + ) { List values = buckets.stream() .map(b -> resolveHDRBucketValue(histo, b, bucketsPaths()[0])) @@ -128,7 +137,7 @@ private void reduceHDR(List aggs = bucket.getAggregations().asList().stream() + List aggs = bucket.getAggregations() + .asList() + .stream() .map((p) -> (InternalAggregation) p) .collect(Collectors.toList()); aggs.add(new InternalHDRPercentiles(name(), config.keys, state, config.keyed, config.formatter, metadata())); @@ -159,9 +170,11 @@ private void reduceHDR(List aggPathsList = AggregationPath.parse(aggPath).getPathElementsAsStringList(); Object propertyValue = bucket.getProperty(agg.getName(), aggPathsList); if (propertyValue == null) { @@ -170,24 +183,30 @@ private PercentileConfig resolvePercentileConfig(MultiBucketsAggregation agg, if (propertyValue instanceof InternalTDigestPercentiles) { InternalTDigestPercentiles internalTDigestPercentiles = ((InternalTDigestPercentiles) propertyValue); - return new PercentileConfig(PercentilesMethod.TDIGEST, - internalTDigestPercentiles.getKeys(), - internalTDigestPercentiles.keyed(), - internalTDigestPercentiles.formatter()); + return new PercentileConfig( + PercentilesMethod.TDIGEST, + internalTDigestPercentiles.getKeys(), + internalTDigestPercentiles.keyed(), + internalTDigestPercentiles.formatter() + ); } if (propertyValue instanceof InternalHDRPercentiles) { InternalHDRPercentiles internalHDRPercentiles = ((InternalHDRPercentiles) propertyValue); - return new PercentileConfig(PercentilesMethod.HDR, - internalHDRPercentiles.getKeys(), - internalHDRPercentiles.keyed(), - internalHDRPercentiles.formatter()); + return new PercentileConfig( + PercentilesMethod.HDR, + internalHDRPercentiles.getKeys(), + internalHDRPercentiles.keyed(), + internalHDRPercentiles.formatter() + ); } throw buildResolveError(agg, 
aggPathsList, propertyValue, "percentiles"); } - private TDigestState resolveTDigestBucketValue(MultiBucketsAggregation agg, - InternalMultiBucketAggregation.InternalBucket bucket, - String aggPath) { + private TDigestState resolveTDigestBucketValue( + MultiBucketsAggregation agg, + InternalMultiBucketAggregation.InternalBucket bucket, + String aggPath + ) { List aggPathsList = AggregationPath.parse(aggPath).getPathElementsAsStringList(); Object propertyValue = bucket.getProperty(agg.getName(), aggPathsList); if (propertyValue == null || (propertyValue instanceof InternalTDigestPercentiles) == false) { @@ -196,9 +215,11 @@ private TDigestState resolveTDigestBucketValue(MultiBucketsAggregation agg, return ((InternalTDigestPercentiles) propertyValue).getState(); } - private DoubleHistogram resolveHDRBucketValue(MultiBucketsAggregation agg, - InternalMultiBucketAggregation.InternalBucket bucket, - String aggPath) { + private DoubleHistogram resolveHDRBucketValue( + MultiBucketsAggregation agg, + InternalMultiBucketAggregation.InternalBucket bucket, + String aggPath + ) { List aggPathsList = AggregationPath.parse(aggPath).getPathElementsAsStringList(); Object propertyValue = bucket.getProperty(agg.getName(), aggPathsList); if (propertyValue == null || (propertyValue instanceof InternalHDRPercentiles) == false) { @@ -207,11 +228,19 @@ private DoubleHistogram resolveHDRBucketValue(MultiBucketsAggregation agg, return ((InternalHDRPercentiles) propertyValue).getState(); } - private IllegalArgumentException buildResolveError(MultiBucketsAggregation agg, List aggPathsList, - Object propertyValue, String method) { + private IllegalArgumentException buildResolveError( + MultiBucketsAggregation agg, + List aggPathsList, + Object propertyValue, + String method + ) { if (propertyValue == null) { - return new IllegalArgumentException(AbstractPipelineAggregationBuilder.BUCKETS_PATH_FIELD.getPreferredName() - + " must reference a " + method + " percentile aggregation"); + return new IllegalArgumentException( + AbstractPipelineAggregationBuilder.BUCKETS_PATH_FIELD.getPreferredName() + + " must reference a " + + method + + " percentile aggregation" + ); } else { String currentAggName; if (aggPathsList.isEmpty()) { @@ -219,9 +248,16 @@ private IllegalArgumentException buildResolveError(MultiBucketsAggregation agg, } else { currentAggName = aggPathsList.get(0); } - return new IllegalArgumentException(AbstractPipelineAggregationBuilder.BUCKETS_PATH_FIELD.getPreferredName() - + " must reference a " + method + " percentiles aggregation, got: [" - + propertyValue.getClass().getSimpleName() + "] at aggregation [" + currentAggName + "]"); + return new IllegalArgumentException( + AbstractPipelineAggregationBuilder.BUCKETS_PATH_FIELD.getPreferredName() + + " must reference a " + + method + + " percentiles aggregation, got: [" + + propertyValue.getClass().getSimpleName() + + "] at aggregation [" + + currentAggName + + "]" + ); } } @@ -236,7 +272,7 @@ private int clamp(int index, int length) { } // TODO: replace this with the PercentilesConfig that's used by the percentiles builder. 
- // The config isn't available through the Internal objects + // The config isn't available through the Internal objects /** helper class to collect the percentile's configuration */ private static class PercentileConfig { final double[] keys; diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/multiterms/MultiTermsAggregationBuilder.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/multiterms/MultiTermsAggregationBuilder.java index 6f358c241d0ec..d7217b93b8c90 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/multiterms/MultiTermsAggregationBuilder.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/multiterms/MultiTermsAggregationBuilder.java @@ -7,20 +7,12 @@ package org.elasticsearch.xpack.analytics.multiterms; -import java.io.IOException; -import java.util.ArrayList; -import java.util.Collections; -import java.util.List; -import java.util.Map; -import java.util.Objects; -import java.util.stream.Collectors; - -import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.ContextParser; import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; import org.elasticsearch.common.xcontent.ObjectParser; +import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.search.aggregations.AbstractAggregationBuilder; import org.elasticsearch.search.aggregations.AggregationBuilder; @@ -36,6 +28,14 @@ import org.elasticsearch.search.aggregations.support.ValuesSourceConfig; import org.elasticsearch.search.aggregations.support.ValuesSourceRegistry; +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.stream.Collectors; + public class MultiTermsAggregationBuilder extends AbstractAggregationBuilder { public static final String NAME = "multi_terms"; public static final ParseField TERMS_FIELD = new ParseField("terms"); diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/multiterms/MultiTermsAggregationFactory.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/multiterms/MultiTermsAggregationFactory.java index a8abba7fff725..2642f5c3c6111 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/multiterms/MultiTermsAggregationFactory.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/multiterms/MultiTermsAggregationFactory.java @@ -32,16 +32,19 @@ public class MultiTermsAggregationFactory extends AggregatorFactory { private final Aggregator.SubAggCollectionMode collectMode; private final TermsAggregator.BucketCountThresholds bucketCountThresholds; - public MultiTermsAggregationFactory(String name, List configs, - List formats, - boolean showTermDocCountError, - BucketOrder order, - Aggregator.SubAggCollectionMode collectMode, - TermsAggregator.BucketCountThresholds bucketCountThresholds, - AggregationContext context, - AggregatorFactory parent, - AggregatorFactories.Builder subFactoriesBuilder, - Map metadata) throws IOException { + public MultiTermsAggregationFactory( + String name, + List configs, + List formats, + boolean showTermDocCountError, + BucketOrder order, + Aggregator.SubAggCollectionMode 
collectMode, + TermsAggregator.BucketCountThresholds bucketCountThresholds, + AggregationContext context, + AggregatorFactory parent, + AggregatorFactories.Builder subFactoriesBuilder, + Map metadata + ) throws IOException { super(name, context, parent, subFactoriesBuilder, metadata); this.configs = configs; this.formats = formats; @@ -52,9 +55,8 @@ public MultiTermsAggregationFactory(String name, List config } @Override - protected Aggregator createInternal(Aggregator parent, - CardinalityUpperBound cardinality, - Map metadata) throws IOException { + protected Aggregator createInternal(Aggregator parent, CardinalityUpperBound cardinality, Map metadata) + throws IOException { TermsAggregator.BucketCountThresholds bucketCountThresholds = new TermsAggregator.BucketCountThresholds(this.bucketCountThresholds); if (InternalOrder.isKeyOrder(order) == false && bucketCountThresholds.getShardSize() == MultiTermsAggregationBuilder.DEFAULT_BUCKET_COUNT_THRESHOLDS.getShardSize()) { @@ -64,7 +66,19 @@ protected Aggregator createInternal(Aggregator parent, bucketCountThresholds.setShardSize(BucketUtils.suggestShardSideQueueSize(bucketCountThresholds.getRequiredSize())); } bucketCountThresholds.ensureValidity(); - return new MultiTermsAggregator(name, factories, context, parent, configs, formats, showTermDocCountError, order, - collectMode, bucketCountThresholds, cardinality, metadata); + return new MultiTermsAggregator( + name, + factories, + context, + parent, + configs, + formats, + showTermDocCountError, + order, + collectMode, + bucketCountThresholds, + cardinality, + metadata + ); } } diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/multiterms/MultiTermsAggregator.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/multiterms/MultiTermsAggregator.java index ac7855ea25049..b522a5efe7eb5 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/multiterms/MultiTermsAggregator.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/multiterms/MultiTermsAggregator.java @@ -14,11 +14,11 @@ import org.apache.lucene.util.BytesRefBuilder; import org.apache.lucene.util.PriorityQueue; import org.elasticsearch.ExceptionsHelper; -import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.index.fielddata.SortedBinaryDocValues; import org.elasticsearch.index.fielddata.SortedNumericDoubleValues; import org.elasticsearch.search.DocValueFormat; diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/normalize/NormalizePipelineAggregationBuilder.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/normalize/NormalizePipelineAggregationBuilder.java index 5a193724d21e1..fd69d73ed6122 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/normalize/NormalizePipelineAggregationBuilder.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/normalize/NormalizePipelineAggregationBuilder.java @@ -7,10 +7,10 @@ package org.elasticsearch.xpack.analytics.normalize; -import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.io.stream.StreamInput; import 
org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.ConstructingObjectParser; +import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.pipeline.AbstractPipelineAggregationBuilder; @@ -40,8 +40,10 @@ public class NormalizePipelineAggregationBuilder extends AbstractPipelineAggrega @SuppressWarnings("unchecked") public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - NAME, false, (args, name) -> new NormalizePipelineAggregationBuilder(name, (String) args[0], - (String) args[1], (List) args[2])); + NAME, + false, + (args, name) -> new NormalizePipelineAggregationBuilder(name, (String) args[0], (String) args[1], (List) args[2]) + ); static { PARSER.declareString(optionalConstructorArg(), FORMAT); @@ -50,12 +52,18 @@ public class NormalizePipelineAggregationBuilder extends AbstractPipelineAggrega } static final Map> NAME_MAP = Map.of( - RescaleZeroToOne.NAME, RescaleZeroToOne::new, - RescaleZeroToOneHundred.NAME, RescaleZeroToOneHundred::new, - Mean.NAME, Mean::new, - ZScore.NAME, ZScore::new, - Percent.NAME, Percent::new, - Softmax.NAME, Softmax::new + RescaleZeroToOne.NAME, + RescaleZeroToOne::new, + RescaleZeroToOneHundred.NAME, + RescaleZeroToOneHundred::new, + Mean.NAME, + Mean::new, + ZScore.NAME, + ZScore::new, + Percent.NAME, + Percent::new, + Softmax.NAME, + Softmax::new ); static String validateMethodName(String name) { diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/normalize/NormalizePipelineAggregator.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/normalize/NormalizePipelineAggregator.java index 4963545933e9d..159e6ce572a16 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/normalize/NormalizePipelineAggregator.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/normalize/NormalizePipelineAggregator.java @@ -30,9 +30,13 @@ public class NormalizePipelineAggregator extends PipelineAggregator { private final DocValueFormat formatter; private final Function methodSupplier; - NormalizePipelineAggregator(String name, String[] bucketsPaths, DocValueFormat formatter, - Function methodSupplier, - Map metadata) { + NormalizePipelineAggregator( + String name, + String[] bucketsPaths, + DocValueFormat formatter, + Function methodSupplier, + Map metadata + ) { super(name, bucketsPaths, metadata); this.formatter = formatter; this.methodSupplier = methodSupplier; @@ -41,13 +45,15 @@ public class NormalizePipelineAggregator extends PipelineAggregator { @Override public InternalAggregation reduce(InternalAggregation aggregation, ReduceContext reduceContext) { @SuppressWarnings("unchecked") - InternalMultiBucketAggregation originalAgg = - (InternalMultiBucketAggregation) aggregation; + InternalMultiBucketAggregation originalAgg = (InternalMultiBucketAggregation< + ?, + InternalMultiBucketAggregation.InternalBucket>) aggregation; List buckets = originalAgg.getBuckets(); List newBuckets = new ArrayList<>(buckets.size()); double[] values = buckets.stream() - .mapToDouble(bucket -> resolveBucketValue(originalAgg, bucket, bucketsPaths()[0], GapPolicy.SKIP)).toArray(); + .mapToDouble(bucket -> resolveBucketValue(originalAgg, bucket, bucketsPaths()[0], GapPolicy.SKIP)) + .toArray(); DoubleUnaryOperator method = methodSupplier.apply(values); diff --git 
a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/normalize/NormalizePipelineMethods.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/normalize/NormalizePipelineMethods.java index 97d075cc97428..7b7f11846ee00 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/normalize/NormalizePipelineMethods.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/normalize/NormalizePipelineMethods.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.analytics.normalize; - import java.util.function.DoubleUnaryOperator; class NormalizePipelineMethods { @@ -96,7 +95,7 @@ static class Softmax implements DoubleUnaryOperator { Softmax(double[] values) { double sumExp = 0.0; - for (Double value : values) { + for (Double value : values) { if (value.isNaN() == false) { sumExp += Math.exp(value); } diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/rate/AbstractRateAggregator.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/rate/AbstractRateAggregator.java index 4f3c3d1ccc5ce..d07cfd83f04f6 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/rate/AbstractRateAggregator.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/rate/AbstractRateAggregator.java @@ -8,8 +8,8 @@ import org.apache.lucene.search.ScoreMode; import org.elasticsearch.common.Rounding; -import org.elasticsearch.core.Releasables; import org.elasticsearch.common.util.DoubleArray; +import org.elasticsearch.core.Releasables; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.Aggregator; import org.elasticsearch.search.aggregations.InternalAggregation; diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/rate/RateAggregationBuilder.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/rate/RateAggregationBuilder.java index 978b9da52fecc..a2a34ddb1d72b 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/rate/RateAggregationBuilder.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/rate/RateAggregationBuilder.java @@ -6,16 +6,12 @@ */ package org.elasticsearch.xpack.analytics.rate; -import java.io.IOException; -import java.util.Map; -import java.util.Objects; - import org.elasticsearch.Version; -import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.Rounding; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.ObjectParser; +import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.AggregationBuilder; @@ -30,6 +26,10 @@ import org.elasticsearch.search.aggregations.support.ValuesSourceRegistry; import org.elasticsearch.search.aggregations.support.ValuesSourceType; +import java.io.IOException; +import java.util.Map; +import java.util.Objects; + public class RateAggregationBuilder extends ValuesSourceAggregationBuilder.LeafOnly { public static final String NAME = "rate"; public static final ParseField UNIT_FIELD = new ParseField("unit"); @@ -119,20 +119,29 @@ protected ValuesSourceRegistry.RegistryKey getRegistryKey() { @Override protected RateAggregatorFactory innerBuild( - 
AggregationContext context, - ValuesSourceConfig config, - AggregatorFactory parent, - AggregatorFactories.Builder subFactoriesBuilder) throws IOException { + AggregationContext context, + ValuesSourceConfig config, + AggregatorFactory parent, + AggregatorFactories.Builder subFactoriesBuilder + ) throws IOException { if (field() == null && script() == null) { if (rateMode != null) { throw new IllegalArgumentException("The mode parameter is only supported with field or script"); } } - RateAggregatorSupplier aggregatorSupplier = - context.getValuesSourceRegistry().getAggregator(REGISTRY_KEY, config); - return new RateAggregatorFactory(name, config, rateUnit, rateMode, context, parent, - subFactoriesBuilder, metadata, aggregatorSupplier); + RateAggregatorSupplier aggregatorSupplier = context.getValuesSourceRegistry().getAggregator(REGISTRY_KEY, config); + return new RateAggregatorFactory( + name, + config, + rateUnit, + rateMode, + context, + parent, + subFactoriesBuilder, + metadata, + aggregatorSupplier + ); } @Override diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/rate/RateAggregatorFactory.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/rate/RateAggregatorFactory.java index 8ef0bf191619d..e5a243012395b 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/rate/RateAggregatorFactory.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/rate/RateAggregatorFactory.java @@ -77,12 +77,8 @@ public LeafBucketCollector getLeafCollector(LeafReaderContext ctx, LeafBucketCol } @Override - protected Aggregator doCreateInternal( - Aggregator parent, - CardinalityUpperBound bucketCardinality, - Map metadata - ) throws IOException { - return aggregatorSupplier - .build(name, config, rateUnit, rateMode, context, parent, metadata); + protected Aggregator doCreateInternal(Aggregator parent, CardinalityUpperBound bucketCardinality, Map metadata) + throws IOException { + return aggregatorSupplier.build(name, config, rateUnit, rateMode, context, parent, metadata); } } diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/rate/RateMode.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/rate/RateMode.java index e900fc0767659..81f2b1fc70845 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/rate/RateMode.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/rate/RateMode.java @@ -12,7 +12,8 @@ * Rate mode - value_count or sum */ public enum RateMode { - VALUE_COUNT, SUM; + VALUE_COUNT, + SUM; public static RateMode resolve(String name) { return RateMode.valueOf(name.toUpperCase(Locale.ROOT)); diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/stringstats/InternalStringStats.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/stringstats/InternalStringStats.java index cfe35dae08925..563262ec20d6f 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/stringstats/InternalStringStats.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/stringstats/InternalStringStats.java @@ -6,9 +6,9 @@ */ package org.elasticsearch.xpack.analytics.stringstats; -import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import 
org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.InternalAggregation; @@ -34,7 +34,8 @@ Object getFieldValue(InternalStringStats stats) { Object getFieldValue(InternalStringStats stats) { return stats.getMinLength(); } - }, max_length { + }, + max_length { Object getFieldValue(InternalStringStats stats) { return stats.getMaxLength(); } @@ -61,10 +62,17 @@ Object getFieldValue(InternalStringStats stats) { private final int maxLength; private final Map charOccurrences; - public InternalStringStats(String name, long count, long totalLength, int minLength, int maxLength, - Map charOccurences, boolean showDistribution, - DocValueFormat formatter, - Map metadata) { + public InternalStringStats( + String name, + long count, + long totalLength, + int minLength, + int maxLength, + Map charOccurences, + boolean showDistribution, + DocValueFormat formatter, + Map metadata + ) { super(name, metadata); this.format = formatter; this.showDistribution = showDistribution; @@ -106,7 +114,7 @@ public long getCount() { return count; } - long getTotalLength () { + long getTotalLength() { return totalLength; } @@ -143,12 +151,10 @@ public double getEntropy() { * this character to occur as value. The map is ordered by frequency descending. */ Map getDistribution() { - return charOccurrences.entrySet().stream() + return charOccurrences.entrySet() + .stream() .sorted((e1, e2) -> e2.getValue().compareTo(e1.getValue())) - .collect( - Collectors.toMap(e -> e.getKey(), e -> (double) e.getValue() / totalLength, - (e1, e2) -> e2, LinkedHashMap::new) - ); + .collect(Collectors.toMap(e -> e.getKey(), e -> (double) e.getValue() / totalLength, (e1, e2) -> e2, LinkedHashMap::new)); } /** Calculate base 2 logarithm */ @@ -206,13 +212,10 @@ public InternalStringStats reduce(List aggregations, Reduce minLength = Math.min(minLength, stats.getMinLength()); maxLength = Math.max(maxLength, stats.getMaxLength()); totalLength += stats.totalLength; - stats.charOccurrences.forEach((k, v) -> - occurs.merge(k, v, (oldValue, newValue) -> oldValue + newValue) - ); + stats.charOccurrences.forEach((k, v) -> occurs.merge(k, v, (oldValue, newValue) -> oldValue + newValue)); } - return new InternalStringStats(name, count, totalLength, minLength, maxLength, occurs, - showDistribution, format, getMetadata()); + return new InternalStringStats(name, count, totalLength, minLength, maxLength, occurs, showDistribution, format, getMetadata()); } @Override @@ -263,7 +266,7 @@ public XContentBuilder doXContentBody(XContentBuilder builder, Params params) th builder.field(Fields.ENTROPY_AS_STRING.getPreferredName(), format.format(getEntropy())); if (showDistribution) { builder.startObject(Fields.DISTRIBUTION_AS_STRING.getPreferredName()); - for (Map.Entry e: getDistribution().entrySet()) { + for (Map.Entry e : getDistribution().entrySet()) { builder.field(e.getKey(), format.format(e.getValue()).toString()); } builder.endObject(); @@ -294,11 +297,11 @@ public boolean equals(Object obj) { if (super.equals(obj) == false) return false; InternalStringStats other = (InternalStringStats) obj; - return count == other.count && - minLength == other.minLength && - maxLength == other.maxLength && - totalLength == other.totalLength && - Objects.equals(charOccurrences, other.charOccurrences) && - showDistribution == other.showDistribution; + return count == other.count + && minLength == other.minLength + && maxLength == 
other.maxLength + && totalLength == other.totalLength + && Objects.equals(charOccurrences, other.charOccurrences) + && showDistribution == other.showDistribution; } } diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/stringstats/StringStatsAggregationBuilder.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/stringstats/StringStatsAggregationBuilder.java index 41f1355b1e647..29b20d6f6cfda 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/stringstats/StringStatsAggregationBuilder.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/stringstats/StringStatsAggregationBuilder.java @@ -6,10 +6,10 @@ */ package org.elasticsearch.xpack.analytics.stringstats; -import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.ObjectParser; +import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregatorFactories; @@ -32,8 +32,10 @@ public class StringStatsAggregationBuilder extends ValuesSourceAggregationBuilde new ValuesSourceRegistry.RegistryKey<>(NAME, StringStatsAggregatorSupplier.class); private static final ParseField SHOW_DISTRIBUTION_FIELD = new ParseField("show_distribution"); - public static final ObjectParser PARSER = - ObjectParser.fromBuilder(NAME, StringStatsAggregationBuilder::new); + public static final ObjectParser PARSER = ObjectParser.fromBuilder( + NAME, + StringStatsAggregationBuilder::new + ); static { ValuesSourceAggregationBuilder.declareFields(PARSER, true, true, false); PARSER.declareBoolean(StringStatsAggregationBuilder::showDistribution, StringStatsAggregationBuilder.SHOW_DISTRIBUTION_FIELD); @@ -45,9 +47,11 @@ public StringStatsAggregationBuilder(String name) { super(name); } - public StringStatsAggregationBuilder(StringStatsAggregationBuilder clone, - AggregatorFactories.Builder factoriesBuilder, - Map metadata) { + public StringStatsAggregationBuilder( + StringStatsAggregationBuilder clone, + AggregatorFactories.Builder factoriesBuilder, + Map metadata + ) { super(clone, factoriesBuilder, metadata); this.showDistribution = clone.showDistribution(); } @@ -79,15 +83,23 @@ public BucketCardinality bucketCardinality() { } @Override - protected StringStatsAggregatorFactory innerBuild(AggregationContext context, - ValuesSourceConfig config, - AggregatorFactory parent, - AggregatorFactories.Builder subFactoriesBuilder) throws IOException { - StringStatsAggregatorSupplier aggregatorSupplier = - context.getValuesSourceRegistry().getAggregator(REGISTRY_KEY, config); - return new StringStatsAggregatorFactory(name, config, showDistribution, context, - parent, subFactoriesBuilder, metadata, - aggregatorSupplier); + protected StringStatsAggregatorFactory innerBuild( + AggregationContext context, + ValuesSourceConfig config, + AggregatorFactory parent, + AggregatorFactories.Builder subFactoriesBuilder + ) throws IOException { + StringStatsAggregatorSupplier aggregatorSupplier = context.getValuesSourceRegistry().getAggregator(REGISTRY_KEY, config); + return new StringStatsAggregatorFactory( + name, + config, + showDistribution, + context, + parent, + subFactoriesBuilder, + metadata, + aggregatorSupplier + ); } @Override diff --git 
a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/stringstats/StringStatsAggregator.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/stringstats/StringStatsAggregator.java index a00e449083560..6266812b6d7c5 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/stringstats/StringStatsAggregator.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/stringstats/StringStatsAggregator.java @@ -9,10 +9,10 @@ import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.core.Releasables; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.IntArray; import org.elasticsearch.common.util.LongArray; +import org.elasticsearch.core.Releasables; import org.elasticsearch.index.fielddata.SortedBinaryDocValues; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.Aggregator; @@ -49,8 +49,15 @@ public class StringStatsAggregator extends MetricsAggregator { /** Map that stores the number of occurrences for each character. */ Map charOccurrences; - StringStatsAggregator(String name, ValuesSource valuesSource, boolean showDistribution, DocValueFormat format, - AggregationContext context, Aggregator parent, Map metadata) throws IOException { + StringStatsAggregator( + String name, + ValuesSource valuesSource, + boolean showDistribution, + DocValueFormat format, + AggregationContext context, + Aggregator parent, + Map metadata + ) throws IOException { super(name, context, parent, metadata); this.showDistribution = showDistribution; this.valuesSource = (ValuesSource.Bytes) valuesSource; @@ -72,8 +79,7 @@ public ScoreMode scoreMode() { } @Override - public LeafBucketCollector getLeafCollector(LeafReaderContext ctx, - final LeafBucketCollector sub) throws IOException { + public LeafBucketCollector getLeafCollector(LeafReaderContext ctx, final LeafBucketCollector sub) throws IOException { if (valuesSource == null) { return LeafBucketCollector.NO_OP_COLLECTOR; } @@ -148,16 +154,32 @@ public InternalAggregation buildAggregation(long bucket) { } } - return new InternalStringStats(name, count.get(bucket), totalLength.get(bucket), - minLength.get(bucket), maxLength.get(bucket), occurrences, showDistribution, - format, metadata()); + return new InternalStringStats( + name, + count.get(bucket), + totalLength.get(bucket), + minLength.get(bucket), + maxLength.get(bucket), + occurrences, + showDistribution, + format, + metadata() + ); } @Override public InternalAggregation buildEmptyAggregation() { - return new InternalStringStats(name, - 0, 0, Integer.MAX_VALUE, Integer.MIN_VALUE, - Collections.emptyMap(), showDistribution, format, metadata()); + return new InternalStringStats( + name, + 0, + 0, + Integer.MAX_VALUE, + Integer.MIN_VALUE, + Collections.emptyMap(), + showDistribution, + format, + metadata() + ); } @Override diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/stringstats/StringStatsAggregatorFactory.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/stringstats/StringStatsAggregatorFactory.java index ac219405d24a3..0cde0e927c3ea 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/stringstats/StringStatsAggregatorFactory.java +++ 
b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/stringstats/StringStatsAggregatorFactory.java @@ -25,13 +25,16 @@ class StringStatsAggregatorFactory extends ValuesSourceAggregatorFactory { private final StringStatsAggregatorSupplier aggregatorSupplier; private final boolean showDistribution; - StringStatsAggregatorFactory(String name, ValuesSourceConfig config, - Boolean showDistribution, - AggregationContext context, - AggregatorFactory parent, AggregatorFactories.Builder subFactoriesBuilder, - Map metadata, - StringStatsAggregatorSupplier aggregatorSupplier) - throws IOException { + StringStatsAggregatorFactory( + String name, + ValuesSourceConfig config, + Boolean showDistribution, + AggregationContext context, + AggregatorFactory parent, + AggregatorFactories.Builder subFactoriesBuilder, + Map metadata, + StringStatsAggregatorSupplier aggregatorSupplier + ) throws IOException { super(name, config, context, parent, subFactoriesBuilder, metadata); this.aggregatorSupplier = aggregatorSupplier; @@ -39,9 +42,7 @@ class StringStatsAggregatorFactory extends ValuesSourceAggregatorFactory { } static void registerAggregators(ValuesSourceRegistry.Builder builder) { - builder.register( - StringStatsAggregationBuilder.REGISTRY_KEY, - CoreValuesSourceType.KEYWORD, StringStatsAggregator::new, true); + builder.register(StringStatsAggregationBuilder.REGISTRY_KEY, CoreValuesSourceType.KEYWORD, StringStatsAggregator::new, true); } @Override @@ -50,11 +51,9 @@ protected Aggregator createUnmapped(Aggregator parent, Map metad } @Override - protected Aggregator doCreateInternal(Aggregator parent, - CardinalityUpperBound cardinality, - Map metadata) throws IOException { - return aggregatorSupplier - .build(name, config.getValuesSource(), showDistribution, config.format(), context, parent, metadata); + protected Aggregator doCreateInternal(Aggregator parent, CardinalityUpperBound cardinality, Map metadata) + throws IOException { + return aggregatorSupplier.build(name, config.getValuesSource(), showDistribution, config.format(), context, parent, metadata); } } diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/stringstats/StringStatsAggregatorSupplier.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/stringstats/StringStatsAggregatorSupplier.java index 79f9a7407d8d1..ed3b9da432234 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/stringstats/StringStatsAggregatorSupplier.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/stringstats/StringStatsAggregatorSupplier.java @@ -16,11 +16,13 @@ public interface StringStatsAggregatorSupplier { - Aggregator build(String name, - ValuesSource valuesSource, - boolean showDistribution, - DocValueFormat format, - AggregationContext context, - Aggregator parent, - Map metadata) throws IOException; + Aggregator build( + String name, + ValuesSource valuesSource, + boolean showDistribution, + DocValueFormat format, + AggregationContext context, + Aggregator parent, + Map metadata + ) throws IOException; } diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/topmetrics/InternalTopMetrics.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/topmetrics/InternalTopMetrics.java index d61fafa1aed9a..a27c7d56463a5 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/topmetrics/InternalTopMetrics.java +++ 
b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/topmetrics/InternalTopMetrics.java @@ -7,12 +7,12 @@ package org.elasticsearch.xpack.analytics.topmetrics; import org.apache.lucene.util.PriorityQueue; -import org.elasticsearch.core.Nullable; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.core.Nullable; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.metrics.InternalMultiValueAggregation; @@ -29,8 +29,7 @@ import static java.util.Collections.emptyList; import static org.elasticsearch.search.builder.SearchSourceBuilder.SORT_FIELD; -import static org.elasticsearch.xpack.analytics.topmetrics.TopMetricsAggregationBuilder.METRIC_FIELD; - +import static org.elasticsearch.xpack.analytics.topmetrics.TopMetricsAggregationBuilder.METRIC_FIELD; public class InternalTopMetrics extends InternalMultiValueAggregation { private final SortOrder sortOrder; @@ -38,8 +37,14 @@ public class InternalTopMetrics extends InternalMultiValueAggregation { private final List metricNames; private final List topMetrics; - public InternalTopMetrics(String name, @Nullable SortOrder sortOrder, List metricNames, - int size, List topMetrics, Map metadata) { + public InternalTopMetrics( + String name, + @Nullable SortOrder sortOrder, + List metricNames, + int size, + List topMetrics, + Map metadata + ) { super(name, metadata); this.sortOrder = sortOrder; this.metricNames = metricNames; @@ -157,10 +162,10 @@ public int hashCode() { public boolean equals(Object obj) { if (super.equals(obj) == false) return false; InternalTopMetrics other = (InternalTopMetrics) obj; - return sortOrder.equals(other.sortOrder) && - metricNames.equals(other.metricNames) && - size == other.size && - topMetrics.equals(other.topMetrics); + return sortOrder.equals(other.sortOrder) + && metricNames.equals(other.metricNames) + && size == other.size + && topMetrics.equals(other.topMetrics); } @Override @@ -312,9 +317,7 @@ public boolean equals(Object obj) { return false; } TopMetric other = (TopMetric) obj; - return sortFormat.equals(other.sortFormat) - && sortValue.equals(other.sortValue) - && metricValues.equals(other.metricValues); + return sortFormat.equals(other.sortFormat) && sortValue.equals(other.sortValue) && metricValues.equals(other.metricValues); } @Override diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/topmetrics/TopMetricsAggregationBuilder.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/topmetrics/TopMetricsAggregationBuilder.java index c7677ecde427a..04b39d1b31e42 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/topmetrics/TopMetricsAggregationBuilder.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/topmetrics/TopMetricsAggregationBuilder.java @@ -6,12 +6,12 @@ */ package org.elasticsearch.xpack.analytics.topmetrics; -import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.ConstructingObjectParser; import org.elasticsearch.common.xcontent.ContextParser; import 
org.elasticsearch.common.xcontent.ObjectParser; +import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.search.aggregations.AbstractAggregationBuilder; import org.elasticsearch.search.aggregations.AggregationBuilder; @@ -68,24 +68,35 @@ public static void registerAggregators(ValuesSourceRegistry.Builder registry) { */ private static final int DEFAULT_SIZE = 1; - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>(NAME, - false, (args, name) -> { - @SuppressWarnings("unchecked") - List> sorts = (List>) args[0]; - int size = args[1] == null ? DEFAULT_SIZE : (Integer) args[1]; - if (size < 1) { - throw new IllegalArgumentException("[size] must be more than 0 but was [" + size + "]"); - } - @SuppressWarnings("unchecked") - List metricFields = (List) args[2]; - return new TopMetricsAggregationBuilder(name, sorts, size, metricFields); - }); + public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + NAME, + false, + (args, name) -> { + @SuppressWarnings("unchecked") + List> sorts = (List>) args[0]; + int size = args[1] == null ? DEFAULT_SIZE : (Integer) args[1]; + if (size < 1) { + throw new IllegalArgumentException("[size] must be more than 0 but was [" + size + "]"); + } + @SuppressWarnings("unchecked") + List metricFields = (List) args[2]; + return new TopMetricsAggregationBuilder(name, sorts, size, metricFields); + } + ); static { - PARSER.declareField(constructorArg(), (p, n) -> SortBuilder.fromXContent(p), SORT_FIELD, - ObjectParser.ValueType.OBJECT_ARRAY_OR_STRING); + PARSER.declareField( + constructorArg(), + (p, n) -> SortBuilder.fromXContent(p), + SORT_FIELD, + ObjectParser.ValueType.OBJECT_ARRAY_OR_STRING + ); PARSER.declareInt(optionalConstructorArg(), SIZE_FIELD); - ContextParser metricParser = - MultiValuesSourceFieldConfig.parserBuilder(true, false, false, false); + ContextParser metricParser = MultiValuesSourceFieldConfig.parserBuilder( + true, + false, + false, + false + ); PARSER.declareObjectArray(constructorArg(), (p, n) -> metricParser.parse(p, null).build(), METRIC_FIELD); } @@ -97,8 +108,12 @@ public static void registerAggregators(ValuesSourceRegistry.Builder registry) { /** * Build a {@code top_metrics} aggregation request. */ - public TopMetricsAggregationBuilder(String name, List> sortBuilders, int size, - List metricFields) { + public TopMetricsAggregationBuilder( + String name, + List> sortBuilders, + int size, + List metricFields + ) { super(name); if (sortBuilders.size() != 1) { throw new IllegalArgumentException("[sort] must contain exactly one sort"); @@ -111,8 +126,11 @@ public TopMetricsAggregationBuilder(String name, List> sortBuilde /** * Cloning ctor for reducing. 
*/ - public TopMetricsAggregationBuilder(TopMetricsAggregationBuilder clone, AggregatorFactories.Builder factoriesBuilder, - Map metadata) { + public TopMetricsAggregationBuilder( + TopMetricsAggregationBuilder clone, + AggregatorFactories.Builder factoriesBuilder, + Map metadata + ) { super(clone, factoriesBuilder, metadata); this.sortBuilders = clone.sortBuilders; this.size = clone.size; @@ -150,7 +168,7 @@ public BucketCardinality bucketCardinality() { @Override protected AggregatorFactory doBuild(AggregationContext context, AggregatorFactory parent, Builder subFactoriesBuilder) - throws IOException { + throws IOException { return new TopMetricsAggregatorFactory(name, context, parent, subFactoriesBuilder, metadata, sortBuilders, size, metricFields); } @@ -165,7 +183,7 @@ protected XContentBuilder internalXContent(XContentBuilder builder, Params param builder.endArray(); builder.field(SIZE_FIELD.getPreferredName(), size); builder.startArray(METRIC_FIELD.getPreferredName()); - for (MultiValuesSourceFieldConfig metricField: metricFields) { + for (MultiValuesSourceFieldConfig metricField : metricFields) { metricField.toXContent(builder, params); } builder.endArray(); diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/topmetrics/TopMetricsAggregator.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/topmetrics/TopMetricsAggregator.java index 1b99aa93272fe..575be34199aba 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/topmetrics/TopMetricsAggregator.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/topmetrics/TopMetricsAggregator.java @@ -13,13 +13,13 @@ import org.apache.lucene.search.Scorable; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.core.Releasable; -import org.elasticsearch.core.Releasables; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.BitArray; import org.elasticsearch.common.util.DoubleArray; import org.elasticsearch.common.util.LongArray; import org.elasticsearch.common.util.ObjectArray; +import org.elasticsearch.core.Releasable; +import org.elasticsearch.core.Releasables; import org.elasticsearch.index.fielddata.NumericDoubleValues; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.MultiValueMode; @@ -231,7 +231,9 @@ abstract static class MetricValues implements BucketedSort.ExtraData, Releasable } abstract boolean needsScores(); + abstract double doubleValue(long index); + abstract InternalTopMetrics.MetricValue metricValue(long index) throws IOException; } diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/topmetrics/TopMetricsAggregatorFactory.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/topmetrics/TopMetricsAggregatorFactory.java index 629a0949886aa..e79dd650219ec 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/topmetrics/TopMetricsAggregatorFactory.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/topmetrics/TopMetricsAggregatorFactory.java @@ -33,16 +33,28 @@ public class TopMetricsAggregatorFactory extends AggregatorFactory { * can be collected per bucket. 
This defaults to a low number because * there can be a *huge* number of buckets */ - public static final Setting MAX_BUCKET_SIZE = - Setting.intSetting("index.top_metrics_max_size", 10, 1, Property.Dynamic, Property.IndexScope); + public static final Setting MAX_BUCKET_SIZE = Setting.intSetting( + "index.top_metrics_max_size", + 10, + 1, + Property.Dynamic, + Property.IndexScope + ); private final List> sortBuilders; private final int size; private final List metricFields; - public TopMetricsAggregatorFactory(String name, AggregationContext context, AggregatorFactory parent, - Builder subFactoriesBuilder, Map metadata, List> sortBuilders, - int size, List metricFields) throws IOException { + public TopMetricsAggregatorFactory( + String name, + AggregationContext context, + AggregatorFactory parent, + Builder subFactoriesBuilder, + Map metadata, + List> sortBuilders, + int size, + List metricFields + ) throws IOException { super(name, context, parent, subFactoriesBuilder, metadata); this.sortBuilders = sortBuilders; this.size = size; @@ -54,9 +66,15 @@ protected TopMetricsAggregator createInternal(Aggregator parent, CardinalityUppe throws IOException { int maxBucketSize = MAX_BUCKET_SIZE.get(context.getIndexSettings().getSettings()); if (size > maxBucketSize) { - throw new IllegalArgumentException("[top_metrics.size] must not be more than [" + maxBucketSize + "] but was [" + size - + "]. This limit can be set by changing the [" + MAX_BUCKET_SIZE.getKey() - + "] index level setting."); + throw new IllegalArgumentException( + "[top_metrics.size] must not be more than [" + + maxBucketSize + + "] but was [" + + size + + "]. This limit can be set by changing the [" + + MAX_BUCKET_SIZE.getKey() + + "] index level setting." + ); } MetricValues[] metricValues = new MetricValues[metricFields.size()]; for (int i = 0; i < metricFields.size(); i++) { diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/ttest/InternalTTest.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/ttest/InternalTTest.java index b312ae4cbcc71..138698f6574aa 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/ttest/InternalTTest.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/ttest/InternalTTest.java @@ -97,4 +97,3 @@ public double getValue() { } } - diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/ttest/PairedTTestAggregator.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/ttest/PairedTTestAggregator.java index 8d43624c3db57..eadf52e08bf02 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/ttest/PairedTTestAggregator.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/ttest/PairedTTestAggregator.java @@ -28,8 +28,15 @@ public class PairedTTestAggregator extends TTestAggregator { private TTestStatsBuilder statsBuilder; - PairedTTestAggregator(String name, MultiValuesSource.NumericMultiValuesSource valuesSources, int tails, DocValueFormat format, - AggregationContext context, Aggregator parent, Map metadata) throws IOException { + PairedTTestAggregator( + String name, + MultiValuesSource.NumericMultiValuesSource valuesSources, + int tails, + DocValueFormat format, + AggregationContext context, + Aggregator parent, + Map metadata + ) throws IOException { super(name, valuesSources, tails, format, context, parent, metadata); statsBuilder = new TTestStatsBuilder(bigArrays()); } @@ 
-50,8 +57,7 @@ protected long size() { } @Override - public LeafBucketCollector getLeafCollector(LeafReaderContext ctx, - final LeafBucketCollector sub) throws IOException { + public LeafBucketCollector getLeafCollector(LeafReaderContext ctx, final LeafBucketCollector sub) throws IOException { if (valuesSources == null) { return LeafBucketCollector.NO_OP_COLLECTOR; } @@ -65,8 +71,10 @@ public LeafBucketCollector getLeafCollector(LeafReaderContext ctx, public void collect(int doc, long bucket) throws IOException { if (docAValues.advanceExact(doc) && docBValues.advanceExact(doc)) { if (docAValues.docValueCount() > 1 || docBValues.docValueCount() > 1) { - throw new AggregationExecutionException("Encountered more than one value for a " + - "single document. Use a script to combine multiple values per doc into a single value."); + throw new AggregationExecutionException( + "Encountered more than one value for a " + + "single document. Use a script to combine multiple values per doc into a single value." + ); } statsBuilder.grow(bigArrays(), bucket + 1); // There should always be one value if advanceExact lands us here, either diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/ttest/PairedTTestState.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/ttest/PairedTTestState.java index fd62faf73c4de..aeb2cfc56e946 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/ttest/PairedTTestState.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/ttest/PairedTTestState.java @@ -64,8 +64,9 @@ public TTestState reduce(Stream states) { PairedTTestState state = (PairedTTestState) tTestState; reducer.accept(state.stats); if (state.tails != tails) { - throw new IllegalStateException("Incompatible tails value in the reduce. Expected " - + state.tails + " reduced with " + tails); + throw new IllegalStateException( + "Incompatible tails value in the reduce. 
Expected " + state.tails + " reduced with " + tails + ); } }); return new PairedTTestState(reducer.result(), tails); @@ -81,8 +82,7 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; PairedTTestState that = (PairedTTestState) o; - return tails == that.tails && - stats.equals(that.stats); + return tails == that.tails && stats.equals(that.stats); } @Override diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/ttest/TTestAggregationBuilder.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/ttest/TTestAggregationBuilder.java index 57d098c94d8d0..64056b722376c 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/ttest/TTestAggregationBuilder.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/ttest/TTestAggregationBuilder.java @@ -7,10 +7,10 @@ package org.elasticsearch.xpack.analytics.ttest; -import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.ObjectParser; +import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.query.QueryBuilder; @@ -41,8 +41,7 @@ public class TTestAggregationBuilder extends MultiValuesSourceAggregationBuilder public static final ParseField TYPE_FIELD = new ParseField("type"); public static final ParseField TAILS_FIELD = new ParseField("tails"); - public static final ObjectParser PARSER = - ObjectParser.fromBuilder(NAME, TTestAggregationBuilder::new); + public static final ObjectParser PARSER = ObjectParser.fromBuilder(NAME, TTestAggregationBuilder::new); static { MultiValuesSourceParseHelper.declareCommon(PARSER, true, ValueType.NUMERIC); @@ -64,9 +63,11 @@ public TTestAggregationBuilder(String name) { super(name); } - public TTestAggregationBuilder(TTestAggregationBuilder clone, - AggregatorFactories.Builder factoriesBuilder, - Map metadata) { + public TTestAggregationBuilder( + TTestAggregationBuilder clone, + AggregatorFactories.Builder factoriesBuilder, + Map metadata + ) { super(clone, factoriesBuilder, metadata); } @@ -91,8 +92,7 @@ public TTestAggregationBuilder testType(TTestType testType) { public TTestAggregationBuilder tails(int tails) { if (tails < 1 || tails > 2) { - throw new IllegalArgumentException( - "[tails] must be 1 or 2. Found [" + tails + "] in [" + name + "]"); + throw new IllegalArgumentException("[tails] must be 1 or 2. 
Found [" + tails + "] in [" + name + "]"); } this.tails = tails; return this; @@ -132,7 +132,8 @@ protected MultiValuesSourceAggregatorFactory innerBuild( Map filters, DocValueFormat format, AggregatorFactory parent, - AggregatorFactories.Builder subFactoriesBuilder) throws IOException { + AggregatorFactories.Builder subFactoriesBuilder + ) throws IOException { QueryBuilder filterA = filters.get(A_FIELD.getPreferredName()); QueryBuilder filterB = filters.get(B_FIELD.getPreferredName()); if (filterA == null && filterB == null) { @@ -140,15 +141,26 @@ protected MultiValuesSourceAggregatorFactory innerBuild( FieldContext fieldContextB = configs.get(B_FIELD.getPreferredName()).fieldContext(); if (fieldContextA != null && fieldContextB != null) { if (fieldContextA.field().equals(fieldContextB.field())) { - throw new IllegalArgumentException("The same field [" + fieldContextA.field() + - "] is used for both population but no filters are specified."); + throw new IllegalArgumentException( + "The same field [" + fieldContextA.field() + "] is used for both population but no filters are specified." + ); } } } - return new TTestAggregatorFactory(name, configs, testType, tails, - filterA, filterB, format, context, parent, - subFactoriesBuilder, metadata); + return new TTestAggregatorFactory( + name, + configs, + testType, + tails, + filterA, + filterB, + format, + context, + parent, + subFactoriesBuilder, + metadata + ); } @Override diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/ttest/TTestAggregator.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/ttest/TTestAggregator.java index 10b295239bbe2..807ceccf3a776 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/ttest/TTestAggregator.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/ttest/TTestAggregator.java @@ -18,7 +18,6 @@ import java.io.IOException; import java.util.Map; - public abstract class TTestAggregator extends NumericMetricsAggregator.SingleValue { protected final MultiValuesSource.NumericMultiValuesSource valuesSources; @@ -26,9 +25,16 @@ public abstract class TTestAggregator extends NumericMetri private DocValueFormat format; - TTestAggregator(String name, MultiValuesSource.NumericMultiValuesSource valuesSources, int tails, DocValueFormat format, - AggregationContext context, Aggregator parent, Map metadata) throws IOException { - super(name, context, parent, metadata); + TTestAggregator( + String name, + MultiValuesSource.NumericMultiValuesSource valuesSources, + int tails, + DocValueFormat format, + AggregationContext context, + Aggregator parent, + Map metadata + ) throws IOException { + super(name, context, parent, metadata); this.valuesSources = valuesSources; this.tails = tails; this.format = format; diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/ttest/TTestAggregatorFactory.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/ttest/TTestAggregatorFactory.java index 5740be6214490..c45e9bafd4bdf 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/ttest/TTestAggregatorFactory.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/ttest/TTestAggregatorFactory.java @@ -37,11 +37,19 @@ class TTestAggregatorFactory extends MultiValuesSourceAggregatorFactory { private final Query filterB; private Tuple weights; - TTestAggregatorFactory(String name, Map configs, TTestType testType, int 
tails, - QueryBuilder filterA, QueryBuilder filterB, - DocValueFormat format, AggregationContext context, AggregatorFactory parent, - AggregatorFactories.Builder subFactoriesBuilder, - Map metadata) throws IOException { + TTestAggregatorFactory( + String name, + Map configs, + TTestType testType, + int tails, + QueryBuilder filterA, + QueryBuilder filterB, + DocValueFormat format, + AggregationContext context, + AggregatorFactory parent, + AggregatorFactories.Builder subFactoriesBuilder, + Map metadata + ) throws IOException { super(name, configs, format, context, parent, subFactoriesBuilder, metadata); this.testType = testType; this.tails = tails; @@ -82,11 +90,9 @@ protected Aggregator doCreateInternal( } return new PairedTTestAggregator(name, numericMultiVS, tails, format, context, parent, metadata); case HOMOSCEDASTIC: - return new UnpairedTTestAggregator(name, numericMultiVS, tails, true, this::getWeights, format, context, parent, - metadata); + return new UnpairedTTestAggregator(name, numericMultiVS, tails, true, this::getWeights, format, context, parent, metadata); case HETEROSCEDASTIC: - return new UnpairedTTestAggregator(name, numericMultiVS, tails, false, this::getWeights, format, context, - parent, metadata); + return new UnpairedTTestAggregator(name, numericMultiVS, tails, false, this::getWeights, format, context, parent, metadata); default: throw new IllegalArgumentException("Unsupported t-test type " + testType); } diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/ttest/TTestStats.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/ttest/TTestStats.java index 724880d864a41..8a6be981f69cb 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/ttest/TTestStats.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/ttest/TTestStats.java @@ -74,9 +74,7 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; TTestStats that = (TTestStats) o; - return count == that.count && - Double.compare(that.sum, sum) == 0 && - Double.compare(that.sumOfSqrs, sumOfSqrs) == 0; + return count == that.count && Double.compare(that.sum, sum) == 0 && Double.compare(that.sumOfSqrs, sumOfSqrs) == 0; } @Override diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/ttest/TTestStatsBuilder.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/ttest/TTestStatsBuilder.java index 5ea50e5032b75..25755a745a387 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/ttest/TTestStatsBuilder.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/ttest/TTestStatsBuilder.java @@ -7,11 +7,11 @@ package org.elasticsearch.xpack.analytics.ttest; -import org.elasticsearch.core.Releasable; -import org.elasticsearch.core.Releasables; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.DoubleArray; import org.elasticsearch.common.util.LongArray; +import org.elasticsearch.core.Releasable; +import org.elasticsearch.core.Releasables; import org.elasticsearch.search.aggregations.metrics.CompensatedSum; public class TTestStatsBuilder implements Releasable { diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/ttest/TTestType.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/ttest/TTestType.java index 05923d58c5b30..7b901ea792601 100644 --- 
a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/ttest/TTestType.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/ttest/TTestType.java @@ -13,7 +13,9 @@ * T-test type, paired, unpaired equal variance, unpaired unequal variance */ public enum TTestType { - PAIRED, HOMOSCEDASTIC, HETEROSCEDASTIC; + PAIRED, + HOMOSCEDASTIC, + HETEROSCEDASTIC; public static TTestType resolve(String name) { return TTestType.valueOf(name.toUpperCase(Locale.ROOT)); diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/ttest/UnpairedTTestAggregator.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/ttest/UnpairedTTestAggregator.java index 6e7002d4cd2f9..5910569360988 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/ttest/UnpairedTTestAggregator.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/ttest/UnpairedTTestAggregator.java @@ -10,9 +10,9 @@ import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.Weight; import org.apache.lucene.util.Bits; -import org.elasticsearch.core.Tuple; -import org.elasticsearch.core.Releasables; import org.elasticsearch.common.lucene.Lucene; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.core.Tuple; import org.elasticsearch.index.fielddata.SortedNumericDoubleValues; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.Aggregator; @@ -35,9 +35,17 @@ public class UnpairedTTestAggregator extends TTestAggregator private final boolean homoscedastic; private final Supplier> weightsSupplier; - UnpairedTTestAggregator(String name, MultiValuesSource.NumericMultiValuesSource valuesSources, int tails, boolean homoscedastic, - Supplier> weightsSupplier, DocValueFormat format, AggregationContext context, - Aggregator parent, Map metadata) throws IOException { + UnpairedTTestAggregator( + String name, + MultiValuesSource.NumericMultiValuesSource valuesSources, + int tails, + boolean homoscedastic, + Supplier> weightsSupplier, + DocValueFormat format, + AggregationContext context, + Aggregator parent, + Map metadata + ) throws IOException { super(name, valuesSources, tails, format, context, parent, metadata); a = new TTestStatsBuilder(bigArrays()); b = new TTestStatsBuilder(bigArrays()); @@ -61,8 +69,7 @@ protected long size() { } @Override - public LeafBucketCollector getLeafCollector(LeafReaderContext ctx, - final LeafBucketCollector sub) throws IOException { + public LeafBucketCollector getLeafCollector(LeafReaderContext ctx, final LeafBucketCollector sub) throws IOException { if (valuesSources == null) { return LeafBucketCollector.NO_OP_COLLECTOR; } @@ -78,8 +85,14 @@ public LeafBucketCollector getLeafCollector(LeafReaderContext ctx, return new LeafBucketCollectorBase(sub, docAValues) { - private void processValues(int doc, long bucket, SortedNumericDoubleValues docValues, CompensatedSum compSum, - CompensatedSum compSumOfSqr, TTestStatsBuilder builder) throws IOException { + private void processValues( + int doc, + long bucket, + SortedNumericDoubleValues docValues, + CompensatedSum compSum, + CompensatedSum compSumOfSqr, + TTestStatsBuilder builder + ) throws IOException { if (docValues.advanceExact(doc)) { final int numValues = docValues.docValueCount(); for (int i = 0; i < numValues; i++) { diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/ttest/UnpairedTTestState.java 
b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/ttest/UnpairedTTestState.java index 36c507514c27d..58de3be40be83 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/ttest/UnpairedTTestState.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/ttest/UnpairedTTestState.java @@ -69,7 +69,6 @@ private double p(double sd2, double degreesOfFreedom) { return dist.cumulativeProbability(-t) * tails; } - @Override public TTestState reduce(Stream states) { TTestStats.Reducer reducerA = new TTestStats.Reducer(); @@ -77,12 +76,14 @@ public TTestState reduce(Stream states) { states.forEach(tTestState -> { UnpairedTTestState state = (UnpairedTTestState) tTestState; if (state.homoscedastic != homoscedastic) { - throw new IllegalStateException("Incompatible homoscedastic mode in the reduce. Expected " - + state.homoscedastic + " reduced with " + homoscedastic); + throw new IllegalStateException( + "Incompatible homoscedastic mode in the reduce. Expected " + state.homoscedastic + " reduced with " + homoscedastic + ); } if (state.tails != tails) { - throw new IllegalStateException("Incompatible tails value in the reduce. Expected " - + state.tails + " reduced with " + tails); + throw new IllegalStateException( + "Incompatible tails value in the reduce. Expected " + state.tails + " reduced with " + tails + ); } reducerA.accept(state.a); reducerB.accept(state.b); @@ -108,10 +109,7 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; UnpairedTTestState that = (UnpairedTTestState) o; - return homoscedastic == that.homoscedastic && - tails == that.tails && - a.equals(that.a) && - b.equals(that.b); + return homoscedastic == that.homoscedastic && tails == that.tails && a.equals(that.a) && b.equals(that.b); } @Override diff --git a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/AnalyticsTestsUtils.java b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/AnalyticsTestsUtils.java index 74ee07c1cd6b3..f9125e7d188c2 100644 --- a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/AnalyticsTestsUtils.java +++ b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/AnalyticsTestsUtils.java @@ -5,10 +5,10 @@ * 2.0. */ - package org.elasticsearch.xpack.analytics; -import java.io.IOException; +import com.tdunning.math.stats.Centroid; +import com.tdunning.math.stats.TDigest; import org.HdrHistogram.DoubleHistogram; import org.HdrHistogram.DoubleHistogramIterationValue; @@ -16,8 +16,7 @@ import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.search.aggregations.metrics.TDigestState; -import com.tdunning.math.stats.Centroid; -import com.tdunning.math.stats.TDigest; +import java.io.IOException; public final class AnalyticsTestsUtils { @@ -25,7 +24,7 @@ public final class AnalyticsTestsUtils { * Generates an index fields for histogram fields. Used in tests of aggregations that work on histogram fields. 
*/ public static BinaryDocValuesField histogramFieldDocValues(String fieldName, double[] values) throws IOException { - TDigest histogram = new TDigestState(100.0); //default + TDigest histogram = new TDigestState(100.0); // default for (double value : values) { histogram.add(value); } @@ -41,12 +40,12 @@ public static BinaryDocValuesField histogramFieldDocValues(String fieldName, dou public static BinaryDocValuesField hdrHistogramFieldDocValues(String fieldName, double[] values) throws IOException { DoubleHistogram histogram = new DoubleHistogram(3); histogram.setAutoResize(true); - for (double value: values) { + for (double value : values) { histogram.recordValue(value); } BytesStreamOutput streamOutput = new BytesStreamOutput(); - for(DoubleHistogramIterationValue value : histogram.recordedValues()) { - streamOutput.writeVInt((int)value.getCountAtValueIteratedTo()); + for (DoubleHistogramIterationValue value : histogram.recordedValues()) { + streamOutput.writeVInt((int) value.getCountAtValueIteratedTo()); streamOutput.writeDouble(value.getValueIteratedTo()); } return new BinaryDocValuesField(fieldName, streamOutput.bytes().toBytesRef()); diff --git a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/action/AnalyticsInfoTransportActionTests.java b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/action/AnalyticsInfoTransportActionTests.java index a9f1e07241bc2..f228a714f199b 100644 --- a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/action/AnalyticsInfoTransportActionTests.java +++ b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/action/AnalyticsInfoTransportActionTests.java @@ -58,12 +58,17 @@ public void init() { } public void testAvailable() throws Exception { - AnalyticsInfoTransportAction featureSet = new AnalyticsInfoTransportAction( - mock(TransportService.class), mock(ActionFilters.class)); + AnalyticsInfoTransportAction featureSet = new AnalyticsInfoTransportAction(mock(TransportService.class), mock(ActionFilters.class)); assertThat(featureSet.available(), is(true)); Client client = mockClient(); - AnalyticsUsageTransportAction usageAction = new AnalyticsUsageTransportAction(mock(TransportService.class), clusterService, null, - mock(ActionFilters.class), null, client); + AnalyticsUsageTransportAction usageAction = new AnalyticsUsageTransportAction( + mock(TransportService.class), + clusterService, + null, + mock(ActionFilters.class), + null, + client + ); PlainActionFuture future = new PlainActionFuture<>(); usageAction.masterOperation(task, null, clusterState, future); XPackFeatureSet.Usage usage = future.get().getUsage(); @@ -78,13 +83,18 @@ public void testAvailable() throws Exception { } public void testEnabled() throws Exception { - AnalyticsInfoTransportAction featureSet = new AnalyticsInfoTransportAction( - mock(TransportService.class), mock(ActionFilters.class)); + AnalyticsInfoTransportAction featureSet = new AnalyticsInfoTransportAction(mock(TransportService.class), mock(ActionFilters.class)); assertThat(featureSet.enabled(), is(true)); assertTrue(featureSet.enabled()); Client client = mockClient(); - AnalyticsUsageTransportAction usageAction = new AnalyticsUsageTransportAction(mock(TransportService.class), - clusterService, null, mock(ActionFilters.class), null, client); + AnalyticsUsageTransportAction usageAction = new AnalyticsUsageTransportAction( + mock(TransportService.class), + clusterService, + null, + mock(ActionFilters.class), + null, + client + ); 
PlainActionFuture future = new PlainActionFuture<>(); usageAction.masterOperation(task, null, clusterState, future); XPackFeatureSet.Usage usage = future.get().getUsage(); @@ -102,8 +112,8 @@ private Client mockClient() { Client client = mock(Client.class); doAnswer((Answer) invocation -> { @SuppressWarnings("unchecked") - ActionListener listener = - (ActionListener) invocation.getArguments()[2]; + ActionListener listener = (ActionListener) invocation + .getArguments()[2]; listener.onResponse(new AnalyticsStatsAction.Response(clusterName, Collections.emptyList(), Collections.emptyList())); return null; }).when(client).execute(eq(AnalyticsStatsAction.INSTANCE), any(), any()); diff --git a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/action/AnalyticsStatsActionNodeResponseTests.java b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/action/AnalyticsStatsActionNodeResponseTests.java index 1cfbff726960a..a093f58c003e0 100644 --- a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/action/AnalyticsStatsActionNodeResponseTests.java +++ b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/action/AnalyticsStatsActionNodeResponseTests.java @@ -11,8 +11,8 @@ import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.test.AbstractWireSerializingTestCase; -import org.elasticsearch.xpack.core.common.stats.EnumCounters; import org.elasticsearch.xpack.core.analytics.action.AnalyticsStatsAction; +import org.elasticsearch.xpack.core.common.stats.EnumCounters; import static org.hamcrest.Matchers.equalTo; diff --git a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/action/TransportAnalyticsStatsActionTests.java b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/action/TransportAnalyticsStatsActionTests.java index 312008389bbcd..bbb75405fbfb2 100644 --- a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/action/TransportAnalyticsStatsActionTests.java +++ b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/action/TransportAnalyticsStatsActionTests.java @@ -50,13 +50,17 @@ public TransportAnalyticsStatsAction action(AnalyticsUsage usage) { ClusterName clusterName = new ClusterName("cluster_name"); when(clusterService.getClusterName()).thenReturn(clusterName); - ClusterState clusterState = mock(ClusterState.class); when(clusterState.getMetadata()).thenReturn(Metadata.EMPTY_METADATA); when(clusterService.state()).thenReturn(clusterState); - return new TransportAnalyticsStatsAction(transportService, clusterService, threadPool, - new ActionFilters(Collections.emptySet()), usage); + return new TransportAnalyticsStatsAction( + transportService, + clusterService, + threadPool, + new ActionFilters(Collections.emptySet()), + usage + ); } public void test() throws IOException { @@ -78,10 +82,13 @@ public void test() throws IOException { private ObjectPath run(AnalyticsUsage... 
nodeUsages) throws IOException { AnalyticsStatsAction.Request request = new AnalyticsStatsAction.Request(); List nodeResponses = Arrays.stream(nodeUsages) - .map(usage -> action(usage).nodeOperation(new AnalyticsStatsAction.NodeRequest(request), null)) - .collect(toList()); + .map(usage -> action(usage).nodeOperation(new AnalyticsStatsAction.NodeRequest(request), null)) + .collect(toList()); AnalyticsStatsAction.Response response = new AnalyticsStatsAction.Response( - new ClusterName("cluster_name"), nodeResponses, emptyList()); + new ClusterName("cluster_name"), + nodeResponses, + emptyList() + ); AnalyticsFeatureSetUsage usage = new AnalyticsFeatureSetUsage(true, true, response); try (XContentBuilder builder = jsonBuilder()) { diff --git a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/aggregations/bucket/histogram/HistoBackedHistogramAggregatorTests.java b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/aggregations/bucket/histogram/HistoBackedHistogramAggregatorTests.java index 03e70af6f7c87..413debfa8e1e4 100644 --- a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/aggregations/bucket/histogram/HistoBackedHistogramAggregatorTests.java +++ b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/aggregations/bucket/histogram/HistoBackedHistogramAggregatorTests.java @@ -36,15 +36,12 @@ public class HistoBackedHistogramAggregatorTests extends AggregatorTestCase { private static final String FIELD_NAME = "field"; public void testHistograms() throws Exception { - try (Directory dir = newDirectory(); - RandomIndexWriter w = new RandomIndexWriter(random(), dir)) { - w.addDocument(singleton(histogramFieldDocValues(FIELD_NAME, new double[] {0, 1.2, 10, 12, 24}))); - w.addDocument(singleton(histogramFieldDocValues(FIELD_NAME, new double[] {5.3, 6, 6, 20}))); - w.addDocument(singleton(histogramFieldDocValues(FIELD_NAME, new double[] {-10, 0.01, 10, 10, 30}))); - - HistogramAggregationBuilder aggBuilder = new HistogramAggregationBuilder("my_agg") - .field(FIELD_NAME) - .interval(5); + try (Directory dir = newDirectory(); RandomIndexWriter w = new RandomIndexWriter(random(), dir)) { + w.addDocument(singleton(histogramFieldDocValues(FIELD_NAME, new double[] { 0, 1.2, 10, 12, 24 }))); + w.addDocument(singleton(histogramFieldDocValues(FIELD_NAME, new double[] { 5.3, 6, 6, 20 }))); + w.addDocument(singleton(histogramFieldDocValues(FIELD_NAME, new double[] { -10, 0.01, 10, 10, 30 }))); + + HistogramAggregationBuilder aggBuilder = new HistogramAggregationBuilder("my_agg").field(FIELD_NAME).interval(5); try (IndexReader reader = w.getReader()) { IndexSearcher searcher = new IndexSearcher(reader); InternalHistogram histogram = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, defaultFieldType(FIELD_NAME)); @@ -73,16 +70,12 @@ public void testHistograms() throws Exception { } public void testMinDocCount() throws Exception { - try (Directory dir = newDirectory(); - RandomIndexWriter w = new RandomIndexWriter(random(), dir)) { - w.addDocument(singleton(histogramFieldDocValues(FIELD_NAME, new double[] {0, 1.2, 10, 12, 24}))); - w.addDocument(singleton(histogramFieldDocValues(FIELD_NAME, new double[] {5.3, 6, 6, 20}))); - w.addDocument(singleton(histogramFieldDocValues(FIELD_NAME, new double[] {-10, 0.01, 10, 10, 30, 90}))); - - HistogramAggregationBuilder aggBuilder = new HistogramAggregationBuilder("my_agg") - .field(FIELD_NAME) - .interval(5) - .minDocCount(2); + try (Directory dir = newDirectory(); 
RandomIndexWriter w = new RandomIndexWriter(random(), dir)) { + w.addDocument(singleton(histogramFieldDocValues(FIELD_NAME, new double[] { 0, 1.2, 10, 12, 24 }))); + w.addDocument(singleton(histogramFieldDocValues(FIELD_NAME, new double[] { 5.3, 6, 6, 20 }))); + w.addDocument(singleton(histogramFieldDocValues(FIELD_NAME, new double[] { -10, 0.01, 10, 10, 30, 90 }))); + + HistogramAggregationBuilder aggBuilder = new HistogramAggregationBuilder("my_agg").field(FIELD_NAME).interval(5).minDocCount(2); try (IndexReader reader = w.getReader()) { IndexSearcher searcher = new IndexSearcher(reader); InternalHistogram histogram = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, defaultFieldType(FIELD_NAME)); @@ -102,15 +95,15 @@ public void testMinDocCount() throws Exception { public void testHistogramWithDocCountField() throws Exception { try (Directory dir = newDirectory(); RandomIndexWriter w = new RandomIndexWriter(random(), dir)) { - w.addDocument(List.of( - // Add the _doc_dcount field - new CustomTermFreqField("_doc_count", "_doc_count", 8), - histogramFieldDocValues(FIELD_NAME, new double[] {0, 1.2, 10, 10, 12, 24, 24, 24})) + w.addDocument( + List.of( + // Add the _doc_dcount field + new CustomTermFreqField("_doc_count", "_doc_count", 8), + histogramFieldDocValues(FIELD_NAME, new double[] { 0, 1.2, 10, 10, 12, 24, 24, 24 }) + ) ); - HistogramAggregationBuilder aggBuilder = new HistogramAggregationBuilder("my_agg") - .field(FIELD_NAME) - .interval(100); + HistogramAggregationBuilder aggBuilder = new HistogramAggregationBuilder("my_agg").field(FIELD_NAME).interval(100); try (IndexReader reader = w.getReader()) { IndexSearcher searcher = new IndexSearcher(reader); @@ -122,17 +115,15 @@ public void testHistogramWithDocCountField() throws Exception { } public void testRandomOffset() throws Exception { - try (Directory dir = newDirectory(); - RandomIndexWriter w = new RandomIndexWriter(random(), dir)) { + try (Directory dir = newDirectory(); RandomIndexWriter w = new RandomIndexWriter(random(), dir)) { // Note, these values are carefully chosen to ensure that no matter what offset we pick, no two can end up in the same bucket - w.addDocument(singleton(histogramFieldDocValues(FIELD_NAME, new double[] {3.2, 9.3}))); - w.addDocument(singleton(histogramFieldDocValues(FIELD_NAME, new double[] {-5, 3.2 }))); + w.addDocument(singleton(histogramFieldDocValues(FIELD_NAME, new double[] { 3.2, 9.3 }))); + w.addDocument(singleton(histogramFieldDocValues(FIELD_NAME, new double[] { -5, 3.2 }))); final double offset = randomDouble(); final double interval = 5; final double expectedOffset = offset % interval; - HistogramAggregationBuilder aggBuilder = new HistogramAggregationBuilder("my_agg") - .field(FIELD_NAME) + HistogramAggregationBuilder aggBuilder = new HistogramAggregationBuilder("my_agg").field(FIELD_NAME) .interval(interval) .offset(offset) .minDocCount(1); @@ -156,14 +147,12 @@ public void testRandomOffset() throws Exception { } public void testExtendedBounds() throws Exception { - try (Directory dir = newDirectory(); - RandomIndexWriter w = new RandomIndexWriter(random(), dir)) { + try (Directory dir = newDirectory(); RandomIndexWriter w = new RandomIndexWriter(random(), dir)) { - w.addDocument(singleton(histogramFieldDocValues(FIELD_NAME, new double[] {-4.5, 4.3}))); - w.addDocument(singleton(histogramFieldDocValues(FIELD_NAME, new double[] {-5, 3.2 }))); + w.addDocument(singleton(histogramFieldDocValues(FIELD_NAME, new double[] { -4.5, 4.3 }))); + 
w.addDocument(singleton(histogramFieldDocValues(FIELD_NAME, new double[] { -5, 3.2 }))); - HistogramAggregationBuilder aggBuilder = new HistogramAggregationBuilder("my_agg") - .field(FIELD_NAME) + HistogramAggregationBuilder aggBuilder = new HistogramAggregationBuilder("my_agg").field(FIELD_NAME) .interval(5) .extendedBounds(-12, 13); try (IndexReader reader = w.getReader()) { @@ -213,21 +202,20 @@ public void testHardBounds() throws Exception { * Test that sub-aggregations are not supported */ public void testSubAggs() throws Exception { - try (Directory dir = newDirectory(); - RandomIndexWriter w = new RandomIndexWriter(random(), dir)) { + try (Directory dir = newDirectory(); RandomIndexWriter w = new RandomIndexWriter(random(), dir)) { - w.addDocument(singleton(histogramFieldDocValues(FIELD_NAME, new double[] {-4.5, 4.3}))); - w.addDocument(singleton(histogramFieldDocValues(FIELD_NAME, new double[] {-5, 3.2 }))); + w.addDocument(singleton(histogramFieldDocValues(FIELD_NAME, new double[] { -4.5, 4.3 }))); + w.addDocument(singleton(histogramFieldDocValues(FIELD_NAME, new double[] { -5, 3.2 }))); - HistogramAggregationBuilder aggBuilder = new HistogramAggregationBuilder("my_agg") - .field(FIELD_NAME) + HistogramAggregationBuilder aggBuilder = new HistogramAggregationBuilder("my_agg").field(FIELD_NAME) .interval(5) .extendedBounds(-12, 13) .subAggregation(new TopHitsAggregationBuilder("top_hits")); try (IndexReader reader = w.getReader()) { IndexSearcher searcher = new IndexSearcher(reader); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, () -> searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, defaultFieldType(FIELD_NAME)) ); diff --git a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/aggregations/bucket/range/HistoBackedRangeAggregatorTests.java b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/aggregations/bucket/range/HistoBackedRangeAggregatorTests.java index 52d6bf72b9c83..90a2d0611b9b3 100644 --- a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/aggregations/bucket/range/HistoBackedRangeAggregatorTests.java +++ b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/aggregations/bucket/range/HistoBackedRangeAggregatorTests.java @@ -50,31 +50,23 @@ public void testPercentilesAccuracy() throws Exception { long absError = 0L; long docCount = 0L; for (int k = 0; k < 10; k++) { - try (Directory dir = newDirectory(); - RandomIndexWriter w = new RandomIndexWriter(random(), dir)) { + try (Directory dir = newDirectory(); RandomIndexWriter w = new RandomIndexWriter(random(), dir)) { docCount += generateDocs(w); - double[] steps = IntStream.range(2, 99) - .filter(i -> i % 2 == 0) - .mapToDouble(Double::valueOf) - .toArray(); + double[] steps = IntStream.range(2, 99).filter(i -> i % 2 == 0).mapToDouble(Double::valueOf).toArray(); - PercentilesAggregationBuilder rawPercentiles = new PercentilesAggregationBuilder("my_agg") - .field(RAW_FIELD_NAME) + PercentilesAggregationBuilder rawPercentiles = new PercentilesAggregationBuilder("my_agg").field(RAW_FIELD_NAME) .percentilesConfig(new PercentilesConfig.Hdr()) .percentiles(steps); - PercentilesAggregationBuilder aggregatedPercentiles = new PercentilesAggregationBuilder("my_agg") - .field(HISTO_FIELD_NAME) + PercentilesAggregationBuilder aggregatedPercentiles = new PercentilesAggregationBuilder("my_agg").field(HISTO_FIELD_NAME) 
.percentilesConfig(new PercentilesConfig.Hdr()) .percentiles(steps); try (IndexReader reader = w.getReader()) { IndexSearcher searcher = new IndexSearcher(reader); - RangeAggregationBuilder aggBuilder = new RangeAggregationBuilder("my_agg") - .field(HISTO_FIELD_NAME); + RangeAggregationBuilder aggBuilder = new RangeAggregationBuilder("my_agg").field(HISTO_FIELD_NAME); - RangeAggregationBuilder rawFieldAgg = new RangeAggregationBuilder("my_agg") - .field(RAW_FIELD_NAME); + RangeAggregationBuilder rawFieldAgg = new RangeAggregationBuilder("my_agg").field(RAW_FIELD_NAME); Percentiles rawPercentileResults = searchAndReduce( searcher, new MatchAllDocsQuery(), @@ -93,7 +85,8 @@ public void testPercentilesAccuracy() throws Exception { for (int i = 1; i < steps.length; i++) { aggBuilder.addRange( aggregatedPercentileResults.percentile(steps[i - 1]), - aggregatedPercentileResults.percentile(steps[i])); + aggregatedPercentileResults.percentile(steps[i]) + ); rawFieldAgg.addRange(rawPercentileResults.percentile(steps[i - 1]), rawPercentileResults.percentile(steps[i])); } aggBuilder.addUnboundedFrom(aggregatedPercentileResults.percentile(steps[steps.length - 1])); @@ -117,20 +110,21 @@ public void testPercentilesAccuracy() throws Exception { } } } - assertThat((double)absError/docCount, lessThan(0.1)); + assertThat((double) absError / docCount, lessThan(0.1)); } @SuppressWarnings("rawtypes") public void testMediumRangesAccuracy() throws Exception { List ranges = Arrays.asList( - new RangeAggregator.Range(null, null, 2.0), - new RangeAggregator.Range(null, 2.0, 4.0), - new RangeAggregator.Range(null, 4.0, 6.0), - new RangeAggregator.Range(null, 6.0, 8.0), - new RangeAggregator.Range(null, 8.0, 9.0), - new RangeAggregator.Range(null, 8.0, 11.0), - new RangeAggregator.Range(null, 11.0, 12.0), - new RangeAggregator.Range(null, 12.0, null)); + new RangeAggregator.Range(null, null, 2.0), + new RangeAggregator.Range(null, 2.0, 4.0), + new RangeAggregator.Range(null, 4.0, 6.0), + new RangeAggregator.Range(null, 6.0, 8.0), + new RangeAggregator.Range(null, 8.0, 9.0), + new RangeAggregator.Range(null, 8.0, 11.0), + new RangeAggregator.Range(null, 11.0, 12.0), + new RangeAggregator.Range(null, 12.0, null) + ); testRanges(ranges, "manual_medium_ranges"); } @@ -138,7 +132,8 @@ public void testLargerRangesAccuracy() throws Exception { List ranges = Arrays.asList( new RangeAggregator.Range(null, null, 8.0), new RangeAggregator.Range(null, 8.0, 12.0), - new RangeAggregator.Range(null, 12.0, null)); + new RangeAggregator.Range(null, 12.0, null) + ); testRanges(ranges, "manual_big_ranges"); } @@ -168,7 +163,7 @@ public void testSmallerRangesAccuracy() throws Exception { new RangeAggregator.Range(null, 11.0, 11.5), new RangeAggregator.Range(null, 11.5, 12.0), new RangeAggregator.Range(null, 12.0, null) - ); + ); testRanges(ranges, "manual_small_ranges"); } @@ -177,16 +172,13 @@ private void testRanges(List ranges, String name) throws long absError = 0L; long docCount = 0L; for (int k = 0; k < 10; k++) { - try (Directory dir = newDirectory(); - RandomIndexWriter w = new RandomIndexWriter(random(), dir)) { + try (Directory dir = newDirectory(); RandomIndexWriter w = new RandomIndexWriter(random(), dir)) { docCount += generateDocs(w); try (IndexReader reader = w.getReader()) { IndexSearcher searcher = new IndexSearcher(reader); - RangeAggregationBuilder aggBuilder = new RangeAggregationBuilder("my_agg") - .field(HISTO_FIELD_NAME); - RangeAggregationBuilder rawFieldAgg = new RangeAggregationBuilder("my_agg") - 
.field(RAW_FIELD_NAME); + RangeAggregationBuilder aggBuilder = new RangeAggregationBuilder("my_agg").field(HISTO_FIELD_NAME); + RangeAggregationBuilder rawFieldAgg = new RangeAggregationBuilder("my_agg").field(RAW_FIELD_NAME); ranges.forEach(r -> { aggBuilder.addRange(r); rawFieldAgg.addRange(r); @@ -210,28 +202,32 @@ private void testRanges(List ranges, String name) throws } } } - assertThat("test " + name, (double)absError/docCount, lessThan(0.1)); + assertThat("test " + name, (double) absError / docCount, lessThan(0.1)); } @SuppressWarnings("rawtypes") public void testOverlapping() throws Exception { - try (Directory dir = newDirectory(); - RandomIndexWriter w = new RandomIndexWriter(random(), dir)) { - w.addDocument(Arrays.asList( - histogramFieldDocValues(HISTO_FIELD_NAME, new double[] {0, 1.2, 10, 12, 24}, new int[] {3, 1, 2, 4, 6}), - new CustomTermFreqField("_doc_count", "_doc_count", 16)) + try (Directory dir = newDirectory(); RandomIndexWriter w = new RandomIndexWriter(random(), dir)) { + w.addDocument( + Arrays.asList( + histogramFieldDocValues(HISTO_FIELD_NAME, new double[] { 0, 1.2, 10, 12, 24 }, new int[] { 3, 1, 2, 4, 6 }), + new CustomTermFreqField("_doc_count", "_doc_count", 16) + ) ); - w.addDocument(Arrays.asList( - histogramFieldDocValues(HISTO_FIELD_NAME, new double[] {5.3, 6, 6, 20}, new int[] {1, 3, 4, 5}), - new CustomTermFreqField("_doc_count", "_doc_count", 13)) + w.addDocument( + Arrays.asList( + histogramFieldDocValues(HISTO_FIELD_NAME, new double[] { 5.3, 6, 6, 20 }, new int[] { 1, 3, 4, 5 }), + new CustomTermFreqField("_doc_count", "_doc_count", 13) + ) ); - w.addDocument(Arrays.asList( - histogramFieldDocValues(HISTO_FIELD_NAME, new double[] {-10, 0.01, 10, 10, 30}, new int[] {10, 2, 4, 14, 11}), - new CustomTermFreqField("_doc_count", "_doc_count", 41)) + w.addDocument( + Arrays.asList( + histogramFieldDocValues(HISTO_FIELD_NAME, new double[] { -10, 0.01, 10, 10, 30 }, new int[] { 10, 2, 4, 14, 11 }), + new CustomTermFreqField("_doc_count", "_doc_count", 41) + ) ); - RangeAggregationBuilder aggBuilder = new RangeAggregationBuilder("my_agg") - .field(HISTO_FIELD_NAME) + RangeAggregationBuilder aggBuilder = new RangeAggregationBuilder("my_agg").field(HISTO_FIELD_NAME) .addUnboundedTo(0) .addRange(5, 10) .addRange(7, 10) @@ -276,23 +272,27 @@ public void testOverlapping() throws Exception { @SuppressWarnings("rawtypes") public void testNonOverlapping() throws Exception { - try (Directory dir = newDirectory(); - RandomIndexWriter w = new RandomIndexWriter(random(), dir)) { - w.addDocument(Arrays.asList( - histogramFieldDocValues(HISTO_FIELD_NAME, new double[] {0, 1.2, 10, 12, 24}, new int[] {3, 1, 2, 4, 6}), - new CustomTermFreqField("_doc_count", "_doc_count", 16)) + try (Directory dir = newDirectory(); RandomIndexWriter w = new RandomIndexWriter(random(), dir)) { + w.addDocument( + Arrays.asList( + histogramFieldDocValues(HISTO_FIELD_NAME, new double[] { 0, 1.2, 10, 12, 24 }, new int[] { 3, 1, 2, 4, 6 }), + new CustomTermFreqField("_doc_count", "_doc_count", 16) + ) ); - w.addDocument(Arrays.asList( - histogramFieldDocValues(HISTO_FIELD_NAME, new double[] {5.3, 6, 6, 20}, new int[] {1, 3, 4, 5}), - new CustomTermFreqField("_doc_count", "_doc_count", 13)) + w.addDocument( + Arrays.asList( + histogramFieldDocValues(HISTO_FIELD_NAME, new double[] { 5.3, 6, 6, 20 }, new int[] { 1, 3, 4, 5 }), + new CustomTermFreqField("_doc_count", "_doc_count", 13) + ) ); - w.addDocument(Arrays.asList( - histogramFieldDocValues(HISTO_FIELD_NAME, new double[] {-10, 0.01, 10, 10, 
30}, new int[] {10, 2, 4, 14, 11}), - new CustomTermFreqField("_doc_count", "_doc_count", 41)) + w.addDocument( + Arrays.asList( + histogramFieldDocValues(HISTO_FIELD_NAME, new double[] { -10, 0.01, 10, 10, 30 }, new int[] { 10, 2, 4, 14, 11 }), + new CustomTermFreqField("_doc_count", "_doc_count", 41) + ) ); - RangeAggregationBuilder aggBuilder = new RangeAggregationBuilder("my_agg") - .field(HISTO_FIELD_NAME) + RangeAggregationBuilder aggBuilder = new RangeAggregationBuilder("my_agg").field(HISTO_FIELD_NAME) .addUnboundedTo(0) .addRange(0, 10) .addRange(10, 20) @@ -324,19 +324,18 @@ public void testNonOverlapping() throws Exception { } public void testSubAggs() throws Exception { - try (Directory dir = newDirectory(); - RandomIndexWriter w = new RandomIndexWriter(random(), dir)) { + try (Directory dir = newDirectory(); RandomIndexWriter w = new RandomIndexWriter(random(), dir)) { - w.addDocument(singleton(histogramFieldDocValues(HISTO_FIELD_NAME, new double[] {-4.5, 4.3}))); - w.addDocument(singleton(histogramFieldDocValues(HISTO_FIELD_NAME, new double[] {-5, 3.2 }))); + w.addDocument(singleton(histogramFieldDocValues(HISTO_FIELD_NAME, new double[] { -4.5, 4.3 }))); + w.addDocument(singleton(histogramFieldDocValues(HISTO_FIELD_NAME, new double[] { -5, 3.2 }))); - RangeAggregationBuilder aggBuilder = new RangeAggregationBuilder("my_agg") - .field(HISTO_FIELD_NAME) + RangeAggregationBuilder aggBuilder = new RangeAggregationBuilder("my_agg").field(HISTO_FIELD_NAME) .addRange(-1.0, 3.0) .subAggregation(new TopHitsAggregationBuilder("top_hits")); try (IndexReader reader = w.getReader()) { IndexSearcher searcher = new IndexSearcher(reader); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, () -> searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, defaultFieldType(HISTO_FIELD_NAME)) ); assertEquals("Range aggregation on histogram fields does not support sub-aggregations", e.getMessage()); diff --git a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/aggregations/metrics/HDRPreAggregatedPercentileRanksAggregatorTests.java b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/aggregations/metrics/HDRPreAggregatedPercentileRanksAggregatorTests.java index 96c41f6dd5429..a2699de8aca36 100644 --- a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/aggregations/metrics/HDRPreAggregatedPercentileRanksAggregatorTests.java +++ b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/aggregations/metrics/HDRPreAggregatedPercentileRanksAggregatorTests.java @@ -48,22 +48,23 @@ protected List getSearchPlugins() { @Override protected AggregationBuilder createAggBuilderForTypeTest(MappedFieldType fieldType, String fieldName) { - return new PercentileRanksAggregationBuilder("hdr_percentiles", new double[]{1.0}) - .field(fieldName) + return new PercentileRanksAggregationBuilder("hdr_percentiles", new double[] { 1.0 }).field(fieldName) .percentilesConfig(new PercentilesConfig.Hdr()); } @Override protected List getSupportedValuesSourceTypes() { // Note: this is the same list as Core, plus Analytics - return List.of(CoreValuesSourceType.NUMERIC, + return List.of( + CoreValuesSourceType.NUMERIC, CoreValuesSourceType.DATE, CoreValuesSourceType.BOOLEAN, - AnalyticsValuesSourceType.HISTOGRAM); + AnalyticsValuesSourceType.HISTOGRAM + ); } private BinaryDocValuesField getDocValue(String fieldName, double[] values) throws 
IOException { - DoubleHistogram histogram = new DoubleHistogram(3);//default + DoubleHistogram histogram = new DoubleHistogram(3);// default for (double value : values) { histogram.recordValue(value); } @@ -81,15 +82,14 @@ private BinaryDocValuesField getDocValue(String fieldName, double[] values) thro } public void testSimple() throws IOException { - try (Directory dir = newDirectory(); - RandomIndexWriter w = new RandomIndexWriter(random(), dir)) { + try (Directory dir = newDirectory(); RandomIndexWriter w = new RandomIndexWriter(random(), dir)) { Document doc = new Document(); - doc.add(getDocValue("field", new double[] {3, 0.2, 10})); + doc.add(getDocValue("field", new double[] { 3, 0.2, 10 })); w.addDocument(doc); - PercentileRanksAggregationBuilder aggBuilder = new PercentileRanksAggregationBuilder("my_agg", new double[]{0.1, 0.5, 12}) - .field("field") - .method(PercentilesMethod.HDR); + PercentileRanksAggregationBuilder aggBuilder = new PercentileRanksAggregationBuilder("my_agg", new double[] { 0.1, 0.5, 12 }) + .field("field") + .method(PercentilesMethod.HDR); MappedFieldType fieldType = new HistogramFieldMapper.HistogramFieldType("field", Collections.emptyMap()); try (IndexReader reader = w.getReader()) { IndexSearcher searcher = new IndexSearcher(reader); @@ -106,7 +106,7 @@ public void testSimple() throws IOException { assertEquals(12, rank.getValue(), 0d); assertThat(rank.getPercent(), Matchers.equalTo(100d)); assertFalse(rankIterator.hasNext()); - assertTrue(AggregationInspectionHelper.hasValue((InternalHDRPercentileRanks)ranks)); + assertTrue(AggregationInspectionHelper.hasValue((InternalHDRPercentileRanks) ranks)); } } } diff --git a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/aggregations/metrics/HDRPreAggregatedPercentilesAggregatorTests.java b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/aggregations/metrics/HDRPreAggregatedPercentilesAggregatorTests.java index 527578abca8fb..3208adda77bee 100644 --- a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/aggregations/metrics/HDRPreAggregatedPercentilesAggregatorTests.java +++ b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/aggregations/metrics/HDRPreAggregatedPercentilesAggregatorTests.java @@ -17,8 +17,8 @@ import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.Query; import org.apache.lucene.store.Directory; -import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.plugins.SearchPlugin; import org.elasticsearch.search.aggregations.AggregationBuilder; @@ -52,81 +52,81 @@ protected List getSearchPlugins() { @Override protected AggregationBuilder createAggBuilderForTypeTest(MappedFieldType fieldType, String fieldName) { - return new PercentilesAggregationBuilder("hdr_percentiles") - .field(fieldName) - .percentilesConfig(new PercentilesConfig.Hdr()); + return new PercentilesAggregationBuilder("hdr_percentiles").field(fieldName).percentilesConfig(new PercentilesConfig.Hdr()); } @Override protected List getSupportedValuesSourceTypes() { // Note: this is the same list as Core, plus Analytics - return List.of(CoreValuesSourceType.NUMERIC, + return List.of( + CoreValuesSourceType.NUMERIC, CoreValuesSourceType.DATE, CoreValuesSourceType.BOOLEAN, - AnalyticsValuesSourceType.HISTOGRAM); + AnalyticsValuesSourceType.HISTOGRAM + ); 
} private BinaryDocValuesField getDocValue(String fieldName, double[] values) throws IOException { - DoubleHistogram histogram = new DoubleHistogram(3);//default - for (double value : values) { - histogram.recordValue(value); - } - BytesStreamOutput streamOutput = new BytesStreamOutput(); - DoubleHistogram.RecordedValues recordedValues = histogram.recordedValues(); - Iterator iterator = recordedValues.iterator(); - while (iterator.hasNext()) { - - DoubleHistogramIterationValue value = iterator.next(); - long count = value.getCountAtValueIteratedTo(); - if (count != 0) { - streamOutput.writeVInt(Math.toIntExact(count)); - double d = value.getValueIteratedTo(); - streamOutput.writeDouble(d); - } - - } - return new BinaryDocValuesField(fieldName, streamOutput.bytes().toBytesRef()); + DoubleHistogram histogram = new DoubleHistogram(3);// default + for (double value : values) { + histogram.recordValue(value); + } + BytesStreamOutput streamOutput = new BytesStreamOutput(); + DoubleHistogram.RecordedValues recordedValues = histogram.recordedValues(); + Iterator iterator = recordedValues.iterator(); + while (iterator.hasNext()) { + + DoubleHistogramIterationValue value = iterator.next(); + long count = value.getCountAtValueIteratedTo(); + if (count != 0) { + streamOutput.writeVInt(Math.toIntExact(count)); + double d = value.getValueIteratedTo(); + streamOutput.writeDouble(d); + } + + } + return new BinaryDocValuesField(fieldName, streamOutput.bytes().toBytesRef()); } public void testNoMatchingField() throws IOException { - testCase(new MatchAllDocsQuery(), iw -> { - iw.addDocument(singleton(getDocValue("wrong_number", new double[]{7, 1}))); - }, hdr -> { - //assertEquals(0L, hdr.state.getTotalCount()); + testCase(new MatchAllDocsQuery(), iw -> { iw.addDocument(singleton(getDocValue("wrong_number", new double[] { 7, 1 }))); }, hdr -> { + // assertEquals(0L, hdr.state.getTotalCount()); assertFalse(AggregationInspectionHelper.hasValue(hdr)); }); } public void testEmptyField() throws IOException { - testCase(new MatchAllDocsQuery(), iw -> { - iw.addDocument(singleton(getDocValue("number", new double[0]))); - }, hdr -> { - assertFalse(AggregationInspectionHelper.hasValue(hdr)); - }); + testCase( + new MatchAllDocsQuery(), + iw -> { iw.addDocument(singleton(getDocValue("number", new double[0]))); }, + hdr -> { assertFalse(AggregationInspectionHelper.hasValue(hdr)); } + ); } public void testSomeMatchesBinaryDocValues() throws IOException { - testCase(new DocValuesFieldExistsQuery("number"), iw -> { - iw.addDocument(singleton(getDocValue("number", new double[]{60, 40, 20, 10}))); - }, hdr -> { - //assertEquals(4L, hdr.state.getTotalCount()); - double approximation = 0.05d; - assertEquals(10.0d, hdr.percentile(25), approximation); - assertEquals(20.0d, hdr.percentile(50), approximation); - assertEquals(40.0d, hdr.percentile(75), approximation); - assertEquals(60.0d, hdr.percentile(99), approximation); - assertTrue(AggregationInspectionHelper.hasValue(hdr)); - }); + testCase( + new DocValuesFieldExistsQuery("number"), + iw -> { iw.addDocument(singleton(getDocValue("number", new double[] { 60, 40, 20, 10 }))); }, + hdr -> { + // assertEquals(4L, hdr.state.getTotalCount()); + double approximation = 0.05d; + assertEquals(10.0d, hdr.percentile(25), approximation); + assertEquals(20.0d, hdr.percentile(50), approximation); + assertEquals(40.0d, hdr.percentile(75), approximation); + assertEquals(60.0d, hdr.percentile(99), approximation); + assertTrue(AggregationInspectionHelper.hasValue(hdr)); + } + ); } public 
void testSomeMatchesMultiBinaryDocValues() throws IOException { testCase(new DocValuesFieldExistsQuery("number"), iw -> { - iw.addDocument(singleton(getDocValue("number", new double[]{60, 40, 20, 10}))); - iw.addDocument(singleton(getDocValue("number", new double[]{60, 40, 20, 10}))); - iw.addDocument(singleton(getDocValue("number", new double[]{60, 40, 20, 10}))); - iw.addDocument(singleton(getDocValue("number", new double[]{60, 40, 20, 10}))); + iw.addDocument(singleton(getDocValue("number", new double[] { 60, 40, 20, 10 }))); + iw.addDocument(singleton(getDocValue("number", new double[] { 60, 40, 20, 10 }))); + iw.addDocument(singleton(getDocValue("number", new double[] { 60, 40, 20, 10 }))); + iw.addDocument(singleton(getDocValue("number", new double[] { 60, 40, 20, 10 }))); }, hdr -> { - //assertEquals(16L, hdr.state.getTotalCount()); + // assertEquals(16L, hdr.state.getTotalCount()); double approximation = 0.05d; assertEquals(10.0d, hdr.percentile(25), approximation); assertEquals(20.0d, hdr.percentile(50), approximation); @@ -136,8 +136,8 @@ public void testSomeMatchesMultiBinaryDocValues() throws IOException { }); } - private void testCase(Query query, CheckedConsumer buildIndex, - Consumer verify) throws IOException { + private void testCase(Query query, CheckedConsumer buildIndex, Consumer verify) + throws IOException { try (Directory directory = newDirectory()) { try (RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory)) { buildIndex.accept(indexWriter); @@ -146,8 +146,8 @@ private void testCase(Query query, CheckedConsumer { - iw.addDocument(singleton(histogramFieldDocValues("wrong_field", new double[] {3, 1.2, 10}))); - iw.addDocument(singleton(histogramFieldDocValues("wrong_field", new double[] {5.3, 6, 20}))); + iw.addDocument(singleton(histogramFieldDocValues("wrong_field", new double[] { 3, 1.2, 10 }))); + iw.addDocument(singleton(histogramFieldDocValues("wrong_field", new double[] { 5.3, 6, 20 }))); }, avg -> { assertEquals(Double.NaN, avg.getValue(), 0d); assertFalse(AggregationInspectionHelper.hasValue(avg)); @@ -62,9 +62,9 @@ public void testNoMatchingField() throws IOException { public void testSimpleHistogram() throws IOException { testCase(new MatchAllDocsQuery(), iw -> { - iw.addDocument(singleton(histogramFieldDocValues(FIELD_NAME, new double[] {3, 1.2, 10}))); - iw.addDocument(singleton(histogramFieldDocValues(FIELD_NAME, new double[] {5.3, 6, 6, 20}))); - iw.addDocument(singleton(histogramFieldDocValues(FIELD_NAME, new double[] {-10, 0.01, 1, 90}))); + iw.addDocument(singleton(histogramFieldDocValues(FIELD_NAME, new double[] { 3, 1.2, 10 }))); + iw.addDocument(singleton(histogramFieldDocValues(FIELD_NAME, new double[] { 5.3, 6, 6, 20 }))); + iw.addDocument(singleton(histogramFieldDocValues(FIELD_NAME, new double[] { -10, 0.01, 1, 90 }))); }, avg -> { assertEquals(12.0463d, avg.getValue(), 0.01d); assertTrue(AggregationInspectionHelper.hasValue(avg)); @@ -73,25 +73,35 @@ public void testSimpleHistogram() throws IOException { public void testQueryFiltering() throws IOException { testCase(new TermQuery(new Term("match", "yes")), iw -> { - iw.addDocument(Arrays.asList( - new StringField("match", "yes", Field.Store.NO), - histogramFieldDocValues(FIELD_NAME, new double[] {3, 1.2, 10})) + iw.addDocument( + Arrays.asList( + new StringField("match", "yes", Field.Store.NO), + histogramFieldDocValues(FIELD_NAME, new double[] { 3, 1.2, 10 }) + ) ); - iw.addDocument(Arrays.asList( - new StringField("match", "yes", Field.Store.NO), - 
histogramFieldDocValues(FIELD_NAME, new double[] {5.3, 6, 20})) + iw.addDocument( + Arrays.asList( + new StringField("match", "yes", Field.Store.NO), + histogramFieldDocValues(FIELD_NAME, new double[] { 5.3, 6, 20 }) + ) ); - iw.addDocument(Arrays.asList( - new StringField("match", "no", Field.Store.NO), - histogramFieldDocValues(FIELD_NAME, new double[] {3, 1.2, 10})) + iw.addDocument( + Arrays.asList( + new StringField("match", "no", Field.Store.NO), + histogramFieldDocValues(FIELD_NAME, new double[] { 3, 1.2, 10 }) + ) ); - iw.addDocument(Arrays.asList( - new StringField("match", "no", Field.Store.NO), - histogramFieldDocValues(FIELD_NAME, new double[] {3, 1.2, 10})) + iw.addDocument( + Arrays.asList( + new StringField("match", "no", Field.Store.NO), + histogramFieldDocValues(FIELD_NAME, new double[] { 3, 1.2, 10 }) + ) ); - iw.addDocument(Arrays.asList( - new StringField("match", "yes", Field.Store.NO), - histogramFieldDocValues(FIELD_NAME, new double[] {-10, 0.01, 1, 90})) + iw.addDocument( + Arrays.asList( + new StringField("match", "yes", Field.Store.NO), + histogramFieldDocValues(FIELD_NAME, new double[] { -10, 0.01, 1, 90 }) + ) ); }, avg -> { assertEquals(12.651d, avg.getValue(), 0.01d); @@ -99,9 +109,8 @@ public void testQueryFiltering() throws IOException { }); } - private void testCase(Query query, - CheckedConsumer indexer, - Consumer verify) throws IOException { + private void testCase(Query query, CheckedConsumer indexer, Consumer verify) + throws IOException { testCase(avg("_name").field(FIELD_NAME), query, indexer, verify, defaultFieldType()); } diff --git a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/aggregations/metrics/HistoBackedMaxAggregatorTests.java b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/aggregations/metrics/HistoBackedMaxAggregatorTests.java index a9e019480dc3f..afb8781a42352 100644 --- a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/aggregations/metrics/HistoBackedMaxAggregatorTests.java +++ b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/aggregations/metrics/HistoBackedMaxAggregatorTests.java @@ -6,16 +6,6 @@ */ package org.elasticsearch.xpack.analytics.aggregations.metrics; -import static java.util.Collections.singleton; -import static org.elasticsearch.search.aggregations.AggregationBuilders.max; -import static org.elasticsearch.xpack.analytics.AnalyticsTestsUtils.histogramFieldDocValues; - -import java.io.IOException; -import java.util.Arrays; -import java.util.Collections; -import java.util.List; -import java.util.function.Consumer; - import org.apache.lucene.document.Field; import org.apache.lucene.document.StringField; import org.apache.lucene.index.RandomIndexWriter; @@ -37,6 +27,16 @@ import org.elasticsearch.xpack.analytics.aggregations.support.AnalyticsValuesSourceType; import org.elasticsearch.xpack.analytics.mapper.HistogramFieldMapper; +import java.io.IOException; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import java.util.function.Consumer; + +import static java.util.Collections.singleton; +import static org.elasticsearch.search.aggregations.AggregationBuilders.max; +import static org.elasticsearch.xpack.analytics.AnalyticsTestsUtils.histogramFieldDocValues; + public class HistoBackedMaxAggregatorTests extends AggregatorTestCase { private static final String FIELD_NAME = "field"; @@ -52,8 +52,8 @@ public void testNoDocs() throws IOException { public void testNoMatchingField() throws IOException { 
testCase(new MatchAllDocsQuery(), iw -> { - iw.addDocument(singleton(histogramFieldDocValues("wrong_field", new double[] {3, 1.2, 10}))); - iw.addDocument(singleton(histogramFieldDocValues("wrong_field", new double[] {5.3, 6, 20}))); + iw.addDocument(singleton(histogramFieldDocValues("wrong_field", new double[] { 3, 1.2, 10 }))); + iw.addDocument(singleton(histogramFieldDocValues("wrong_field", new double[] { 5.3, 6, 20 }))); }, max -> { assertEquals(Double.NEGATIVE_INFINITY, max.getValue(), 0d); assertFalse(AggregationInspectionHelper.hasValue(max)); @@ -62,9 +62,9 @@ public void testNoMatchingField() throws IOException { public void testSimpleHistogram() throws IOException { testCase(new MatchAllDocsQuery(), iw -> { - iw.addDocument(singleton(histogramFieldDocValues(FIELD_NAME, new double[] {3, 1.2, 10}))); - iw.addDocument(singleton(histogramFieldDocValues(FIELD_NAME, new double[] {5.3, 6, 6, 20}))); - iw.addDocument(singleton(histogramFieldDocValues(FIELD_NAME, new double[] {-10, 0.01, 1, 90}))); + iw.addDocument(singleton(histogramFieldDocValues(FIELD_NAME, new double[] { 3, 1.2, 10 }))); + iw.addDocument(singleton(histogramFieldDocValues(FIELD_NAME, new double[] { 5.3, 6, 6, 20 }))); + iw.addDocument(singleton(histogramFieldDocValues(FIELD_NAME, new double[] { -10, 0.01, 1, 90 }))); }, max -> { assertEquals(90d, max.getValue(), 0.01d); assertTrue(AggregationInspectionHelper.hasValue(max)); @@ -73,25 +73,35 @@ public void testSimpleHistogram() throws IOException { public void testQueryFiltering() throws IOException { testCase(new TermQuery(new Term("match", "yes")), iw -> { - iw.addDocument(Arrays.asList( - new StringField("match", "yes", Field.Store.NO), - histogramFieldDocValues(FIELD_NAME, new double[] {3, 1.2, 10})) + iw.addDocument( + Arrays.asList( + new StringField("match", "yes", Field.Store.NO), + histogramFieldDocValues(FIELD_NAME, new double[] { 3, 1.2, 10 }) + ) ); - iw.addDocument(Arrays.asList( - new StringField("match", "yes", Field.Store.NO), - histogramFieldDocValues(FIELD_NAME, new double[] {5.3, 6, 20})) + iw.addDocument( + Arrays.asList( + new StringField("match", "yes", Field.Store.NO), + histogramFieldDocValues(FIELD_NAME, new double[] { 5.3, 6, 20 }) + ) ); - iw.addDocument(Arrays.asList( - new StringField("match", "no", Field.Store.NO), - histogramFieldDocValues(FIELD_NAME, new double[] {-34, 1.2, 10})) + iw.addDocument( + Arrays.asList( + new StringField("match", "no", Field.Store.NO), + histogramFieldDocValues(FIELD_NAME, new double[] { -34, 1.2, 10 }) + ) ); - iw.addDocument(Arrays.asList( - new StringField("match", "no", Field.Store.NO), - histogramFieldDocValues(FIELD_NAME, new double[] {3, 1.2, 100})) + iw.addDocument( + Arrays.asList( + new StringField("match", "no", Field.Store.NO), + histogramFieldDocValues(FIELD_NAME, new double[] { 3, 1.2, 100 }) + ) ); - iw.addDocument(Arrays.asList( - new StringField("match", "yes", Field.Store.NO), - histogramFieldDocValues(FIELD_NAME, new double[] {-10, 0.01, 1, 90})) + iw.addDocument( + Arrays.asList( + new StringField("match", "yes", Field.Store.NO), + histogramFieldDocValues(FIELD_NAME, new double[] { -10, 0.01, 1, 90 }) + ) ); }, min -> { assertEquals(90d, min.getValue(), 0.01d); @@ -99,9 +109,8 @@ public void testQueryFiltering() throws IOException { }); } - private void testCase(Query query, - CheckedConsumer indexer, - Consumer verify) throws IOException { + private void testCase(Query query, CheckedConsumer indexer, Consumer verify) + throws IOException { testCase(max("_name").field(FIELD_NAME), query, 
indexer, verify, defaultFieldType()); } diff --git a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/aggregations/metrics/HistoBackedMinAggregatorTests.java b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/aggregations/metrics/HistoBackedMinAggregatorTests.java index 1122761fcfc97..67571d50eaf6a 100644 --- a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/aggregations/metrics/HistoBackedMinAggregatorTests.java +++ b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/aggregations/metrics/HistoBackedMinAggregatorTests.java @@ -6,16 +6,6 @@ */ package org.elasticsearch.xpack.analytics.aggregations.metrics; -import static java.util.Collections.singleton; -import static org.elasticsearch.search.aggregations.AggregationBuilders.min; -import static org.elasticsearch.xpack.analytics.AnalyticsTestsUtils.histogramFieldDocValues; - -import java.io.IOException; -import java.util.Arrays; -import java.util.Collections; -import java.util.List; -import java.util.function.Consumer; - import org.apache.lucene.document.Field; import org.apache.lucene.document.StringField; import org.apache.lucene.index.RandomIndexWriter; @@ -37,6 +27,16 @@ import org.elasticsearch.xpack.analytics.aggregations.support.AnalyticsValuesSourceType; import org.elasticsearch.xpack.analytics.mapper.HistogramFieldMapper; +import java.io.IOException; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import java.util.function.Consumer; + +import static java.util.Collections.singleton; +import static org.elasticsearch.search.aggregations.AggregationBuilders.min; +import static org.elasticsearch.xpack.analytics.AnalyticsTestsUtils.histogramFieldDocValues; + public class HistoBackedMinAggregatorTests extends AggregatorTestCase { private static final String FIELD_NAME = "field"; @@ -52,8 +52,8 @@ public void testNoDocs() throws IOException { public void testNoMatchingField() throws IOException { testCase(new MatchAllDocsQuery(), iw -> { - iw.addDocument(singleton(histogramFieldDocValues("wrong_field", new double[] {3, 1.2, 10}))); - iw.addDocument(singleton(histogramFieldDocValues("wrong_field", new double[] {5.3, 6, 20}))); + iw.addDocument(singleton(histogramFieldDocValues("wrong_field", new double[] { 3, 1.2, 10 }))); + iw.addDocument(singleton(histogramFieldDocValues("wrong_field", new double[] { 5.3, 6, 20 }))); }, min -> { assertEquals(Double.POSITIVE_INFINITY, min.getValue(), 0d); assertFalse(AggregationInspectionHelper.hasValue(min)); @@ -62,9 +62,9 @@ public void testNoMatchingField() throws IOException { public void testSimpleHistogram() throws IOException { testCase(new MatchAllDocsQuery(), iw -> { - iw.addDocument(singleton(histogramFieldDocValues(FIELD_NAME, new double[] {3, 1.2, 10}))); - iw.addDocument(singleton(histogramFieldDocValues(FIELD_NAME, new double[] {5.3, 6, 6, 20}))); - iw.addDocument(singleton(histogramFieldDocValues(FIELD_NAME, new double[] {-10, 0.01, 1, 90}))); + iw.addDocument(singleton(histogramFieldDocValues(FIELD_NAME, new double[] { 3, 1.2, 10 }))); + iw.addDocument(singleton(histogramFieldDocValues(FIELD_NAME, new double[] { 5.3, 6, 6, 20 }))); + iw.addDocument(singleton(histogramFieldDocValues(FIELD_NAME, new double[] { -10, 0.01, 1, 90 }))); }, min -> { assertEquals(-10d, min.getValue(), 0.01d); assertTrue(AggregationInspectionHelper.hasValue(min)); @@ -73,25 +73,35 @@ public void testSimpleHistogram() throws IOException { public void testQueryFiltering() throws IOException { 
testCase(new TermQuery(new Term("match", "yes")), iw -> { - iw.addDocument(Arrays.asList( - new StringField("match", "yes", Field.Store.NO), - histogramFieldDocValues(FIELD_NAME, new double[] {3, 1.2, 10})) + iw.addDocument( + Arrays.asList( + new StringField("match", "yes", Field.Store.NO), + histogramFieldDocValues(FIELD_NAME, new double[] { 3, 1.2, 10 }) + ) ); - iw.addDocument(Arrays.asList( - new StringField("match", "yes", Field.Store.NO), - histogramFieldDocValues(FIELD_NAME, new double[] {5.3, 6, 20})) + iw.addDocument( + Arrays.asList( + new StringField("match", "yes", Field.Store.NO), + histogramFieldDocValues(FIELD_NAME, new double[] { 5.3, 6, 20 }) + ) ); - iw.addDocument(Arrays.asList( - new StringField("match", "no", Field.Store.NO), - histogramFieldDocValues(FIELD_NAME, new double[] {-34, 1.2, 10})) + iw.addDocument( + Arrays.asList( + new StringField("match", "no", Field.Store.NO), + histogramFieldDocValues(FIELD_NAME, new double[] { -34, 1.2, 10 }) + ) ); - iw.addDocument(Arrays.asList( - new StringField("match", "no", Field.Store.NO), - histogramFieldDocValues(FIELD_NAME, new double[] {3, 1.2, 10})) + iw.addDocument( + Arrays.asList( + new StringField("match", "no", Field.Store.NO), + histogramFieldDocValues(FIELD_NAME, new double[] { 3, 1.2, 10 }) + ) ); - iw.addDocument(Arrays.asList( - new StringField("match", "yes", Field.Store.NO), - histogramFieldDocValues(FIELD_NAME, new double[] {-10, 0.01, 1, 90})) + iw.addDocument( + Arrays.asList( + new StringField("match", "yes", Field.Store.NO), + histogramFieldDocValues(FIELD_NAME, new double[] { -10, 0.01, 1, 90 }) + ) ); }, min -> { assertEquals(-10d, min.getValue(), 0.01d); @@ -99,9 +109,8 @@ public void testQueryFiltering() throws IOException { }); } - private void testCase(Query query, - CheckedConsumer indexer, - Consumer verify) throws IOException { + private void testCase(Query query, CheckedConsumer indexer, Consumer verify) + throws IOException { testCase(min("_name").field(FIELD_NAME), query, indexer, verify, defaultFieldType()); } diff --git a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/aggregations/metrics/HistoBackedSumAggregatorTests.java b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/aggregations/metrics/HistoBackedSumAggregatorTests.java index 8d93d307981b7..ede881cba8954 100644 --- a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/aggregations/metrics/HistoBackedSumAggregatorTests.java +++ b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/aggregations/metrics/HistoBackedSumAggregatorTests.java @@ -6,16 +6,6 @@ */ package org.elasticsearch.xpack.analytics.aggregations.metrics; -import static java.util.Collections.singleton; -import static org.elasticsearch.search.aggregations.AggregationBuilders.sum; -import static org.elasticsearch.xpack.analytics.AnalyticsTestsUtils.histogramFieldDocValues; - -import java.io.IOException; -import java.util.Arrays; -import java.util.Collections; -import java.util.List; -import java.util.function.Consumer; - import org.apache.lucene.document.Field; import org.apache.lucene.document.StringField; import org.apache.lucene.index.RandomIndexWriter; @@ -37,6 +27,16 @@ import org.elasticsearch.xpack.analytics.aggregations.support.AnalyticsValuesSourceType; import org.elasticsearch.xpack.analytics.mapper.HistogramFieldMapper; +import java.io.IOException; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import java.util.function.Consumer; + +import 
static java.util.Collections.singleton; +import static org.elasticsearch.search.aggregations.AggregationBuilders.sum; +import static org.elasticsearch.xpack.analytics.AnalyticsTestsUtils.histogramFieldDocValues; + public class HistoBackedSumAggregatorTests extends AggregatorTestCase { private static final String FIELD_NAME = "field"; @@ -52,8 +52,8 @@ public void testNoDocs() throws IOException { public void testNoMatchingField() throws IOException { testCase(new MatchAllDocsQuery(), iw -> { - iw.addDocument(singleton(histogramFieldDocValues("wrong_field", new double[] {3, 1.2, 10}))); - iw.addDocument(singleton(histogramFieldDocValues("wrong_field", new double[] {5.3, 6, 20}))); + iw.addDocument(singleton(histogramFieldDocValues("wrong_field", new double[] { 3, 1.2, 10 }))); + iw.addDocument(singleton(histogramFieldDocValues("wrong_field", new double[] { 5.3, 6, 20 }))); }, sum -> { assertEquals(0L, sum.getValue(), 0d); assertFalse(AggregationInspectionHelper.hasValue(sum)); @@ -62,9 +62,9 @@ public void testNoMatchingField() throws IOException { public void testSimpleHistogram() throws IOException { testCase(new MatchAllDocsQuery(), iw -> { - iw.addDocument(singleton(histogramFieldDocValues(FIELD_NAME, new double[] {3, 1.2, 10}))); - iw.addDocument(singleton(histogramFieldDocValues(FIELD_NAME, new double[] {5.3, 6, 6, 20}))); - iw.addDocument(singleton(histogramFieldDocValues(FIELD_NAME, new double[] {-10, 0.01, 1, 90}))); + iw.addDocument(singleton(histogramFieldDocValues(FIELD_NAME, new double[] { 3, 1.2, 10 }))); + iw.addDocument(singleton(histogramFieldDocValues(FIELD_NAME, new double[] { 5.3, 6, 6, 20 }))); + iw.addDocument(singleton(histogramFieldDocValues(FIELD_NAME, new double[] { -10, 0.01, 1, 90 }))); }, sum -> { assertEquals(132.51d, sum.getValue(), 0.01d); assertTrue(AggregationInspectionHelper.hasValue(sum)); @@ -73,25 +73,35 @@ public void testSimpleHistogram() throws IOException { public void testQueryFiltering() throws IOException { testCase(new TermQuery(new Term("match", "yes")), iw -> { - iw.addDocument(Arrays.asList( - new StringField("match", "yes", Field.Store.NO), - histogramFieldDocValues(FIELD_NAME, new double[] {3, 1.2, 10})) + iw.addDocument( + Arrays.asList( + new StringField("match", "yes", Field.Store.NO), + histogramFieldDocValues(FIELD_NAME, new double[] { 3, 1.2, 10 }) + ) ); - iw.addDocument(Arrays.asList( - new StringField("match", "yes", Field.Store.NO), - histogramFieldDocValues(FIELD_NAME, new double[] {5.3, 6, 20})) + iw.addDocument( + Arrays.asList( + new StringField("match", "yes", Field.Store.NO), + histogramFieldDocValues(FIELD_NAME, new double[] { 5.3, 6, 20 }) + ) ); - iw.addDocument(Arrays.asList( - new StringField("match", "no", Field.Store.NO), - histogramFieldDocValues(FIELD_NAME, new double[] {3, 1.2, 10})) + iw.addDocument( + Arrays.asList( + new StringField("match", "no", Field.Store.NO), + histogramFieldDocValues(FIELD_NAME, new double[] { 3, 1.2, 10 }) + ) ); - iw.addDocument(Arrays.asList( - new StringField("match", "no", Field.Store.NO), - histogramFieldDocValues(FIELD_NAME, new double[] {3, 1.2, 10})) + iw.addDocument( + Arrays.asList( + new StringField("match", "no", Field.Store.NO), + histogramFieldDocValues(FIELD_NAME, new double[] { 3, 1.2, 10 }) + ) ); - iw.addDocument(Arrays.asList( - new StringField("match", "yes", Field.Store.NO), - histogramFieldDocValues(FIELD_NAME, new double[] {-10, 0.01, 1, 90})) + iw.addDocument( + Arrays.asList( + new StringField("match", "yes", Field.Store.NO), + histogramFieldDocValues(FIELD_NAME, 
new double[] { -10, 0.01, 1, 90 }) + ) ); }, sum -> { assertEquals(126.51d, sum.getValue(), 0.01d); @@ -99,9 +109,8 @@ public void testQueryFiltering() throws IOException { }); } - private void testCase(Query query, - CheckedConsumer indexer, - Consumer verify) throws IOException { + private void testCase(Query query, CheckedConsumer indexer, Consumer verify) + throws IOException { testCase(sum("_name").field(FIELD_NAME), query, indexer, verify, defaultFieldType()); } diff --git a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/aggregations/metrics/HistoBackedValueCountAggregatorTests.java b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/aggregations/metrics/HistoBackedValueCountAggregatorTests.java index 332fac6396f20..337ace705ef0f 100644 --- a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/aggregations/metrics/HistoBackedValueCountAggregatorTests.java +++ b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/aggregations/metrics/HistoBackedValueCountAggregatorTests.java @@ -6,16 +6,6 @@ */ package org.elasticsearch.xpack.analytics.aggregations.metrics; -import static java.util.Collections.singleton; -import static org.elasticsearch.search.aggregations.AggregationBuilders.count; -import static org.elasticsearch.xpack.analytics.AnalyticsTestsUtils.histogramFieldDocValues; - -import java.io.IOException; -import java.util.Arrays; -import java.util.Collections; -import java.util.List; -import java.util.function.Consumer; - import org.apache.lucene.document.Field; import org.apache.lucene.document.StringField; import org.apache.lucene.index.RandomIndexWriter; @@ -37,6 +27,16 @@ import org.elasticsearch.xpack.analytics.aggregations.support.AnalyticsValuesSourceType; import org.elasticsearch.xpack.analytics.mapper.HistogramFieldMapper; +import java.io.IOException; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import java.util.function.Consumer; + +import static java.util.Collections.singleton; +import static org.elasticsearch.search.aggregations.AggregationBuilders.count; +import static org.elasticsearch.xpack.analytics.AnalyticsTestsUtils.histogramFieldDocValues; + public class HistoBackedValueCountAggregatorTests extends AggregatorTestCase { private static final String FIELD_NAME = "field"; @@ -52,8 +52,8 @@ public void testNoDocs() throws IOException { public void testNoMatchingField() throws IOException { testCase(new MatchAllDocsQuery(), iw -> { - iw.addDocument(singleton(histogramFieldDocValues("wrong_field", new double[] {3, 1.2, 10}))); - iw.addDocument(singleton(histogramFieldDocValues("wrong_field", new double[] {5.3, 6, 20}))); + iw.addDocument(singleton(histogramFieldDocValues("wrong_field", new double[] { 3, 1.2, 10 }))); + iw.addDocument(singleton(histogramFieldDocValues("wrong_field", new double[] { 5.3, 6, 20 }))); }, count -> { assertEquals(0L, count.getValue()); assertFalse(AggregationInspectionHelper.hasValue(count)); @@ -62,9 +62,9 @@ public void testNoMatchingField() throws IOException { public void testSimpleHistogram() throws IOException { testCase(new MatchAllDocsQuery(), iw -> { - iw.addDocument(singleton(histogramFieldDocValues(FIELD_NAME, new double[] {3, 1.2, 10}))); - iw.addDocument(singleton(histogramFieldDocValues(FIELD_NAME, new double[] {5.3, 6, 6, 20}))); - iw.addDocument(singleton(histogramFieldDocValues(FIELD_NAME, new double[] {-10, 0.01, 1, 90}))); + iw.addDocument(singleton(histogramFieldDocValues(FIELD_NAME, new double[] { 3, 
1.2, 10 }))); + iw.addDocument(singleton(histogramFieldDocValues(FIELD_NAME, new double[] { 5.3, 6, 6, 20 }))); + iw.addDocument(singleton(histogramFieldDocValues(FIELD_NAME, new double[] { -10, 0.01, 1, 90 }))); }, count -> { assertEquals(11, count.getValue()); assertTrue(AggregationInspectionHelper.hasValue(count)); @@ -73,25 +73,35 @@ public void testSimpleHistogram() throws IOException { public void testQueryFiltering() throws IOException { testCase(new TermQuery(new Term("match", "yes")), iw -> { - iw.addDocument(Arrays.asList( - new StringField("match", "yes", Field.Store.NO), - histogramFieldDocValues(FIELD_NAME, new double[] {3, 1.2, 10})) + iw.addDocument( + Arrays.asList( + new StringField("match", "yes", Field.Store.NO), + histogramFieldDocValues(FIELD_NAME, new double[] { 3, 1.2, 10 }) + ) ); - iw.addDocument(Arrays.asList( - new StringField("match", "yes", Field.Store.NO), - histogramFieldDocValues(FIELD_NAME, new double[] {5.3, 6, 20})) + iw.addDocument( + Arrays.asList( + new StringField("match", "yes", Field.Store.NO), + histogramFieldDocValues(FIELD_NAME, new double[] { 5.3, 6, 20 }) + ) ); - iw.addDocument(Arrays.asList( - new StringField("match", "no", Field.Store.NO), - histogramFieldDocValues(FIELD_NAME, new double[] {3, 1.2, 10})) + iw.addDocument( + Arrays.asList( + new StringField("match", "no", Field.Store.NO), + histogramFieldDocValues(FIELD_NAME, new double[] { 3, 1.2, 10 }) + ) ); - iw.addDocument(Arrays.asList( - new StringField("match", "no", Field.Store.NO), - histogramFieldDocValues(FIELD_NAME, new double[] {3, 1.2, 10})) + iw.addDocument( + Arrays.asList( + new StringField("match", "no", Field.Store.NO), + histogramFieldDocValues(FIELD_NAME, new double[] { 3, 1.2, 10 }) + ) ); - iw.addDocument(Arrays.asList( - new StringField("match", "yes", Field.Store.NO), - histogramFieldDocValues(FIELD_NAME, new double[] {-10, 0.01, 1, 90})) + iw.addDocument( + Arrays.asList( + new StringField("match", "yes", Field.Store.NO), + histogramFieldDocValues(FIELD_NAME, new double[] { -10, 0.01, 1, 90 }) + ) ); }, count -> { assertEquals(10, count.getValue()); @@ -99,10 +109,8 @@ public void testQueryFiltering() throws IOException { }); } - private void testCase( - Query query, - CheckedConsumer indexer, - Consumer verify) throws IOException { + private void testCase(Query query, CheckedConsumer indexer, Consumer verify) + throws IOException { testCase(count("_name").field(FIELD_NAME), query, indexer, verify, defaultFieldType()); } diff --git a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/aggregations/metrics/HistogramPercentileAggregationTests.java b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/aggregations/metrics/HistogramPercentileAggregationTests.java index acf1f14f79eca..4d4d88a6c2f4c 100644 --- a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/aggregations/metrics/HistogramPercentileAggregationTests.java +++ b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/aggregations/metrics/HistogramPercentileAggregationTests.java @@ -6,7 +6,6 @@ */ package org.elasticsearch.xpack.analytics.aggregations.metrics; - import com.tdunning.math.stats.Centroid; import org.HdrHistogram.DoubleHistogram; @@ -37,41 +36,38 @@ import java.util.Iterator; import java.util.List; - public class HistogramPercentileAggregationTests extends ESSingleNodeTestCase { public void testHDRHistogram() throws Exception { XContentBuilder xContentBuilder = XContentFactory.jsonBuilder() .startObject() - 
.startObject("_doc") - .startObject("properties") - .startObject("data") - .field("type", "double") - .endObject() - .endObject() - .endObject() + .startObject("_doc") + .startObject("properties") + .startObject("data") + .field("type", "double") + .endObject() + .endObject() + .endObject() .endObject(); createIndex("raw"); PutMappingRequest request = new PutMappingRequest("raw").source(xContentBuilder); client().admin().indices().putMapping(request).actionGet(); - XContentBuilder xContentBuilder2 = XContentFactory.jsonBuilder() .startObject() - .startObject("_doc") - .startObject("properties") - .startObject("data") - .field("type", "histogram") - .endObject() - .endObject() - .endObject() + .startObject("_doc") + .startObject("properties") + .startObject("data") + .field("type", "histogram") + .endObject() + .endObject() + .endObject() .endObject(); createIndex("pre_agg"); PutMappingRequest request2 = new PutMappingRequest("pre_agg").source(xContentBuilder2); client().admin().indices().putMapping(request2).actionGet(); - int numberOfSignificantValueDigits = TestUtil.nextInt(random(), 1, 5); DoubleHistogram histogram = new DoubleHistogram(numberOfSignificantValueDigits); BulkRequest bulkRequest = new BulkRequest(); @@ -79,12 +75,9 @@ public void testHDRHistogram() throws Exception { int numDocs = 10000; int frq = 1000; - for (int i =0; i < numDocs; i ++) { - double value = random().nextDouble(); - XContentBuilder doc = XContentFactory.jsonBuilder() - .startObject() - .field("data", value) - .endObject(); + for (int i = 0; i < numDocs; i++) { + double value = random().nextDouble(); + XContentBuilder doc = XContentFactory.jsonBuilder().startObject().field("data", value).endObject(); bulkRequest.add(new IndexRequest("raw").source(doc)); histogram.recordValue(value); if ((i + 1) % frq == 0) { @@ -100,10 +93,10 @@ public void testHDRHistogram() throws Exception { } XContentBuilder preAggDoc = XContentFactory.jsonBuilder() .startObject() - .startObject("data") - .field("values", values.toArray(new Double[values.size()])) - .field("counts", counts.toArray(new Integer[counts.size()])) - .endObject() + .startObject("data") + .field("values", values.toArray(new Double[values.size()])) + .field("counts", counts.toArray(new Integer[counts.size()])) + .endObject() .endObject(); client().prepareIndex("pre_agg").setSource(preAggDoc).get(); histogram.reset(); @@ -117,17 +110,19 @@ public void testHDRHistogram() throws Exception { response = client().prepareSearch("pre_agg").get(); assertEquals(numDocs / frq, response.getHits().getTotalHits().value); - PercentilesAggregationBuilder builder = - AggregationBuilders.percentiles("agg").field("data").method(PercentilesMethod.HDR) - .numberOfSignificantValueDigits(numberOfSignificantValueDigits).percentiles(10); + PercentilesAggregationBuilder builder = AggregationBuilders.percentiles("agg") + .field("data") + .method(PercentilesMethod.HDR) + .numberOfSignificantValueDigits(numberOfSignificantValueDigits) + .percentiles(10); SearchResponse responseRaw = client().prepareSearch("raw").addAggregation(builder).get(); SearchResponse responsePreAgg = client().prepareSearch("pre_agg").addAggregation(builder).get(); SearchResponse responseBoth = client().prepareSearch("pre_agg", "raw").addAggregation(builder).get(); - InternalHDRPercentiles percentilesRaw = responseRaw.getAggregations().get("agg"); - InternalHDRPercentiles percentilesPreAgg = responsePreAgg.getAggregations().get("agg"); - InternalHDRPercentiles percentilesBoth = 
responseBoth.getAggregations().get("agg"); + InternalHDRPercentiles percentilesRaw = responseRaw.getAggregations().get("agg"); + InternalHDRPercentiles percentilesPreAgg = responsePreAgg.getAggregations().get("agg"); + InternalHDRPercentiles percentilesBoth = responseBoth.getAggregations().get("agg"); for (int i = 1; i < 100; i++) { assertEquals(percentilesRaw.percentile(i), percentilesPreAgg.percentile(i), 0.0); assertEquals(percentilesRaw.percentile(i), percentilesBoth.percentile(i), 0.0); @@ -137,36 +132,35 @@ public void testHDRHistogram() throws Exception { private void setupTDigestHistogram(int compression) throws Exception { XContentBuilder xContentBuilder = XContentFactory.jsonBuilder() .startObject() - .startObject("_doc") - .startObject("properties") - .startObject("inner") - .startObject("properties") - .startObject("data") - .field("type", "double") - .endObject() - .endObject() - .endObject() - .endObject() - .endObject() + .startObject("_doc") + .startObject("properties") + .startObject("inner") + .startObject("properties") + .startObject("data") + .field("type", "double") + .endObject() + .endObject() + .endObject() + .endObject() + .endObject() .endObject(); createIndex("raw"); PutMappingRequest request = new PutMappingRequest("raw").source(xContentBuilder); client().admin().indices().putMapping(request).actionGet(); - XContentBuilder xContentBuilder2 = XContentFactory.jsonBuilder() .startObject() - .startObject("_doc") - .startObject("properties") - .startObject("inner") - .startObject("properties") - .startObject("data") - .field("type", "histogram") - .endObject() - .endObject() - .endObject() - .endObject() - .endObject() + .startObject("_doc") + .startObject("properties") + .startObject("inner") + .startObject("properties") + .startObject("data") + .field("type", "histogram") + .endObject() + .endObject() + .endObject() + .endObject() + .endObject() .endObject(); createIndex("pre_agg"); PutMappingRequest request2 = new PutMappingRequest("pre_agg").source(xContentBuilder2); @@ -178,13 +172,13 @@ private void setupTDigestHistogram(int compression) throws Exception { int numDocs = 10000; int frq = 1000; - for (int i =0; i < numDocs; i ++) { - double value = random().nextDouble(); + for (int i = 0; i < numDocs; i++) { + double value = random().nextDouble(); XContentBuilder doc = XContentFactory.jsonBuilder() .startObject() - .startObject("inner") - .field("data", value) - .endObject() + .startObject("inner") + .field("data", value) + .endObject() .endObject(); bulkRequest.add(new IndexRequest("raw").source(doc)); histogram.add(value); @@ -200,12 +194,12 @@ private void setupTDigestHistogram(int compression) throws Exception { } XContentBuilder preAggDoc = XContentFactory.jsonBuilder() .startObject() - .startObject("inner") - .startObject("data") - .field("values", values.toArray(new Double[values.size()])) - .field("counts", counts.toArray(new Integer[counts.size()])) - .endObject() - .endObject() + .startObject("inner") + .startObject("data") + .field("values", values.toArray(new Double[values.size()])) + .field("counts", counts.toArray(new Integer[counts.size()])) + .endObject() + .endObject() .endObject(); client().prepareIndex("pre_agg").setSource(preAggDoc).get(); histogram = new TDigestState(compression); @@ -224,9 +218,11 @@ public void testTDigestHistogram() throws Exception { int compression = TestUtil.nextInt(random(), 200, 300); setupTDigestHistogram(compression); - PercentilesAggregationBuilder builder = - 
AggregationBuilders.percentiles("agg").field("inner.data").method(PercentilesMethod.TDIGEST) - .compression(compression).percentiles(10, 25, 50, 75); + PercentilesAggregationBuilder builder = AggregationBuilders.percentiles("agg") + .field("inner.data") + .method(PercentilesMethod.TDIGEST) + .compression(compression) + .percentiles(10, 25, 50, 75); SearchResponse responseRaw = client().prepareSearch("raw").addAggregation(builder).get(); SearchResponse responsePreAgg = client().prepareSearch("pre_agg").addAggregation(builder).get(); diff --git a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/aggregations/metrics/TDigestPreAggregatedPercentileRanksAggregatorTests.java b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/aggregations/metrics/TDigestPreAggregatedPercentileRanksAggregatorTests.java index 8f90693eaec7e..dde993310557a 100644 --- a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/aggregations/metrics/TDigestPreAggregatedPercentileRanksAggregatorTests.java +++ b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/aggregations/metrics/TDigestPreAggregatedPercentileRanksAggregatorTests.java @@ -6,11 +6,6 @@ */ package org.elasticsearch.xpack.analytics.aggregations.metrics; -import java.io.IOException; -import java.util.Collections; -import java.util.Iterator; -import java.util.List; - import org.apache.lucene.document.Document; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.RandomIndexWriter; @@ -35,6 +30,11 @@ import org.elasticsearch.xpack.analytics.mapper.HistogramFieldMapper; import org.hamcrest.Matchers; +import java.io.IOException; +import java.util.Collections; +import java.util.Iterator; +import java.util.List; + import static org.elasticsearch.xpack.analytics.AnalyticsTestsUtils.histogramFieldDocValues; public class TDigestPreAggregatedPercentileRanksAggregatorTests extends AggregatorTestCase { @@ -46,30 +46,30 @@ protected List getSearchPlugins() { @Override protected AggregationBuilder createAggBuilderForTypeTest(MappedFieldType fieldType, String fieldName) { - return new PercentileRanksAggregationBuilder("tdigest_percentiles", new double[]{1.0}) - .field(fieldName) + return new PercentileRanksAggregationBuilder("tdigest_percentiles", new double[] { 1.0 }).field(fieldName) .percentilesConfig(new PercentilesConfig.TDigest()); } @Override protected List getSupportedValuesSourceTypes() { // Note: this is the same list as Core, plus Analytics - return List.of(CoreValuesSourceType.NUMERIC, + return List.of( + CoreValuesSourceType.NUMERIC, CoreValuesSourceType.DATE, CoreValuesSourceType.BOOLEAN, - AnalyticsValuesSourceType.HISTOGRAM); + AnalyticsValuesSourceType.HISTOGRAM + ); } public void testSimple() throws IOException { - try (Directory dir = newDirectory(); - RandomIndexWriter w = new RandomIndexWriter(random(), dir)) { + try (Directory dir = newDirectory(); RandomIndexWriter w = new RandomIndexWriter(random(), dir)) { Document doc = new Document(); - doc.add(histogramFieldDocValues("field", new double[] {3, 0.2, 10})); + doc.add(histogramFieldDocValues("field", new double[] { 3, 0.2, 10 })); w.addDocument(doc); - PercentileRanksAggregationBuilder aggBuilder = new PercentileRanksAggregationBuilder("my_agg", new double[] {0.1, 0.5, 12}) - .field("field") - .method(PercentilesMethod.TDIGEST); + PercentileRanksAggregationBuilder aggBuilder = new PercentileRanksAggregationBuilder("my_agg", new double[] { 0.1, 0.5, 12 }) + .field("field") + 
.method(PercentilesMethod.TDIGEST); MappedFieldType fieldType = new HistogramFieldMapper.HistogramFieldType("field", Collections.emptyMap()); try (IndexReader reader = w.getReader()) { IndexSearcher searcher = new IndexSearcher(reader); @@ -90,7 +90,7 @@ public void testSimple() throws IOException { // https://github.com/elastic/elasticsearch/issues/14851 // assertThat(rank.getPercent(), Matchers.equalTo(100d)); assertFalse(rankIterator.hasNext()); - assertTrue(AggregationInspectionHelper.hasValue(((InternalTDigestPercentileRanks)ranks))); + assertTrue(AggregationInspectionHelper.hasValue(((InternalTDigestPercentileRanks) ranks))); } } } diff --git a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/aggregations/metrics/TDigestPreAggregatedPercentilesAggregatorTests.java b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/aggregations/metrics/TDigestPreAggregatedPercentilesAggregatorTests.java index c3ea1ae515a34..c4810642d99b9 100644 --- a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/aggregations/metrics/TDigestPreAggregatedPercentilesAggregatorTests.java +++ b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/aggregations/metrics/TDigestPreAggregatedPercentilesAggregatorTests.java @@ -6,14 +6,6 @@ */ package org.elasticsearch.xpack.analytics.aggregations.metrics; -import static java.util.Collections.singleton; -import static org.elasticsearch.xpack.analytics.AnalyticsTestsUtils.histogramFieldDocValues; - -import java.io.IOException; -import java.util.Collections; -import java.util.List; -import java.util.function.Consumer; - import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.RandomIndexWriter; @@ -39,6 +31,14 @@ import org.elasticsearch.xpack.analytics.aggregations.support.AnalyticsValuesSourceType; import org.elasticsearch.xpack.analytics.mapper.HistogramFieldMapper; +import java.io.IOException; +import java.util.Collections; +import java.util.List; +import java.util.function.Consumer; + +import static java.util.Collections.singleton; +import static org.elasticsearch.xpack.analytics.AnalyticsTestsUtils.histogramFieldDocValues; + public class TDigestPreAggregatedPercentilesAggregatorTests extends AggregatorTestCase { @Override @@ -48,59 +48,63 @@ protected List getSearchPlugins() { @Override protected AggregationBuilder createAggBuilderForTypeTest(MappedFieldType fieldType, String fieldName) { - return new PercentilesAggregationBuilder("tdigest_percentiles") - .field(fieldName) - .percentilesConfig(new PercentilesConfig.TDigest()); + return new PercentilesAggregationBuilder("tdigest_percentiles").field(fieldName).percentilesConfig(new PercentilesConfig.TDigest()); } @Override protected List getSupportedValuesSourceTypes() { // Note: this is the same list as Core, plus Analytics - return List.of(CoreValuesSourceType.NUMERIC, + return List.of( + CoreValuesSourceType.NUMERIC, CoreValuesSourceType.DATE, CoreValuesSourceType.BOOLEAN, - AnalyticsValuesSourceType.HISTOGRAM); + AnalyticsValuesSourceType.HISTOGRAM + ); } public void testNoMatchingField() throws IOException { - testCase(new MatchAllDocsQuery(), iw -> { - iw.addDocument(singleton(histogramFieldDocValues("wrong_number", new double[]{7, 1}))); - }, hdr -> { - //assertEquals(0L, hdr.state.getTotalCount()); - assertFalse(AggregationInspectionHelper.hasValue(hdr)); - }); + testCase( + new MatchAllDocsQuery(), + iw -> { 
iw.addDocument(singleton(histogramFieldDocValues("wrong_number", new double[] { 7, 1 }))); }, + hdr -> { + // assertEquals(0L, hdr.state.getTotalCount()); + assertFalse(AggregationInspectionHelper.hasValue(hdr)); + } + ); } public void testEmptyField() throws IOException { - testCase(new MatchAllDocsQuery(), iw -> { - iw.addDocument(singleton(histogramFieldDocValues("number", new double[0]))); - }, hdr -> { - assertFalse(AggregationInspectionHelper.hasValue(hdr)); - }); + testCase( + new MatchAllDocsQuery(), + iw -> { iw.addDocument(singleton(histogramFieldDocValues("number", new double[0]))); }, + hdr -> { assertFalse(AggregationInspectionHelper.hasValue(hdr)); } + ); } public void testSomeMatchesBinaryDocValues() throws IOException { - testCase(new DocValuesFieldExistsQuery("number"), iw -> { - iw.addDocument(singleton(histogramFieldDocValues("number", new double[]{60, 40, 20, 10}))); - }, hdr -> { - //assertEquals(4L, hdr.state.getTotalCount()); - double approximation = 0.05d; - assertEquals(15.0d, hdr.percentile(25), approximation); - assertEquals(30.0d, hdr.percentile(50), approximation); - assertEquals(50.0d, hdr.percentile(75), approximation); - assertEquals(60.0d, hdr.percentile(99), approximation); - assertTrue(AggregationInspectionHelper.hasValue(hdr)); - }); + testCase( + new DocValuesFieldExistsQuery("number"), + iw -> { iw.addDocument(singleton(histogramFieldDocValues("number", new double[] { 60, 40, 20, 10 }))); }, + hdr -> { + // assertEquals(4L, hdr.state.getTotalCount()); + double approximation = 0.05d; + assertEquals(15.0d, hdr.percentile(25), approximation); + assertEquals(30.0d, hdr.percentile(50), approximation); + assertEquals(50.0d, hdr.percentile(75), approximation); + assertEquals(60.0d, hdr.percentile(99), approximation); + assertTrue(AggregationInspectionHelper.hasValue(hdr)); + } + ); } public void testSomeMatchesMultiBinaryDocValues() throws IOException { testCase(new DocValuesFieldExistsQuery("number"), iw -> { - iw.addDocument(singleton(histogramFieldDocValues("number", new double[]{60, 40, 20, 10}))); - iw.addDocument(singleton(histogramFieldDocValues("number", new double[]{60, 40, 20, 10}))); - iw.addDocument(singleton(histogramFieldDocValues("number", new double[]{60, 40, 20, 10}))); - iw.addDocument(singleton(histogramFieldDocValues("number", new double[]{60, 40, 20, 10}))); + iw.addDocument(singleton(histogramFieldDocValues("number", new double[] { 60, 40, 20, 10 }))); + iw.addDocument(singleton(histogramFieldDocValues("number", new double[] { 60, 40, 20, 10 }))); + iw.addDocument(singleton(histogramFieldDocValues("number", new double[] { 60, 40, 20, 10 }))); + iw.addDocument(singleton(histogramFieldDocValues("number", new double[] { 60, 40, 20, 10 }))); }, hdr -> { - //assertEquals(16L, hdr.state.getTotalCount()); + // assertEquals(16L, hdr.state.getTotalCount()); double approximation = 0.05d; assertEquals(15.0d, hdr.percentile(25), approximation); assertEquals(30.0d, hdr.percentile(50), approximation); @@ -110,8 +114,11 @@ public void testSomeMatchesMultiBinaryDocValues() throws IOException { }); } - private void testCase(Query query, CheckedConsumer buildIndex, - Consumer verify) throws IOException { + private void testCase( + Query query, + CheckedConsumer buildIndex, + Consumer verify + ) throws IOException { try (Directory directory = newDirectory()) { try (RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory)) { buildIndex.accept(indexWriter); @@ -120,8 +127,8 @@ private void testCase(Query query, CheckedConsumer 
BoxplotAggregationBuilder.PARSER.apply(p, (String) n)))); + return new NamedXContentRegistry( + singletonList( + new NamedXContentRegistry.Entry( + BaseAggregationBuilder.class, + new ParseField(BoxplotAggregationBuilder.NAME), + (p, n) -> BoxplotAggregationBuilder.PARSER.apply(p, (String) n) + ) + ) + ); } @Override @@ -51,8 +56,7 @@ protected BoxplotAggregationBuilder doParseInstance(XContentParser parser) throw @Override protected BoxplotAggregationBuilder createTestInstance() { - BoxplotAggregationBuilder aggregationBuilder = new BoxplotAggregationBuilder(aggregationName) - .field(randomAlphaOfLength(10)); + BoxplotAggregationBuilder aggregationBuilder = new BoxplotAggregationBuilder(aggregationName).field(randomAlphaOfLength(10)); if (randomBoolean()) { aggregationBuilder.compression(randomDoubleBetween(0, 100, true)); } @@ -64,4 +68,3 @@ protected Writeable.Reader instanceReader() { return BoxplotAggregationBuilder::new; } } - diff --git a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/boxplot/BoxplotAggregatorTests.java b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/boxplot/BoxplotAggregatorTests.java index 797d78e3bb577..bed0939510c6d 100644 --- a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/boxplot/BoxplotAggregatorTests.java +++ b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/boxplot/BoxplotAggregatorTests.java @@ -15,8 +15,8 @@ import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.Query; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.index.mapper.KeywordFieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.NumberFieldMapper; @@ -55,7 +55,6 @@ public class BoxplotAggregatorTests extends AggregatorTestCase { /** Script to return the {@code _value} provided by aggs framework. 
*/ public static final String VALUE_SCRIPT = "_value"; - @Override protected List getSearchPlugins() { return List.of(new AnalyticsPlugin()); @@ -77,9 +76,7 @@ protected ScriptService getMockScriptService() { scripts.put(VALUE_SCRIPT, vars -> ((Number) vars.get("_value")).doubleValue() + 1); - MockScriptEngine scriptEngine = new MockScriptEngine(MockScriptEngine.NAME, - scripts, - Collections.emptyMap()); + MockScriptEngine scriptEngine = new MockScriptEngine(MockScriptEngine.NAME, scripts, Collections.emptyMap()); Map engines = Collections.singletonMap(scriptEngine.getType(), scriptEngine); return new ScriptService(Settings.EMPTY, engines, ScriptModule.CORE_CONTEXTS); @@ -180,7 +177,7 @@ public void testMissingField() throws IOException { iw.addDocument(singleton(new NumericDocValuesField("other", 4))); iw.addDocument(singleton(new NumericDocValuesField("other", 5))); iw.addDocument(singleton(new NumericDocValuesField("number", 0))); - }, (Consumer) boxplot -> { + }, (Consumer) boxplot -> { assertEquals(0, boxplot.getMin(), 0); assertEquals(10, boxplot.getMax(), 0); assertEquals(10, boxplot.getQ1(), 0); @@ -190,8 +187,7 @@ public void testMissingField() throws IOException { } public void testUnmappedWithMissingField() throws IOException { - BoxplotAggregationBuilder aggregationBuilder = new BoxplotAggregationBuilder("boxplot") - .field("does_not_exist").missing(0L); + BoxplotAggregationBuilder aggregationBuilder = new BoxplotAggregationBuilder("boxplot").field("does_not_exist").missing(0L); MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("number", NumberFieldMapper.NumberType.INTEGER); @@ -212,56 +208,54 @@ public void testUnsupportedType() { MappedFieldType fieldType = new KeywordFieldMapper.KeywordFieldType("not_a_number"); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, - () -> testCase(aggregationBuilder, new MatchAllDocsQuery(), iw -> { - iw.addDocument(singleton(new SortedSetDocValuesField("string", new BytesRef("foo")))); - }, (Consumer) boxplot -> { - fail("Should have thrown exception"); - }, fieldType)); - assertEquals(e.getMessage(), "Field [not_a_number] of type [keyword] " + - "is not supported for aggregation [boxplot]"); + IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, + () -> testCase( + aggregationBuilder, + new MatchAllDocsQuery(), + iw -> { iw.addDocument(singleton(new SortedSetDocValuesField("string", new BytesRef("foo")))); }, + (Consumer) boxplot -> { fail("Should have thrown exception"); }, + fieldType + ) + ); + assertEquals(e.getMessage(), "Field [not_a_number] of type [keyword] " + "is not supported for aggregation [boxplot]"); } public void testBadMissingField() { - BoxplotAggregationBuilder aggregationBuilder = new BoxplotAggregationBuilder("boxplot").field("number") - .missing("not_a_number"); + BoxplotAggregationBuilder aggregationBuilder = new BoxplotAggregationBuilder("boxplot").field("number").missing("not_a_number"); MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("number", NumberFieldMapper.NumberType.INTEGER); - expectThrows(NumberFormatException.class, - () -> testCase(aggregationBuilder, new MatchAllDocsQuery(), iw -> { - iw.addDocument(singleton(new NumericDocValuesField("number", 2))); - iw.addDocument(singleton(new NumericDocValuesField("number", 2))); - iw.addDocument(singleton(new NumericDocValuesField("number", 3))); - iw.addDocument(singleton(new NumericDocValuesField("number", 4))); - iw.addDocument(singleton(new NumericDocValuesField("number", 5))); 
- iw.addDocument(singleton(new NumericDocValuesField("number", 10))); - }, (Consumer) boxplot -> { - fail("Should have thrown exception"); - }, fieldType)); + expectThrows(NumberFormatException.class, () -> testCase(aggregationBuilder, new MatchAllDocsQuery(), iw -> { + iw.addDocument(singleton(new NumericDocValuesField("number", 2))); + iw.addDocument(singleton(new NumericDocValuesField("number", 2))); + iw.addDocument(singleton(new NumericDocValuesField("number", 3))); + iw.addDocument(singleton(new NumericDocValuesField("number", 4))); + iw.addDocument(singleton(new NumericDocValuesField("number", 5))); + iw.addDocument(singleton(new NumericDocValuesField("number", 10))); + }, (Consumer) boxplot -> { fail("Should have thrown exception"); }, fieldType)); } public void testUnmappedWithBadMissingField() { - BoxplotAggregationBuilder aggregationBuilder = new BoxplotAggregationBuilder("boxplot") - .field("does_not_exist").missing("not_a_number"); + BoxplotAggregationBuilder aggregationBuilder = new BoxplotAggregationBuilder("boxplot").field("does_not_exist") + .missing("not_a_number"); MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("number", NumberFieldMapper.NumberType.INTEGER); - expectThrows(NumberFormatException.class, - () -> testCase(aggregationBuilder, new MatchAllDocsQuery(), iw -> { - iw.addDocument(singleton(new NumericDocValuesField("number", 2))); - iw.addDocument(singleton(new NumericDocValuesField("number", 2))); - iw.addDocument(singleton(new NumericDocValuesField("number", 3))); - iw.addDocument(singleton(new NumericDocValuesField("number", 4))); - iw.addDocument(singleton(new NumericDocValuesField("number", 5))); - iw.addDocument(singleton(new NumericDocValuesField("number", 10))); - }, (Consumer) boxplot -> { - fail("Should have thrown exception"); - }, fieldType)); + expectThrows(NumberFormatException.class, () -> testCase(aggregationBuilder, new MatchAllDocsQuery(), iw -> { + iw.addDocument(singleton(new NumericDocValuesField("number", 2))); + iw.addDocument(singleton(new NumericDocValuesField("number", 2))); + iw.addDocument(singleton(new NumericDocValuesField("number", 3))); + iw.addDocument(singleton(new NumericDocValuesField("number", 4))); + iw.addDocument(singleton(new NumericDocValuesField("number", 5))); + iw.addDocument(singleton(new NumericDocValuesField("number", 10))); + }, (Consumer) boxplot -> { fail("Should have thrown exception"); }, fieldType)); } public void testEmptyBucket() throws IOException { - HistogramAggregationBuilder histogram = new HistogramAggregationBuilder("histo").field("number").interval(10).minDocCount(0) + HistogramAggregationBuilder histogram = new HistogramAggregationBuilder("histo").field("number") + .interval(10) + .minDocCount(0) .subAggregation(new BoxplotAggregationBuilder("boxplot").field("number")); MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("number", NumberFieldMapper.NumberType.INTEGER); @@ -301,8 +295,7 @@ public void testEmptyBucket() throws IOException { } public void testFormatter() throws IOException { - BoxplotAggregationBuilder aggregationBuilder = new BoxplotAggregationBuilder("boxplot").field("number") - .format("0000.0"); + BoxplotAggregationBuilder aggregationBuilder = new BoxplotAggregationBuilder("boxplot").field("number").format("0000.0"); MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("number", NumberFieldMapper.NumberType.INTEGER); @@ -327,8 +320,9 @@ public void testFormatter() throws IOException { } public void testGetProperty() throws IOException { 
-        GlobalAggregationBuilder globalBuilder = new GlobalAggregationBuilder("global")
-            .subAggregation(new BoxplotAggregationBuilder("boxplot").field("number"));
+        GlobalAggregationBuilder globalBuilder = new GlobalAggregationBuilder("global").subAggregation(
+            new BoxplotAggregationBuilder("boxplot").field("number")
+        );
 
         MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("number", NumberFieldMapper.NumberType.INTEGER);
 
@@ -352,8 +346,7 @@ public void testGetProperty() throws IOException {
     }
 
     public void testValueScript() throws IOException {
-        BoxplotAggregationBuilder aggregationBuilder = new BoxplotAggregationBuilder("boxplot")
-            .field("number")
+        BoxplotAggregationBuilder aggregationBuilder = new BoxplotAggregationBuilder("boxplot").field("number")
             .script(new Script(ScriptType.INLINE, MockScriptEngine.NAME, VALUE_SCRIPT, Collections.emptyMap()));
 
         MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("number", NumberFieldMapper.NumberType.INTEGER);
 
@@ -371,8 +364,7 @@ public void testValueScript() throws IOException {
     }
 
     public void testValueScriptUnmapped() throws IOException {
-        BoxplotAggregationBuilder aggregationBuilder = new BoxplotAggregationBuilder("boxplot")
-            .field("does_not_exist")
+        BoxplotAggregationBuilder aggregationBuilder = new BoxplotAggregationBuilder("boxplot").field("does_not_exist")
            .script(new Script(ScriptType.INLINE, MockScriptEngine.NAME, VALUE_SCRIPT, Collections.emptyMap()));
 
         MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("number", NumberFieldMapper.NumberType.INTEGER);
 
@@ -390,8 +382,7 @@ public void testValueScriptUnmapped() throws IOException {
     }
 
     public void testValueScriptUnmappedMissing() throws IOException {
-        BoxplotAggregationBuilder aggregationBuilder = new BoxplotAggregationBuilder("boxplot")
-            .field("does_not_exist")
+        BoxplotAggregationBuilder aggregationBuilder = new BoxplotAggregationBuilder("boxplot").field("does_not_exist")
             .script(new Script(ScriptType.INLINE, MockScriptEngine.NAME, VALUE_SCRIPT, Collections.emptyMap()))
             .missing(1.0);
 
@@ -410,9 +401,8 @@ public void testValueScriptUnmappedMissing() throws IOException {
         }, fieldType);
     }
 
-    private void testCase(Query query,
-                          CheckedConsumer buildIndex,
-                          Consumer verify) throws IOException {
+    private void testCase(Query query, CheckedConsumer buildIndex, Consumer verify)
+        throws IOException {
         MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("number", NumberFieldMapper.NumberType.INTEGER);
         BoxplotAggregationBuilder aggregationBuilder = new BoxplotAggregationBuilder("boxplot").field("number");
         testCase(aggregationBuilder, query, buildIndex, verify, fieldType);
diff --git a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/boxplot/InternalBoxplotTests.java b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/boxplot/InternalBoxplotTests.java
index 38955ab105fd2..cbc720ae25057 100644
--- a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/boxplot/InternalBoxplotTests.java
+++ b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/boxplot/InternalBoxplotTests.java
@@ -7,12 +7,12 @@
 
 package org.elasticsearch.xpack.analytics.boxplot;
 
-import org.elasticsearch.common.xcontent.ParseField;
 import org.elasticsearch.common.io.stream.BytesStreamOutput;
 import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.util.CollectionUtils;
 import org.elasticsearch.common.xcontent.NamedXContentRegistry;
+import org.elasticsearch.common.xcontent.ParseField;
 import org.elasticsearch.plugins.SearchPlugin;
 import org.elasticsearch.search.DocValueFormat;
 import org.elasticsearch.search.aggregations.Aggregation;
@@ -102,12 +102,13 @@ protected InternalBoxplot mutateInstance(InternalBoxplot instance) {
 
     @Override
     protected List getNamedXContents() {
-        return CollectionUtils.appendToCopy(super.getNamedXContents(), new NamedXContentRegistry.Entry(Aggregation.class,
-            new ParseField(BoxplotAggregationBuilder.NAME),
-            (p, c) -> {
-                assumeTrue("There is no ParsedBoxlot yet", false);
-                return null;
-            }));
+        return CollectionUtils.appendToCopy(
+            super.getNamedXContents(),
+            new NamedXContentRegistry.Entry(Aggregation.class, new ParseField(BoxplotAggregationBuilder.NAME), (p, c) -> {
+                assumeTrue("There is no ParsedBoxlot yet", false);
+                return null;
+            })
+        );
     }
 
     public void testIQR() {
diff --git a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/cumulativecardinality/CumulativeCardinalityAggregatorTests.java b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/cumulativecardinality/CumulativeCardinalityAggregatorTests.java
index e0a105820123f..ca2b48550e77d 100644
--- a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/cumulativecardinality/CumulativeCardinalityAggregatorTests.java
+++ b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/cumulativecardinality/CumulativeCardinalityAggregatorTests.java
@@ -16,8 +16,8 @@
 import org.apache.lucene.search.MatchAllDocsQuery;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.store.Directory;
-import org.elasticsearch.core.CheckedConsumer;
 import org.elasticsearch.common.time.DateFormatters;
+import org.elasticsearch.core.CheckedConsumer;
 import org.elasticsearch.index.mapper.DateFieldMapper;
 import org.elasticsearch.index.mapper.MappedFieldType;
 import org.elasticsearch.index.mapper.NumberFieldMapper;
@@ -44,19 +44,20 @@ public class CumulativeCardinalityAggregatorTests extends AggregatorTestCase {
     private static final String VALUE_FIELD = "value_field";
 
     private static final List datasetTimes = Arrays.asList(
-        "2017-01-01T01:07:45", //1
-        "2017-01-01T03:43:34", //1
-        "2017-01-03T04:11:00", //3
-        "2017-01-03T05:11:31", //1
-        "2017-01-05T08:24:05", //5
-        "2017-01-05T13:09:32", //1
-        "2017-01-07T13:47:43", //7
-        "2017-01-08T16:14:34", //1
-        "2017-01-09T17:09:50", //9
-        "2017-01-09T22:55:46");//10
-
-    private static final List datasetValues = Arrays.asList(1,1,3,1,5,1,7,1,9,10);
-    private static final List cumulativeCardinality = Arrays.asList(1.0,1.0,2.0,2.0,3.0,3.0,4.0,4.0,6.0);
+        "2017-01-01T01:07:45", // 1
+        "2017-01-01T03:43:34", // 1
+        "2017-01-03T04:11:00", // 3
+        "2017-01-03T05:11:31", // 1
+        "2017-01-05T08:24:05", // 5
+        "2017-01-05T13:09:32", // 1
+        "2017-01-07T13:47:43", // 7
+        "2017-01-08T16:14:34", // 1
+        "2017-01-09T17:09:50", // 9
+        "2017-01-09T22:55:46"
+    );// 10
+
+    private static final List datasetValues = Arrays.asList(1, 1, 3, 1, 5, 1, 7, 1, 9, 10);
+    private static final List cumulativeCardinality = Arrays.asList(1.0, 1.0, 2.0, 2.0, 3.0, 3.0, 4.0, 4.0, 6.0);
 
     public void testSimple() throws IOException {
 
@@ -68,12 +69,14 @@ public void testSimple() throws IOException {
         aggBuilder.subAggregation(new CumulativeCardinalityPipelineAggregationBuilder("cumulative_card", "the_cardinality"));
 
         executeTestCase(query, aggBuilder, histogram -> {
-            assertEquals(9,
((Histogram)histogram).getBuckets().size()); - List buckets = ((Histogram)histogram).getBuckets(); + assertEquals(9, ((Histogram) histogram).getBuckets().size()); + List buckets = ((Histogram) histogram).getBuckets(); int counter = 0; for (Histogram.Bucket bucket : buckets) { - assertThat(((InternalSimpleLongValue) (bucket.getAggregations().get("cumulative_card"))).value(), - equalTo(cumulativeCardinality.get(counter))); + assertThat( + ((InternalSimpleLongValue) (bucket.getAggregations().get("cumulative_card"))).value(), + equalTo(cumulativeCardinality.get(counter)) + ); counter += 1; } }); @@ -88,8 +91,8 @@ public void testAllNull() throws IOException { aggBuilder.subAggregation(new CumulativeCardinalityPipelineAggregationBuilder("cumulative_card", "the_cardinality")); executeTestCase(query, aggBuilder, histogram -> { - assertEquals(9, ((Histogram)histogram).getBuckets().size()); - List buckets = ((Histogram)histogram).getBuckets(); + assertEquals(9, ((Histogram) histogram).getBuckets().size()); + List buckets = ((Histogram) histogram).getBuckets(); for (Histogram.Bucket bucket : buckets) { assertThat(((InternalSimpleLongValue) (bucket.getAggregations().get("cumulative_card"))).value(), equalTo(0.0)); } @@ -104,10 +107,14 @@ public void testNonCardinalityAgg() { aggBuilder.subAggregation(new SumAggregationBuilder("the_sum").field("foo")); aggBuilder.subAggregation(new CumulativeCardinalityPipelineAggregationBuilder("cumulative_card", "the_sum")); - AggregationExecutionException e = expectThrows(AggregationExecutionException.class, - () -> executeTestCase(query, aggBuilder, histogram -> fail("Test should not have executed"))); - assertThat(e.getMessage(), equalTo("buckets_path must reference a cardinality aggregation, " + - "got: [InternalSum] at aggregation [the_sum]")); + AggregationExecutionException e = expectThrows( + AggregationExecutionException.class, + () -> executeTestCase(query, aggBuilder, histogram -> fail("Test should not have executed")) + ); + assertThat( + e.getMessage(), + equalTo("buckets_path must reference a cardinality aggregation, " + "got: [InternalSum] at aggregation [the_sum]") + ); } private void executeTestCase(Query query, AggregationBuilder aggBuilder, Consumer verify) throws IOException { @@ -129,9 +136,12 @@ private void executeTestCase(Query query, AggregationBuilder aggBuilder, Consume }); } - private void executeTestCase(Query query, AggregationBuilder aggBuilder, Consumer verify, - CheckedConsumer setup) throws IOException { - + private void executeTestCase( + Query query, + AggregationBuilder aggBuilder, + Consumer verify, + CheckedConsumer setup + ) throws IOException { try (Directory directory = newDirectory()) { try (RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory)) { @@ -142,8 +152,7 @@ private void executeTestCase(Query query, AggregationBuilder aggBuilder, Consume IndexSearcher indexSearcher = newSearcher(indexReader, true, true); DateFieldMapper.DateFieldType fieldType = new DateFieldMapper.DateFieldType(HISTO_FIELD); - MappedFieldType valueFieldType - = new NumberFieldMapper.NumberFieldType("value_field", NumberFieldMapper.NumberType.LONG); + MappedFieldType valueFieldType = new NumberFieldMapper.NumberFieldType("value_field", NumberFieldMapper.NumberType.LONG); InternalAggregation histogram; histogram = searchAndReduce(indexSearcher, query, aggBuilder, fieldType, valueFieldType); diff --git a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/cumulativecardinality/CumulativeCardinalityTests.java 
b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/cumulativecardinality/CumulativeCardinalityTests.java index 91758a252f754..967c846749d2a 100644 --- a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/cumulativecardinality/CumulativeCardinalityTests.java +++ b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/cumulativecardinality/CumulativeCardinalityTests.java @@ -30,10 +30,13 @@ protected List plugins() { return singletonList(new SearchPlugin() { @Override public List getPipelineAggregations() { - return singletonList(new PipelineAggregationSpec( + return singletonList( + new PipelineAggregationSpec( CumulativeCardinalityPipelineAggregationBuilder.NAME, CumulativeCardinalityPipelineAggregationBuilder::new, - CumulativeCardinalityPipelineAggregationBuilder.PARSER)); + CumulativeCardinalityPipelineAggregationBuilder.PARSER + ) + ); } }); } @@ -42,18 +45,18 @@ public List getPipelineAggregations() { protected CumulativeCardinalityPipelineAggregationBuilder createTestAggregatorFactory() { String name = randomAlphaOfLengthBetween(3, 20); String bucketsPath = randomAlphaOfLengthBetween(3, 20); - CumulativeCardinalityPipelineAggregationBuilder builder = - new CumulativeCardinalityPipelineAggregationBuilder(name, bucketsPath); + CumulativeCardinalityPipelineAggregationBuilder builder = new CumulativeCardinalityPipelineAggregationBuilder(name, bucketsPath); if (randomBoolean()) { builder.format(randomAlphaOfLengthBetween(1, 10)); } return builder; } - public void testParentValidations() throws IOException { - CumulativeCardinalityPipelineAggregationBuilder builder = - new CumulativeCardinalityPipelineAggregationBuilder("name", randomAlphaOfLength(5)); + CumulativeCardinalityPipelineAggregationBuilder builder = new CumulativeCardinalityPipelineAggregationBuilder( + "name", + randomAlphaOfLength(5) + ); assertThat(validate(new HistogramAggregationBuilder("name"), builder), nullValue()); assertThat(validate(new DateHistogramAggregationBuilder("name"), builder), nullValue()); @@ -62,12 +65,20 @@ public void testParentValidations() throws IOException { // Mocked "test" agg, should fail validation AggregationBuilder stubParent = mock(AggregationBuilder.class); when(stubParent.getName()).thenReturn("name"); - assertThat(validate(stubParent, builder), equalTo( + assertThat( + validate(stubParent, builder), + equalTo( "Validation Failed: 1: cumulative_cardinality aggregation [name] must have a histogram, " - + "date_histogram or auto_date_histogram as parent;")); + + "date_histogram or auto_date_histogram as parent;" + ) + ); - assertThat(validate(emptyList(), builder), equalTo( + assertThat( + validate(emptyList(), builder), + equalTo( "Validation Failed: 1: cumulative_cardinality aggregation [name] must have a histogram, " - + "date_histogram or auto_date_histogram as parent but doesn't have a parent;")); + + "date_histogram or auto_date_histogram as parent but doesn't have a parent;" + ) + ); } } diff --git a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/mapper/HistogramFieldMapperTests.java b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/mapper/HistogramFieldMapperTests.java index b5809dfc099dc..cfca79f8ec324 100644 --- a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/mapper/HistogramFieldMapperTests.java +++ b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/mapper/HistogramFieldMapperTests.java @@ -25,7 +25,6 @@ import static 
org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; - public class HistogramFieldMapperTests extends MapperTestCase { @Override @@ -45,8 +44,7 @@ protected void minimalMapping(XContentBuilder b) throws IOException { @Override protected void registerParameters(ParameterChecker checker) throws IOException { - checker.registerUpdateCheck(b -> b.field("ignore_malformed", true), - m -> assertTrue(((HistogramFieldMapper)m).ignoreMalformed())); + checker.registerUpdateCheck(b -> b.field("ignore_malformed", true), m -> assertTrue(((HistogramFieldMapper) m).ignoreMalformed())); } @Override @@ -107,9 +105,7 @@ public void testMissingFieldCounts() throws Exception { } public void testIgnoreMalformed() throws Exception { - DocumentMapper mapper = createDocumentMapper( - fieldMapping(b -> b.field("type", "histogram").field("ignore_malformed", true)) - ); + DocumentMapper mapper = createDocumentMapper(fieldMapping(b -> b.field("type", "histogram").field("ignore_malformed", true))); ParsedDocument doc = mapper.parse(source(b -> b.startObject("field").field("values", new double[] { 2, 2 }).endObject())); assertThat(doc.rootDoc().getField("pre_aggregated"), nullValue()); } @@ -157,7 +153,7 @@ public void testIgnoreMalformedSkipsObjects() throws Exception { { b.startObject("values"); { - b.field("values", new double[] {2, 2}); + b.field("values", new double[] { 2, 2 }); b.startObject("otherData"); { b.startObject("more").field("toto", 1).endObject(); @@ -165,10 +161,10 @@ public void testIgnoreMalformedSkipsObjects() throws Exception { b.endObject(); } b.endObject(); - b.field("counts", new double[] {2, 2}); + b.field("counts", new double[] { 2, 2 }); } b.endObject(); - b.field("otherField","value"); + b.field("otherField", "value"); })); assertThat(doc.rootDoc().getField("pre_aggregated"), nullValue()); assertThat(doc.rootDoc().getField("otherField"), notNullValue()); @@ -274,32 +270,26 @@ public void testCountIsLong() throws Exception { public void testValuesNotInOrder() throws Exception { DocumentMapper mapper = createDocumentMapper(fieldMapping(this::minimalMapping)); SourceToParse source = source( - b -> b.field("field") - .startObject() - .field("counts", new int[] { 2, 8, 4 }) - .field("values", new double[] { 2, 3, 2 }) - .endObject() + b -> b.field("field").startObject().field("counts", new int[] { 2, 8, 4 }).field("values", new double[] { 2, 3, 2 }).endObject() ); Exception e = expectThrows(MapperParsingException.class, () -> mapper.parse(source)); - assertThat(e.getCause().getMessage(), containsString(" values must be in increasing order, " + - "got [2.0] but previous value was [3.0]")); + assertThat( + e.getCause().getMessage(), + containsString(" values must be in increasing order, " + "got [2.0] but previous value was [3.0]") + ); } public void testFieldNotObject() throws Exception { DocumentMapper mapper = createDocumentMapper(fieldMapping(this::minimalMapping)); SourceToParse source = source(b -> b.field("field", "bah")); Exception e = expectThrows(MapperParsingException.class, () -> mapper.parse(source)); - assertThat(e.getCause().getMessage(), containsString("expecting token of type [START_OBJECT] " + - "but found [VALUE_STRING]")); + assertThat(e.getCause().getMessage(), containsString("expecting token of type [START_OBJECT] " + "but found [VALUE_STRING]")); } public void testNegativeCount() throws Exception { DocumentMapper mapper = createDocumentMapper(fieldMapping(this::minimalMapping)); SourceToParse source = source( - b -> b.startObject("field") - 
.field("counts", new int[] { 2, 2, -3 }) - .field("values", new double[] { 2, 2, 3 }) - .endObject() + b -> b.startObject("field").field("counts", new int[] { 2, 2, -3 }).field("values", new double[] { 2, 2, 3 }).endObject() ); Exception e = expectThrows(MapperParsingException.class, () -> mapper.parse(source)); assertThat(e.getCause().getMessage(), containsString("[counts] elements must be >= 0 but got -3")); diff --git a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/movingPercentiles/MovingPercentilesAbstractAggregatorTests.java b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/movingPercentiles/MovingPercentilesAbstractAggregatorTests.java index 660f8bf4fdba7..734cf4e03d13d 100644 --- a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/movingPercentiles/MovingPercentilesAbstractAggregatorTests.java +++ b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/movingPercentiles/MovingPercentilesAbstractAggregatorTests.java @@ -6,7 +6,6 @@ */ package org.elasticsearch.xpack.analytics.movingPercentiles; - import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.Query; import org.elasticsearch.common.time.DateFormatters; @@ -21,7 +20,6 @@ import java.util.Arrays; import java.util.List; - public abstract class MovingPercentilesAbstractAggregatorTests extends AggregatorTestCase { protected static final String DATE_FIELD = "date"; @@ -48,24 +46,28 @@ public abstract class MovingPercentilesAbstractAggregatorTests extends Aggregato "2017-01-17T22:55:46", "2017-01-18T22:55:46", "2017-01-19T22:55:46", - "2017-01-20T22:55:46"); - + "2017-01-20T22:55:46" + ); public void testMatchAllDocs() throws IOException { check(randomIntBetween(0, 10), randomIntBetween(1, 25)); } private void check(int shift, int window) throws IOException { - MovingPercentilesPipelineAggregationBuilder builder = - new MovingPercentilesPipelineAggregationBuilder("MovingPercentiles", "percentiles", window); + MovingPercentilesPipelineAggregationBuilder builder = new MovingPercentilesPipelineAggregationBuilder( + "MovingPercentiles", + "percentiles", + window + ); builder.setShift(shift); Query query = new MatchAllDocsQuery(); DateHistogramAggregationBuilder aggBuilder = new DateHistogramAggregationBuilder("histo"); aggBuilder.calendarInterval(DateHistogramInterval.DAY).field(DATE_FIELD); - aggBuilder.subAggregation(new PercentilesAggregationBuilder("percentiles").field(VALUE_FIELD) - .percentilesConfig(getPercentileConfig())); + aggBuilder.subAggregation( + new PercentilesAggregationBuilder("percentiles").field(VALUE_FIELD).percentilesConfig(getPercentileConfig()) + ); aggBuilder.subAggregation(builder); executeTestCase(window, shift, query, aggBuilder); @@ -73,9 +75,8 @@ private void check(int shift, int window) throws IOException { protected abstract PercentilesConfig getPercentileConfig(); - protected abstract void executeTestCase(int window, int shift, Query query, - DateHistogramAggregationBuilder aggBuilder) throws IOException; - + protected abstract void executeTestCase(int window, int shift, Query query, DateHistogramAggregationBuilder aggBuilder) + throws IOException; protected int clamp(int index, int length) { if (index < 0) { diff --git a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/movingPercentiles/MovingPercentilesHDRAggregatorTests.java b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/movingPercentiles/MovingPercentilesHDRAggregatorTests.java index 
36b1c73734680..e81f3d30eb8b5 100644 --- a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/movingPercentiles/MovingPercentilesHDRAggregatorTests.java +++ b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/movingPercentiles/MovingPercentilesHDRAggregatorTests.java @@ -28,7 +28,6 @@ import java.io.IOException; - public class MovingPercentilesHDRAggregatorTests extends MovingPercentilesAbstractAggregatorTests { @Override @@ -37,8 +36,7 @@ protected PercentilesConfig getPercentileConfig() { } @Override - protected void executeTestCase(int window, int shift, Query query, - DateHistogramAggregationBuilder aggBuilder) throws IOException { + protected void executeTestCase(int window, int shift, Query query, DateHistogramAggregationBuilder aggBuilder) throws IOException { DoubleHistogram[] states = new DoubleHistogram[datasetTimes.size()]; try (Directory directory = newDirectory()) { @@ -49,7 +47,7 @@ protected void executeTestCase(int window, int shift, Query query, states[counter] = new DoubleHistogram(1); final int numberDocs = randomIntBetween(5, 50); long instant = asLong(date); - for (int i =0; i < numberDocs; i++) { + for (int i = 0; i < numberDocs; i++) { if (frequently()) { indexWriter.commit(); } @@ -69,12 +67,10 @@ protected void executeTestCase(int window, int shift, Query query, IndexSearcher indexSearcher = newSearcher(indexReader, true, true); DateFieldMapper.DateFieldType fieldType = new DateFieldMapper.DateFieldType(aggBuilder.field()); - MappedFieldType valueFieldType - = new NumberFieldMapper.NumberFieldType("value_field", NumberFieldMapper.NumberType.DOUBLE); + MappedFieldType valueFieldType = new NumberFieldMapper.NumberFieldType("value_field", NumberFieldMapper.NumberType.DOUBLE); InternalDateHistogram histogram; - histogram = searchAndReduce(indexSearcher, query, aggBuilder, 1000, - new MappedFieldType[]{fieldType, valueFieldType}); + histogram = searchAndReduce(indexSearcher, query, aggBuilder, 1000, new MappedFieldType[] { fieldType, valueFieldType }); for (int i = 0; i < histogram.getBuckets().size(); i++) { InternalDateHistogram.Bucket bucket = histogram.getBuckets().get(i); InternalHDRPercentiles values = bucket.getAggregations().get("MovingPercentiles"); diff --git a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/movingPercentiles/MovingPercentilesTDigestAggregatorTests.java b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/movingPercentiles/MovingPercentilesTDigestAggregatorTests.java index cefa20af60d81..9d1a15a47dba8 100644 --- a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/movingPercentiles/MovingPercentilesTDigestAggregatorTests.java +++ b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/movingPercentiles/MovingPercentilesTDigestAggregatorTests.java @@ -26,10 +26,8 @@ import org.elasticsearch.search.aggregations.metrics.PercentilesConfig; import org.elasticsearch.search.aggregations.metrics.TDigestState; - import java.io.IOException; - public class MovingPercentilesTDigestAggregatorTests extends MovingPercentilesAbstractAggregatorTests { @Override @@ -38,8 +36,7 @@ protected PercentilesConfig getPercentileConfig() { } @Override - protected void executeTestCase(int window, int shift, Query query, - DateHistogramAggregationBuilder aggBuilder) throws IOException { + protected void executeTestCase(int window, int shift, Query query, DateHistogramAggregationBuilder aggBuilder) throws IOException { TDigestState[] states = new 
TDigestState[datasetTimes.size()]; try (Directory directory = newDirectory()) { @@ -50,7 +47,7 @@ protected void executeTestCase(int window, int shift, Query query, states[counter] = new TDigestState(50); final int numberDocs = randomIntBetween(5, 50); long instant = asLong(date); - for (int i =0; i < numberDocs; i++) { + for (int i = 0; i < numberDocs; i++) { if (frequently()) { indexWriter.commit(); } @@ -70,12 +67,10 @@ protected void executeTestCase(int window, int shift, Query query, IndexSearcher indexSearcher = newSearcher(indexReader, true, true); DateFieldMapper.DateFieldType fieldType = new DateFieldMapper.DateFieldType(aggBuilder.field()); - MappedFieldType valueFieldType - = new NumberFieldMapper.NumberFieldType("value_field", NumberFieldMapper.NumberType.DOUBLE); + MappedFieldType valueFieldType = new NumberFieldMapper.NumberFieldType("value_field", NumberFieldMapper.NumberType.DOUBLE); InternalDateHistogram histogram; - histogram = searchAndReduce(indexSearcher, query, aggBuilder, 1000, - new MappedFieldType[]{fieldType, valueFieldType}); + histogram = searchAndReduce(indexSearcher, query, aggBuilder, 1000, new MappedFieldType[] { fieldType, valueFieldType }); for (int i = 0; i < histogram.getBuckets().size(); i++) { InternalDateHistogram.Bucket bucket = histogram.getBuckets().get(i); InternalTDigestPercentiles values = bucket.getAggregations().get("MovingPercentiles"); diff --git a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/movingPercentiles/MovingPercentilesTests.java b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/movingPercentiles/MovingPercentilesTests.java index fa2963edf76fc..618d005fd7578 100644 --- a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/movingPercentiles/MovingPercentilesTests.java +++ b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/movingPercentiles/MovingPercentilesTests.java @@ -31,10 +31,13 @@ protected List plugins() { return singletonList(new SearchPlugin() { @Override public List getPipelineAggregations() { - return singletonList(new PipelineAggregationSpec( + return singletonList( + new PipelineAggregationSpec( MovingPercentilesPipelineAggregationBuilder.NAME, MovingPercentilesPipelineAggregationBuilder::new, - MovingPercentilesPipelineAggregationBuilder.PARSER)); + MovingPercentilesPipelineAggregationBuilder.PARSER + ) + ); } }); } @@ -43,18 +46,23 @@ public List getPipelineAggregations() { protected MovingPercentilesPipelineAggregationBuilder createTestAggregatorFactory() { String name = randomAlphaOfLengthBetween(3, 20); String bucketsPath = randomAlphaOfLengthBetween(3, 20); - MovingPercentilesPipelineAggregationBuilder builder = - new MovingPercentilesPipelineAggregationBuilder(name, bucketsPath, TestUtil.nextInt(random(), 1, 10)); + MovingPercentilesPipelineAggregationBuilder builder = new MovingPercentilesPipelineAggregationBuilder( + name, + bucketsPath, + TestUtil.nextInt(random(), 1, 10) + ); if (randomBoolean()) { builder.setShift(randomIntBetween(0, 10)); } return builder; } - public void testParentValidations() throws IOException { - MovingPercentilesPipelineAggregationBuilder builder = - new MovingPercentilesPipelineAggregationBuilder("name", randomAlphaOfLength(5), TestUtil.nextInt(random(), 1, 10)); + MovingPercentilesPipelineAggregationBuilder builder = new MovingPercentilesPipelineAggregationBuilder( + "name", + randomAlphaOfLength(5), + TestUtil.nextInt(random(), 1, 10) + ); assertThat(validate(new 
HistogramAggregationBuilder("name"), builder), nullValue()); assertThat(validate(new DateHistogramAggregationBuilder("name"), builder), nullValue()); @@ -63,12 +71,20 @@ public void testParentValidations() throws IOException { // Mocked "test" agg, should fail validation AggregationBuilder stubParent = mock(AggregationBuilder.class); when(stubParent.getName()).thenReturn("name"); - assertThat(validate(stubParent, builder), equalTo( + assertThat( + validate(stubParent, builder), + equalTo( "Validation Failed: 1: moving_percentiles aggregation [name] must have a histogram, " - + "date_histogram or auto_date_histogram as parent;")); + + "date_histogram or auto_date_histogram as parent;" + ) + ); - assertThat(validate(emptyList(), builder), equalTo( + assertThat( + validate(emptyList(), builder), + equalTo( "Validation Failed: 1: moving_percentiles aggregation [name] must have a histogram, " - + "date_histogram or auto_date_histogram as parent but doesn't have a parent;")); + + "date_histogram or auto_date_histogram as parent but doesn't have a parent;" + ) + ); } } diff --git a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/multiterms/InternalMultiTermsTests.java b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/multiterms/InternalMultiTermsTests.java index c8d97a5b14011..26ce55da4c41b 100644 --- a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/multiterms/InternalMultiTermsTests.java +++ b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/multiterms/InternalMultiTermsTests.java @@ -7,27 +7,12 @@ package org.elasticsearch.xpack.analytics.multiterms; -import static org.elasticsearch.search.DocValueFormat.UNSIGNED_LONG_SHIFTED; -import static org.elasticsearch.xpack.analytics.multiterms.InternalMultiTerms.KeyConverter.DOUBLE; -import static org.elasticsearch.xpack.analytics.multiterms.InternalMultiTerms.KeyConverter.LONG; -import static org.elasticsearch.xpack.analytics.multiterms.InternalMultiTerms.KeyConverter.UNSIGNED_LONG; -import static org.hamcrest.Matchers.closeTo; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.hasItem; -import static org.hamcrest.Matchers.hasSize; - -import java.util.ArrayList; -import java.util.Collections; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.common.util.MockBigArrays; import org.elasticsearch.common.util.MockPageCacheRecycler; import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; import org.elasticsearch.plugins.SearchPlugin; import org.elasticsearch.script.ScriptService; @@ -42,6 +27,21 @@ import org.elasticsearch.test.InternalAggregationTestCase; import org.elasticsearch.xpack.analytics.AnalyticsPlugin; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import static org.elasticsearch.search.DocValueFormat.UNSIGNED_LONG_SHIFTED; +import static org.elasticsearch.xpack.analytics.multiterms.InternalMultiTerms.KeyConverter.DOUBLE; +import static org.elasticsearch.xpack.analytics.multiterms.InternalMultiTerms.KeyConverter.LONG; +import static 
org.elasticsearch.xpack.analytics.multiterms.InternalMultiTerms.KeyConverter.UNSIGNED_LONG; +import static org.hamcrest.Matchers.closeTo; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasItem; +import static org.hamcrest.Matchers.hasSize; + public class InternalMultiTermsTests extends InternalAggregationTestCase { @Override diff --git a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/multiterms/MultiTermsAggregationBuilderTests.java b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/multiterms/MultiTermsAggregationBuilderTests.java index d1d92333b09e6..74f657e49608e 100644 --- a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/multiterms/MultiTermsAggregationBuilderTests.java +++ b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/multiterms/MultiTermsAggregationBuilderTests.java @@ -7,21 +7,11 @@ package org.elasticsearch.xpack.analytics.multiterms; -import static org.elasticsearch.test.InternalAggregationTestCase.randomNumericDocValueFormat; -import static org.elasticsearch.xpack.analytics.multiterms.InternalMultiTermsTests.randomBucketOrder; -import static org.hamcrest.Matchers.hasSize; - -import java.io.IOException; -import java.time.ZoneId; -import java.util.ArrayList; -import java.util.Collections; -import java.util.List; - -import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.search.SearchModule; import org.elasticsearch.search.aggregations.Aggregator; @@ -32,6 +22,16 @@ import org.elasticsearch.test.AbstractSerializingTestCase; import org.junit.Before; +import java.io.IOException; +import java.time.ZoneId; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; + +import static org.elasticsearch.test.InternalAggregationTestCase.randomNumericDocValueFormat; +import static org.elasticsearch.xpack.analytics.multiterms.InternalMultiTermsTests.randomBucketOrder; +import static org.hamcrest.Matchers.hasSize; + public class MultiTermsAggregationBuilderTests extends AbstractSerializingTestCase { String aggregationName; @@ -61,12 +61,12 @@ static MultiValuesSourceFieldConfig randomTermConfig() { ? 
randomFrom(ValueType.STRING, ValueType.DOUBLE, ValueType.LONG, ValueType.DATE, ValueType.IP, ValueType.BOOLEAN) : null; return new MultiValuesSourceFieldConfig.Builder().setFieldName(field) - .setMissing(missing) - .setScript(null) - .setTimeZone(timeZone) - .setFormat(format) - .setUserValueTypeHint(userValueTypeHint) - .build(); + .setMissing(missing) + .setScript(null) + .setTimeZone(timeZone) + .setFormat(format) + .setUserValueTypeHint(userValueTypeHint) + .build(); } @Override diff --git a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/multiterms/MultiTermsAggregatorTests.java b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/multiterms/MultiTermsAggregatorTests.java index 7fe19d996750d..926878d167ffb 100644 --- a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/multiterms/MultiTermsAggregatorTests.java +++ b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/multiterms/MultiTermsAggregatorTests.java @@ -21,9 +21,9 @@ import org.apache.lucene.search.Query; import org.apache.lucene.search.TermQuery; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.time.DateFormatter; +import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.index.fielddata.ScriptDocValues; import org.elasticsearch.index.mapper.DateFieldMapper; import org.elasticsearch.index.mapper.KeywordFieldMapper; diff --git a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/normalize/NormalizeAggregatorTests.java b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/normalize/NormalizeAggregatorTests.java index 59be20dc67e20..396cad140a089 100644 --- a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/normalize/NormalizeAggregatorTests.java +++ b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/normalize/NormalizeAggregatorTests.java @@ -50,47 +50,56 @@ public class NormalizeAggregatorTests extends AggregatorTestCase { private static final String VALUE_FIELD = "value_field"; private static final List datasetTimes = Arrays.asList( - "2017-01-01T01:07:45", //1 - "2017-01-01T03:43:34", //1 - "2017-01-03T04:11:00", //3 - "2017-01-03T05:11:31", //3 - "2017-01-05T08:24:05", //5 - "2017-01-05T13:09:32", //5 - "2017-01-07T13:47:43", //7 - "2017-01-08T16:14:34", //8 - "2017-01-09T17:09:50", //9 - "2017-01-09T22:55:46");//9 + "2017-01-01T01:07:45", // 1 + "2017-01-01T03:43:34", // 1 + "2017-01-03T04:11:00", // 3 + "2017-01-03T05:11:31", // 3 + "2017-01-05T08:24:05", // 5 + "2017-01-05T13:09:32", // 5 + "2017-01-07T13:47:43", // 7 + "2017-01-08T16:14:34", // 8 + "2017-01-09T17:09:50", // 9 + "2017-01-09T22:55:46" + );// 9 private static final List datasetTerms = Arrays.asList( - "a", //1 - "a", //1 - "b", //2 - "b", //2 - "c", //3 - "c", //3 - "d", //4 - "e", //5 - "f", //6 - "f");//6 - - private static final List datasetValues = Arrays.asList(1,1,42,6,5,0,2,8,30,13); - private static final List datePercentOfSum = Arrays.asList(0.2,0.0,0.2,0.0,0.2,0.0,0.1,0.1,0.2); - private static final List termPercentOfSum = Arrays.asList(0.2,0.2,0.2,0.2,0.1,0.1); - private static final List rescaleOneHundred = Arrays.asList(0.0,Double.NaN,100.0,Double.NaN,6.521739130434782, - Double.NaN,0.0,13.043478260869565,89.1304347826087); + "a", // 1 + "a", // 1 + "b", // 2 + "b", // 2 + "c", // 3 + "c", // 3 + "d", // 4 + "e", // 5 + "f", // 6 + "f" + 
);// 6 + + private static final List datasetValues = Arrays.asList(1, 1, 42, 6, 5, 0, 2, 8, 30, 13); + private static final List datePercentOfSum = Arrays.asList(0.2, 0.0, 0.2, 0.0, 0.2, 0.0, 0.1, 0.1, 0.2); + private static final List termPercentOfSum = Arrays.asList(0.2, 0.2, 0.2, 0.2, 0.1, 0.1); + private static final List rescaleOneHundred = Arrays.asList( + 0.0, + Double.NaN, + 100.0, + Double.NaN, + 6.521739130434782, + Double.NaN, + 0.0, + 13.043478260869565, + 89.1304347826087 + ); public void testPercentOfTotalDocCount() throws IOException { DateHistogramAggregationBuilder aggBuilder = new DateHistogramAggregationBuilder("histo"); aggBuilder.calendarInterval(DateHistogramInterval.DAY).field(DATE_FIELD); - aggBuilder.subAggregation(new NormalizePipelineAggregationBuilder("normalized", null, "percent_of_sum", - List.of("_count"))); + aggBuilder.subAggregation(new NormalizePipelineAggregationBuilder("normalized", null, "percent_of_sum", List.of("_count"))); testCase(aggBuilder, (agg) -> { assertEquals(9, ((Histogram) agg).getBuckets().size()); List buckets = ((Histogram) agg).getBuckets(); for (int i = 0; i < buckets.size(); i++) { Histogram.Bucket bucket = buckets.get(i); - assertThat(((InternalSimpleValue) (bucket.getAggregations().get("normalized"))).value(), - equalTo(datePercentOfSum.get(i))); + assertThat(((InternalSimpleValue) (bucket.getAggregations().get("normalized"))).value(), equalTo(datePercentOfSum.get(i))); } }); } @@ -99,32 +108,28 @@ public void testValueMean() throws IOException { DateHistogramAggregationBuilder aggBuilder = new DateHistogramAggregationBuilder("histo"); aggBuilder.calendarInterval(DateHistogramInterval.DAY).field(DATE_FIELD); aggBuilder.subAggregation(new StatsAggregationBuilder("stats").field(VALUE_FIELD)); - aggBuilder.subAggregation(new NormalizePipelineAggregationBuilder("normalized", null, "rescale_0_100", - List.of("stats.sum"))); + aggBuilder.subAggregation(new NormalizePipelineAggregationBuilder("normalized", null, "rescale_0_100", List.of("stats.sum"))); testCase(aggBuilder, (agg) -> { assertEquals(9, ((Histogram) agg).getBuckets().size()); List buckets = ((Histogram) agg).getBuckets(); for (int i = 0; i < buckets.size(); i++) { Histogram.Bucket bucket = buckets.get(i); - assertThat(((InternalSimpleValue) (bucket.getAggregations().get("normalized"))).value(), - equalTo(rescaleOneHundred.get(i))); + assertThat(((InternalSimpleValue) (bucket.getAggregations().get("normalized"))).value(), equalTo(rescaleOneHundred.get(i))); } }); } public void testTermsAggParent() throws IOException { TermsAggregationBuilder aggBuilder = new TermsAggregationBuilder("terms").field(TERM_FIELD); - aggBuilder.subAggregation(new NormalizePipelineAggregationBuilder("normalized", null, "percent_of_sum", - List.of("_count"))); + aggBuilder.subAggregation(new NormalizePipelineAggregationBuilder("normalized", null, "percent_of_sum", List.of("_count"))); testCase(aggBuilder, (agg) -> { assertEquals(6, ((Terms) agg).getBuckets().size()); List buckets = ((Terms) agg).getBuckets(); for (int i = 0; i < buckets.size(); i++) { Terms.Bucket bucket = buckets.get(i); - assertThat(((InternalSimpleValue) (bucket.getAggregations().get("normalized"))).value(), - equalTo(termPercentOfSum.get(i))); + assertThat(((InternalSimpleValue) (bucket.getAggregations().get("normalized"))).value(), equalTo(termPercentOfSum.get(i))); } }); @@ -141,7 +146,8 @@ private void testCase(ValuesSourceAggregationBuilder aggBuilder, Consumer aggBuilder, Consumer plugins() { return singletonList(new 
SearchPlugin() { @Override public List getPipelineAggregations() { - return singletonList(new PipelineAggregationSpec( - NormalizePipelineAggregationBuilder.NAME, - NormalizePipelineAggregationBuilder::new, - NormalizePipelineAggregationBuilder.PARSER)); + return singletonList( + new PipelineAggregationSpec( + NormalizePipelineAggregationBuilder.NAME, + NormalizePipelineAggregationBuilder::new, + NormalizePipelineAggregationBuilder.PARSER + ) + ); } }); } @@ -36,17 +39,29 @@ public List getPipelineAggregations() { public void testInvalidNormalizer() { NormalizePipelineAggregationBuilder builder = createTestAggregatorFactory(); String invalidNormalizer = randomFrom(NormalizePipelineAggregationBuilder.NAME_MAP.keySet()) + randomAlphaOfLength(10); - IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, - () -> new NormalizePipelineAggregationBuilder(builder.getName(), builder.format(), invalidNormalizer, - List.of(builder.getBucketsPaths()))); + IllegalArgumentException exception = expectThrows( + IllegalArgumentException.class, + () -> new NormalizePipelineAggregationBuilder( + builder.getName(), + builder.format(), + invalidNormalizer, + List.of(builder.getBucketsPaths()) + ) + ); assertThat(exception.getMessage(), equalTo("invalid method [" + invalidNormalizer + "]")); } public void testHasParentValidation() { NormalizePipelineAggregationBuilder builder = createTestAggregatorFactory(); - assertThat(validate(emptyList(), builder), CoreMatchers.equalTo( - "Validation Failed: 1: normalize aggregation [" + builder.getName() + "] must be declared inside" + - " of another aggregation;")); + assertThat( + validate(emptyList(), builder), + CoreMatchers.equalTo( + "Validation Failed: 1: normalize aggregation [" + + builder.getName() + + "] must be declared inside" + + " of another aggregation;" + ) + ); } @Override diff --git a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/rate/InternalRateTests.java b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/rate/InternalRateTests.java index 595c79bca4f14..1d574d83e754e 100644 --- a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/rate/InternalRateTests.java +++ b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/rate/InternalRateTests.java @@ -7,9 +7,9 @@ package org.elasticsearch.xpack.analytics.rate; -import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.plugins.SearchPlugin; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.Aggregation; @@ -93,10 +93,12 @@ protected InternalRate mutateInstance(InternalRate instance) { @Override protected List getNamedXContents() { - return CollectionUtils.appendToCopy(super.getNamedXContents(), new NamedXContentRegistry.Entry( - Aggregation.class, new ParseField(RateAggregationBuilder.NAME), (p, c) -> { - assumeTrue("There is no ParsedRate yet", false); - return null; - })); + return CollectionUtils.appendToCopy( + super.getNamedXContents(), + new NamedXContentRegistry.Entry(Aggregation.class, new ParseField(RateAggregationBuilder.NAME), (p, c) -> { + assumeTrue("There is no ParsedRate yet", false); + return null; + }) + ); } } diff --git a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/rate/RateAggregationBuilderTests.java 
b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/rate/RateAggregationBuilderTests.java index 84b17b072b815..f8d70a300fdcb 100644 --- a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/rate/RateAggregationBuilderTests.java +++ b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/rate/RateAggregationBuilderTests.java @@ -7,12 +7,12 @@ package org.elasticsearch.xpack.analytics.rate; -import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.Rounding; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.script.Script; import org.elasticsearch.search.SearchModule; diff --git a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/rate/RateAggregatorTests.java b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/rate/RateAggregatorTests.java index deb35fb17e9bc..2c44ed3843444 100644 --- a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/rate/RateAggregatorTests.java +++ b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/rate/RateAggregatorTests.java @@ -21,8 +21,8 @@ import org.apache.lucene.search.Query; import org.apache.lucene.search.TermQuery; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.index.fielddata.ScriptDocValues; import org.elasticsearch.index.mapper.DateFieldMapper; import org.elasticsearch.index.mapper.KeywordFieldMapper; @@ -404,48 +404,60 @@ public void testUnsupportedKeywordSandwich() throws IOException { .subAggregation(termsAggregationBuilder); IllegalArgumentException ex = expectThrows( - IllegalArgumentException.class, () -> testCase(dateHistogramAggregationBuilder, new MatchAllDocsQuery(), iw -> { - iw.addDocument( - doc( - "2010-03-11T01:07:45", - new NumericDocValuesField("val", 1), - new IntPoint("val", 1), - new SortedSetDocValuesField("term", new BytesRef("a")) - ) - ); - iw.addDocument( - doc( - "2010-03-12T01:07:45", - new NumericDocValuesField("val", 2), - new IntPoint("val", 2), - new SortedSetDocValuesField("term", new BytesRef("a")) - ) - ); - iw.addDocument( - doc( - "2010-04-01T03:43:34", - new NumericDocValuesField("val", 3), - new IntPoint("val", 3), - new SortedSetDocValuesField("term", new BytesRef("a")) - ) - ); - iw.addDocument( - doc( - "2010-04-27T03:43:34", - new NumericDocValuesField("val", 4), - new IntPoint("val", 4), - new SortedSetDocValuesField("term", new BytesRef("b")) - ) - ); - }, (Consumer) dh -> { - fail("Shouldn't be here"); - }, dateType, numType, keywordType)); + IllegalArgumentException.class, + () -> testCase(dateHistogramAggregationBuilder, new MatchAllDocsQuery(), iw -> { + iw.addDocument( + doc( + "2010-03-11T01:07:45", + new NumericDocValuesField("val", 1), + new IntPoint("val", 1), + new SortedSetDocValuesField("term", new BytesRef("a")) + ) + ); + iw.addDocument( + doc( + "2010-03-12T01:07:45", + new NumericDocValuesField("val", 2), + new IntPoint("val", 2), + new SortedSetDocValuesField("term", new BytesRef("a")) + ) + ); + iw.addDocument( + doc( + "2010-04-01T03:43:34", + new 
NumericDocValuesField("val", 3), + new IntPoint("val", 3), + new SortedSetDocValuesField("term", new BytesRef("a")) + ) + ); + iw.addDocument( + doc( + "2010-04-27T03:43:34", + new NumericDocValuesField("val", 4), + new IntPoint("val", 4), + new SortedSetDocValuesField("term", new BytesRef("b")) + ) + ); + }, (Consumer) dh -> { fail("Shouldn't be here"); }, dateType, numType, keywordType) + ); if (millisecondBasedRate) { - assertEquals("Cannot use non month-based rate unit [" + rate + "] with calendar interval histogram [" + - histogram + "] only month, quarter and year are supported for this histogram", ex.getMessage()); + assertEquals( + "Cannot use non month-based rate unit [" + + rate + + "] with calendar interval histogram [" + + histogram + + "] only month, quarter and year are supported for this histogram", + ex.getMessage() + ); } else { - assertEquals("Cannot use month-based rate unit [" + rate + "] with non-month based calendar interval histogram [" + - histogram + "] only week, day, hour, minute and second are supported for this histogram", ex.getMessage()); + assertEquals( + "Cannot use month-based rate unit [" + + rate + + "] with non-month based calendar interval histogram [" + + histogram + + "] only week, day, hour, minute and second are supported for this histogram", + ex.getMessage() + ); } } @@ -689,12 +701,19 @@ public void testFilterWithHistogramField() throws IOException { .subAggregation(rateAggregationBuilder); testCase(dateHistogramAggregationBuilder, new TermQuery(new Term("term", "a")), iw -> { - iw.addDocument(doc("2010-03-01T00:00:00", histogramFieldDocValues("val", new double[] { 1, 2 }), - new StringField("term", "a", Field.Store.NO))); - iw.addDocument(doc("2010-04-01T00:00:00", histogramFieldDocValues("val", new double[] { 3 }), - new StringField("term", "a", Field.Store.NO))); - iw.addDocument(doc("2010-04-01T00:00:00", histogramFieldDocValues("val", new double[] { 4 }), - new StringField("term", "b", Field.Store.NO))); + iw.addDocument( + doc( + "2010-03-01T00:00:00", + histogramFieldDocValues("val", new double[] { 1, 2 }), + new StringField("term", "a", Field.Store.NO) + ) + ); + iw.addDocument( + doc("2010-04-01T00:00:00", histogramFieldDocValues("val", new double[] { 3 }), new StringField("term", "a", Field.Store.NO)) + ); + iw.addDocument( + doc("2010-04-01T00:00:00", histogramFieldDocValues("val", new double[] { 4 }), new StringField("term", "b", Field.Store.NO)) + ); }, (Consumer) dh -> { assertThat(dh.getBuckets(), hasSize(2)); assertThat(((InternalRate) dh.getBuckets().get(0).getAggregations().asList().get(0)).value(), closeTo(3.0, 0.000001)); @@ -713,9 +732,14 @@ public void testModeWithoutField() { IllegalArgumentException ex = expectThrows( IllegalArgumentException.class, - () -> testCase(dateHistogramAggregationBuilder, new MatchAllDocsQuery(), iw -> { - iw.addDocument(doc("2010-03-12T01:07:45", new SortedNumericDocValuesField("val", 1))); - }, h -> { fail("Shouldn't be here"); }, dateType, numType) + () -> testCase( + dateHistogramAggregationBuilder, + new MatchAllDocsQuery(), + iw -> { iw.addDocument(doc("2010-03-12T01:07:45", new SortedNumericDocValuesField("val", 1))); }, + h -> { fail("Shouldn't be here"); }, + dateType, + numType + ) ); assertEquals("The mode parameter is only supported with field or script", ex.getMessage()); } diff --git a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/stringstats/InternalStringStatsTests.java 
b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/stringstats/InternalStringStatsTests.java index e1a4886de831e..c130ec850ac51 100644 --- a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/stringstats/InternalStringStatsTests.java +++ b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/stringstats/InternalStringStatsTests.java @@ -8,9 +8,9 @@ package org.elasticsearch.xpack.analytics.stringstats; import org.elasticsearch.client.analytics.ParsedStringStats; -import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.plugins.SearchPlugin; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.Aggregation; @@ -40,8 +40,14 @@ protected SearchPlugin registerPlugin() { @Override protected List getNamedXContents() { - return CollectionUtils.appendToCopy(super.getNamedXContents(), new NamedXContentRegistry.Entry(Aggregation.class, - new ParseField(StringStatsAggregationBuilder.NAME), (p, c) -> ParsedStringStats.PARSER.parse(p, (String) c))); + return CollectionUtils.appendToCopy( + super.getNamedXContents(), + new NamedXContentRegistry.Entry( + Aggregation.class, + new ParseField(StringStatsAggregationBuilder.NAME), + (p, c) -> ParsedStringStats.PARSER.parse(p, (String) c) + ) + ); } @Override @@ -68,45 +74,62 @@ private InternalStringStats createTestInstance(String name, Map } long count = randomLongBetween(1, maxCount); long totalLength = randomLongBetween(0, maxTotalLength); - return new InternalStringStats(name, count, totalLength, - between(0, Integer.MAX_VALUE), between(0, Integer.MAX_VALUE), randomCharOccurrences(), - randomBoolean(), DocValueFormat.RAW, metadata); + return new InternalStringStats( + name, + count, + totalLength, + between(0, Integer.MAX_VALUE), + between(0, Integer.MAX_VALUE), + randomCharOccurrences(), + randomBoolean(), + DocValueFormat.RAW, + metadata + ); } @Override protected InternalStringStats mutateInstance(InternalStringStats instance) throws IOException { - String name = instance.getName(); - long count = instance.getCount(); - long totalLength = instance.getTotalLength(); - int minLength = instance.getMinLength(); - int maxLength = instance.getMaxLength(); - Map charOccurrences = instance.getCharOccurrences(); - boolean showDistribution = instance.getShowDistribution(); - switch (between(0, 6)) { - case 0: - name = name + "a"; - break; - case 1: - count = randomValueOtherThan(count, () -> randomLongBetween(1, Long.MAX_VALUE)); - break; - case 2: - totalLength = randomValueOtherThan(totalLength, ESTestCase::randomNonNegativeLong); - break; - case 3: - minLength = randomValueOtherThan(minLength, () -> between(0, Integer.MAX_VALUE)); - break; - case 4: - maxLength = randomValueOtherThan(maxLength, () -> between(0, Integer.MAX_VALUE)); - break; - case 5: - charOccurrences = randomValueOtherThan(charOccurrences, this::randomCharOccurrences); - break; - case 6: - showDistribution = showDistribution == false; - break; - } - return new InternalStringStats(name, count, totalLength, minLength, maxLength, charOccurrences, showDistribution, - DocValueFormat.RAW, instance.getMetadata()); + String name = instance.getName(); + long count = instance.getCount(); + long totalLength = instance.getTotalLength(); + int minLength = instance.getMinLength(); + int maxLength = 
instance.getMaxLength(); + Map charOccurrences = instance.getCharOccurrences(); + boolean showDistribution = instance.getShowDistribution(); + switch (between(0, 6)) { + case 0: + name = name + "a"; + break; + case 1: + count = randomValueOtherThan(count, () -> randomLongBetween(1, Long.MAX_VALUE)); + break; + case 2: + totalLength = randomValueOtherThan(totalLength, ESTestCase::randomNonNegativeLong); + break; + case 3: + minLength = randomValueOtherThan(minLength, () -> between(0, Integer.MAX_VALUE)); + break; + case 4: + maxLength = randomValueOtherThan(maxLength, () -> between(0, Integer.MAX_VALUE)); + break; + case 5: + charOccurrences = randomValueOtherThan(charOccurrences, this::randomCharOccurrences); + break; + case 6: + showDistribution = showDistribution == false; + break; + } + return new InternalStringStats( + name, + count, + totalLength, + minLength, + maxLength, + charOccurrences, + showDistribution, + DocValueFormat.RAW, + instance.getMetadata() + ); } @Override diff --git a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/stringstats/StringStatsAggregationBuilderTests.java b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/stringstats/StringStatsAggregationBuilderTests.java index 5961ad205c2f2..e0d369148772d 100644 --- a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/stringstats/StringStatsAggregationBuilderTests.java +++ b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/stringstats/StringStatsAggregationBuilderTests.java @@ -7,9 +7,9 @@ package org.elasticsearch.xpack.analytics.stringstats; -import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.io.stream.Writeable.Reader; import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; @@ -28,9 +28,15 @@ public class StringStatsAggregationBuilderTests extends AbstractSerializingTestCase { @Override protected NamedXContentRegistry xContentRegistry() { - return new NamedXContentRegistry(Arrays.asList( - new NamedXContentRegistry.Entry(BaseAggregationBuilder.class, new ParseField(StringStatsAggregationBuilder.NAME), - (p, c) -> StringStatsAggregationBuilder.PARSER.parse(p, (String) c)))); + return new NamedXContentRegistry( + Arrays.asList( + new NamedXContentRegistry.Entry( + BaseAggregationBuilder.class, + new ParseField(StringStatsAggregationBuilder.NAME), + (p, c) -> StringStatsAggregationBuilder.PARSER.parse(p, (String) c) + ) + ) + ); } @Override @@ -73,15 +79,13 @@ protected StringStatsAggregationBuilder mutateInstance(StringStatsAggregationBui } public void testClientBuilder() throws IOException { - AbstractXContentTestCase.xContentTester( - this::createParser, this::createTestInstance, this::toXContentThroughClientBuilder, - p -> { - p.nextToken(); - AggregatorFactories.Builder b = AggregatorFactories.parseAggregators(p); - assertThat(b.getAggregatorFactories(), hasSize(1)); - assertThat(b.getPipelineAggregatorFactories(), empty()); - return (StringStatsAggregationBuilder) b.getAggregatorFactories().iterator().next(); - } ).test(); + AbstractXContentTestCase.xContentTester(this::createParser, this::createTestInstance, this::toXContentThroughClientBuilder, p -> { + p.nextToken(); + AggregatorFactories.Builder b = AggregatorFactories.parseAggregators(p); + 
assertThat(b.getAggregatorFactories(), hasSize(1)); + assertThat(b.getPipelineAggregatorFactories(), empty()); + return (StringStatsAggregationBuilder) b.getAggregatorFactories().iterator().next(); + }).test(); } private void toXContentThroughClientBuilder(StringStatsAggregationBuilder serverBuilder, XContentBuilder builder) throws IOException { @@ -91,11 +95,10 @@ private void toXContentThroughClientBuilder(StringStatsAggregationBuilder server } private org.elasticsearch.client.analytics.StringStatsAggregationBuilder createClientBuilder( - StringStatsAggregationBuilder serverBuilder) { + StringStatsAggregationBuilder serverBuilder + ) { org.elasticsearch.client.analytics.StringStatsAggregationBuilder builder = - new org.elasticsearch.client.analytics.StringStatsAggregationBuilder(serverBuilder.getName()); - return builder - .showDistribution(serverBuilder.showDistribution()) - .field(serverBuilder.field()); + new org.elasticsearch.client.analytics.StringStatsAggregationBuilder(serverBuilder.getName()); + return builder.showDistribution(serverBuilder.showDistribution()).field(serverBuilder.field()); } } diff --git a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/stringstats/StringStatsAggregatorTests.java b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/stringstats/StringStatsAggregatorTests.java index ab04dc76d9a73..5b3509e6e59ef 100644 --- a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/stringstats/StringStatsAggregatorTests.java +++ b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/stringstats/StringStatsAggregatorTests.java @@ -18,8 +18,8 @@ import org.apache.lucene.search.TermInSetQuery; import org.apache.lucene.store.Directory; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.index.mapper.IpFieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.NumberFieldMapper; @@ -82,10 +82,10 @@ public void testNoDocs() throws IOException { public void testUnmappedField() throws IOException { StringStatsAggregationBuilder aggregationBuilder = new StringStatsAggregationBuilder("_name").field("text"); testAggregation(aggregationBuilder, new MatchAllDocsQuery(), iw -> { - for(int i = 0; i < 10; i++) { + for (int i = 0; i < 10; i++) { iw.addDocument(singleton(new TextField("text", "test" + i, Field.Store.NO))); } - }, stats -> { + }, stats -> { assertEquals(0, stats.getCount()); assertEquals(Integer.MIN_VALUE, stats.getMaxLength()); assertEquals(Integer.MAX_VALUE, stats.getMinLength()); @@ -97,11 +97,9 @@ public void testUnmappedField() throws IOException { } public void testUnmappedWithMissingField() throws IOException { - StringStatsAggregationBuilder aggregationBuilder = new StringStatsAggregationBuilder("_name") - .field("text") - .missing("abca"); + StringStatsAggregationBuilder aggregationBuilder = new StringStatsAggregationBuilder("_name").field("text").missing("abca"); testAggregation(aggregationBuilder, new MatchAllDocsQuery(), iw -> { - for(int i=0; i < 10; i++) { + for (int i = 0; i < 10; i++) { iw.addDocument(singleton(new TextField("text", "test" + i, Field.Store.NO))); } }, stats -> { @@ -121,8 +119,7 @@ public void testMissing() throws IOException { final TextFieldMapper.TextFieldType fieldType = new TextFieldMapper.TextFieldType("text"); fieldType.setFielddata(true); - final 
StringStatsAggregationBuilder aggregationBuilder = new StringStatsAggregationBuilder("_name") - .field(fieldType.name()) + final StringStatsAggregationBuilder aggregationBuilder = new StringStatsAggregationBuilder("_name").field(fieldType.name()) .missing("b"); testAggregation(aggregationBuilder, new MatchAllDocsQuery(), iw -> { @@ -144,7 +141,7 @@ public void testMissing() throws IOException { public void testSingleValuedField() throws IOException { testAggregation(new MatchAllDocsQuery(), iw -> { - for(int i=0; i < 10; i++) { + for (int i = 0; i < 10; i++) { iw.addDocument(singleton(new TextField("text", "test" + i, Field.Store.NO))); } }, stats -> { @@ -162,7 +159,7 @@ public void testSingleValuedField() throws IOException { public void testNoMatchingField() throws IOException { testAggregation(new MatchAllDocsQuery(), iw -> { - for(int i=0; i < 10; i++) { + for (int i = 0; i < 10; i++) { iw.addDocument(singleton(new TextField("wrong_field", "test" + i, Field.Store.NO))); } }, stats -> { @@ -177,7 +174,7 @@ public void testNoMatchingField() throws IOException { public void testQueryFiltering() throws IOException { testAggregation(new TermInSetQuery("text", new BytesRef("test0"), new BytesRef("test1")), iw -> { - for(int i=0; i < 10; i++) { + for (int i = 0; i < 10; i++) { iw.addDocument(singleton(new TextField("text", "test" + i, Field.Store.NO))); } }, stats -> { @@ -198,13 +195,12 @@ public void testSingleValuedFieldWithFormatter() throws IOException { TextFieldMapper.TextFieldType fieldType = new TextFieldMapper.TextFieldType("text"); fieldType.setFielddata(true); - StringStatsAggregationBuilder aggregationBuilder = new StringStatsAggregationBuilder("_name") - .field("text") + StringStatsAggregationBuilder aggregationBuilder = new StringStatsAggregationBuilder("_name").field("text") .format("0000.00") .showDistribution(true); testAggregation(aggregationBuilder, new MatchAllDocsQuery(), iw -> { - for(int i=0; i < 10; i++) { + for (int i = 0; i < 10; i++) { iw.addDocument(singleton(new TextField("text", "test" + i, Field.Store.NO))); } }, stats -> { @@ -225,8 +221,7 @@ public void testNestedAggregation() throws IOException { TextFieldMapper.TextFieldType textFieldType = new TextFieldMapper.TextFieldType("text"); textFieldType.setFielddata(true); - TermsAggregationBuilder aggregationBuilder = new TermsAggregationBuilder("terms") - .userValueTypeHint(ValueType.NUMERIC) + TermsAggregationBuilder aggregationBuilder = new TermsAggregationBuilder("terms").userValueTypeHint(ValueType.NUMERIC) .field("value") .subAggregation(new StringStatsAggregationBuilder("text_stats").field("text").userValueTypeHint(ValueType.STRING)); @@ -235,10 +230,9 @@ public void testNestedAggregation() throws IOException { final int numDocs = 10; for (int i = 0; i < numDocs; i++) { for (int j = 0; j < 4; j++) - indexWriter.addDocument(List.of( - new NumericDocValuesField("value", i + 1), - new TextField("text", "test" + j, Field.Store.NO)) - ); + indexWriter.addDocument( + List.of(new NumericDocValuesField("value", i + 1), new TextField("text", "test" + j, Field.Store.NO)) + ); } indexWriter.close(); @@ -282,8 +276,7 @@ public void testValueScriptSingleValuedField() throws IOException { final TextFieldMapper.TextFieldType fieldType = new TextFieldMapper.TextFieldType("text"); fieldType.setFielddata(true); - final StringStatsAggregationBuilder aggregationBuilder = new StringStatsAggregationBuilder("_name") - .field(fieldType.name()) + final StringStatsAggregationBuilder aggregationBuilder = new 
StringStatsAggregationBuilder("_name").field(fieldType.name()) .script(new Script(ScriptType.INLINE, MockScriptEngine.NAME, VALUE_SCRIPT_NAME, emptyMap())); testAggregation(aggregationBuilder, new MatchAllDocsQuery(), iw -> { @@ -305,19 +298,16 @@ public void testValueScriptMultiValuedField() throws IOException { final TextFieldMapper.TextFieldType fieldType = new TextFieldMapper.TextFieldType("text"); fieldType.setFielddata(true); - final StringStatsAggregationBuilder aggregationBuilder = new StringStatsAggregationBuilder("_name") - .field(fieldType.name()) + final StringStatsAggregationBuilder aggregationBuilder = new StringStatsAggregationBuilder("_name").field(fieldType.name()) .script(new Script(ScriptType.INLINE, MockScriptEngine.NAME, VALUE_SCRIPT_NAME, emptyMap())); testAggregation(aggregationBuilder, new MatchAllDocsQuery(), iw -> { - iw.addDocument(Set.of( - new TextField(fieldType.name(), "b", Field.Store.NO), - new TextField(fieldType.name(), "c", Field.Store.NO) - )); - iw.addDocument(Set.of( - new TextField(fieldType.name(), "b", Field.Store.NO), - new TextField(fieldType.name(), "c", Field.Store.NO) - )); + iw.addDocument( + Set.of(new TextField(fieldType.name(), "b", Field.Store.NO), new TextField(fieldType.name(), "c", Field.Store.NO)) + ); + iw.addDocument( + Set.of(new TextField(fieldType.name(), "b", Field.Store.NO), new TextField(fieldType.name(), "c", Field.Store.NO)) + ); }, stats -> { assertEquals(4, stats.getCount()); assertEquals(2, stats.getMaxLength()); @@ -335,8 +325,9 @@ public void testFieldScriptSingleValuedField() throws IOException { final TextFieldMapper.TextFieldType fieldType = new TextFieldMapper.TextFieldType("text"); fieldType.setFielddata(true); - final StringStatsAggregationBuilder aggregationBuilder = new StringStatsAggregationBuilder("_name") - .script(new Script(ScriptType.INLINE, MockScriptEngine.NAME, FIELD_SCRIPT_NAME, singletonMap("field", fieldType.name()))); + final StringStatsAggregationBuilder aggregationBuilder = new StringStatsAggregationBuilder("_name").script( + new Script(ScriptType.INLINE, MockScriptEngine.NAME, FIELD_SCRIPT_NAME, singletonMap("field", fieldType.name())) + ); testAggregation(aggregationBuilder, new MatchAllDocsQuery(), iw -> { iw.addDocument(singleton(new TextField(fieldType.name(), "b", Field.Store.NO))); @@ -357,18 +348,17 @@ public void testFieldScriptMultiValuedField() throws IOException { final TextFieldMapper.TextFieldType fieldType = new TextFieldMapper.TextFieldType("text"); fieldType.setFielddata(true); - final StringStatsAggregationBuilder aggregationBuilder = new StringStatsAggregationBuilder("_name") - .script(new Script(ScriptType.INLINE, MockScriptEngine.NAME, FIELD_SCRIPT_NAME, singletonMap("field", fieldType.name()))); + final StringStatsAggregationBuilder aggregationBuilder = new StringStatsAggregationBuilder("_name").script( + new Script(ScriptType.INLINE, MockScriptEngine.NAME, FIELD_SCRIPT_NAME, singletonMap("field", fieldType.name())) + ); testAggregation(aggregationBuilder, new MatchAllDocsQuery(), iw -> { - iw.addDocument(Set.of( - new TextField(fieldType.name(), "b", Field.Store.NO), - new TextField(fieldType.name(), "c", Field.Store.NO) - )); - iw.addDocument(Set.of( - new TextField(fieldType.name(), "b", Field.Store.NO), - new TextField(fieldType.name(), "c", Field.Store.NO) - )); + iw.addDocument( + Set.of(new TextField(fieldType.name(), "b", Field.Store.NO), new TextField(fieldType.name(), "c", Field.Store.NO)) + ); + iw.addDocument( + Set.of(new TextField(fieldType.name(), "b", 
Field.Store.NO), new TextField(fieldType.name(), "c", Field.Store.NO)) + ); }, stats -> { assertEquals(4, stats.getCount()); assertEquals(2, stats.getMaxLength()); @@ -382,9 +372,11 @@ public void testFieldScriptMultiValuedField() throws IOException { }, fieldType); } - private void testAggregation(Query query, + private void testAggregation( + Query query, CheckedConsumer buildIndex, - Consumer verify) throws IOException { + Consumer verify + ) throws IOException { TextFieldMapper.TextFieldType fieldType = new TextFieldMapper.TextFieldType("text"); fieldType.setFielddata(true); @@ -404,8 +396,7 @@ private void testAggregation( @Override protected AggregationBuilder createAggBuilderForTypeTest(MappedFieldType fieldType, String fieldName) { - return new StringStatsAggregationBuilder("_name") - .field(fieldName); + return new StringStatsAggregationBuilder("_name").field(fieldName); } @Override @@ -421,13 +412,13 @@ protected List unsupportedMappedFieldTypes() { @Override protected ScriptService getMockScriptService() { final Map, Object>> scripts = Map.of( - VALUE_SCRIPT_NAME, vars -> "a" + vars.get("_value"), - FIELD_SCRIPT_NAME, vars -> { + VALUE_SCRIPT_NAME, + vars -> "a" + vars.get("_value"), + FIELD_SCRIPT_NAME, + vars -> { final String fieldName = (String) vars.get("field"); final LeafDocLookup lookup = (LeafDocLookup) vars.get("doc"); - return lookup.get(fieldName).stream() - .map(value -> "a" + value) - .collect(toList()); + return lookup.get(fieldName).stream().map(value -> "a" + value).collect(toList()); } ); final MockScriptEngine engine = new MockScriptEngine(MockScriptEngine.NAME, scripts, emptyMap()); diff --git a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/topmetrics/InternalTopMetricsReduceTests.java b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/topmetrics/InternalTopMetricsReduceTests.java index e38d36709a549..7807bf40a150c 100644 --- a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/topmetrics/InternalTopMetricsReduceTests.java +++ b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/topmetrics/InternalTopMetricsReduceTests.java @@ -41,9 +41,7 @@ public void testManyToReduce() { InternalTopMetrics first = buildFilled(1, top(SortValue.from(2.0), randomDouble())); InternalTopMetrics min = buildFilled(2, top(SortValue.from(1.0), randomDouble())); InternalTopMetrics max = buildFilled(3, top(SortValue.from(7.0), randomDouble())); - InternalTopMetrics[] metrics = new InternalTopMetrics[] { - first, max, min, buildEmpty(), buildEmpty(), - }; + InternalTopMetrics[] metrics = new InternalTopMetrics[] { first, max, min, buildEmpty(), buildEmpty(), }; InternalTopMetrics winner = first.getSortOrder() == SortOrder.ASC ? 
min : max; InternalTopMetrics reduced = reduce(metrics); assertThat(reduced.getName(), equalTo("test")); @@ -57,16 +55,16 @@ public void testNonZeroSize() { InternalTopMetrics first = buildFilled(SortOrder.DESC, 3, top(SortValue.from(2.0), 1)); InternalTopMetrics second = buildFilled(2, top(SortValue.from(3.0), 2), top(SortValue.from(1.0), 2)); InternalTopMetrics third = buildFilled(3, top(SortValue.from(8.0), 4), top(SortValue.from(7.0), 5)); - InternalTopMetrics[] metrics = new InternalTopMetrics[] { - first, second, third, buildEmpty(), buildEmpty(), - }; + InternalTopMetrics[] metrics = new InternalTopMetrics[] { first, second, third, buildEmpty(), buildEmpty(), }; InternalTopMetrics reduced = reduce(metrics); assertThat(reduced.getName(), equalTo("test")); assertThat(reduced.getMetricNames(), equalTo(singletonList("test"))); assertThat(reduced.getSortOrder(), equalTo(first.getSortOrder())); assertThat(reduced.getSize(), equalTo(first.getSize())); - assertThat(reduced.getTopMetrics(), equalTo(List.of( - third.getTopMetrics().get(0), third.getTopMetrics().get(1), second.getTopMetrics().get(0)))); + assertThat( + reduced.getTopMetrics(), + equalTo(List.of(third.getTopMetrics().get(0), third.getTopMetrics().get(1), second.getTopMetrics().get(0))) + ); } public void testDifferentTypes() { diff --git a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/topmetrics/InternalTopMetricsTests.java b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/topmetrics/InternalTopMetricsTests.java index 8ae62e5057bde..2b7f88e458079 100644 --- a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/topmetrics/InternalTopMetricsTests.java +++ b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/topmetrics/InternalTopMetricsTests.java @@ -9,12 +9,12 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.client.analytics.ParsedTopMetrics; -import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.Strings; import org.elasticsearch.common.time.DateFormatter; import org.elasticsearch.common.time.DateUtils; import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.index.mapper.DateFieldMapper; import org.elasticsearch.plugins.SearchPlugin; import org.elasticsearch.search.DocValueFormat; @@ -58,10 +58,11 @@ public class InternalTopMetricsTests extends InternalAggregationTestCase top = singletonList( - new InternalTopMetrics.TopMetric(DocValueFormat.RAW, SortValue.from(1.0), singletonList(metricOneDouble))); + new InternalTopMetrics.TopMetric(DocValueFormat.RAW, SortValue.from(1.0), singletonList(metricOneDouble)) + ); InternalTopMetrics tm = new InternalTopMetrics("test", sortOrder, singletonList("test"), 1, top, null); - assertThat(Strings.toString(tm, true, true), equalTo( - "{\n" + - " \"test\" : {\n" + - " \"top\" : [\n" + - " {\n" + - " \"sort\" : [\n" + - " 1.0\n" + - " ],\n" + - " \"metrics\" : {\n" + - " \"test\" : 1.0\n" + - " }\n" + - " }\n" + - " ]\n" + - " }\n" + - "}")); + assertThat( + Strings.toString(tm, true, true), + equalTo( + "{\n" + + " \"test\" : {\n" + + " \"top\" : [\n" + + " {\n" + + " \"sort\" : [\n" + + " 1.0\n" + + " ],\n" + + " \"metrics\" : {\n" + + " \"test\" : 1.0\n" + + " }\n" + + " }\n" + + " ]\n" + + " }\n" + + "}" + ) + ); } public void testToXContentDateSortValue() throws IOException { SortValue sortValue = 
SortValue.from(ZonedDateTime.parse("2007-12-03T10:15:30Z").toInstant().toEpochMilli()); - List top = singletonList(new InternalTopMetrics.TopMetric( - strictDateTime(), sortValue, singletonList(metricOneDouble))); + List top = singletonList( + new InternalTopMetrics.TopMetric(strictDateTime(), sortValue, singletonList(metricOneDouble)) + ); InternalTopMetrics tm = new InternalTopMetrics("test", sortOrder, singletonList("test"), 1, top, null); - assertThat(Strings.toString(tm, true, true), equalTo( - "{\n" + - " \"test\" : {\n" + - " \"top\" : [\n" + - " {\n" + - " \"sort\" : [\n" + - " \"2007-12-03T10:15:30.000Z\"\n" + - " ],\n" + - " \"metrics\" : {\n" + - " \"test\" : 1.0\n" + - " }\n" + - " }\n" + - " ]\n" + - " }\n" + - "}")); + assertThat( + Strings.toString(tm, true, true), + equalTo( + "{\n" + + " \"test\" : {\n" + + " \"top\" : [\n" + + " {\n" + + " \"sort\" : [\n" + + " \"2007-12-03T10:15:30.000Z\"\n" + + " ],\n" + + " \"metrics\" : {\n" + + " \"test\" : 1.0\n" + + " }\n" + + " }\n" + + " ]\n" + + " }\n" + + "}" + ) + ); } public void testToXContentLongMetricValue() throws IOException { List top = singletonList( - new InternalTopMetrics.TopMetric(DocValueFormat.RAW, SortValue.from(1.0), singletonList(metricOneLong))); + new InternalTopMetrics.TopMetric(DocValueFormat.RAW, SortValue.from(1.0), singletonList(metricOneLong)) + ); InternalTopMetrics tm = new InternalTopMetrics("test", sortOrder, singletonList("test"), 1, top, null); - assertThat(Strings.toString(tm, true, true), equalTo( - "{\n" + - " \"test\" : {\n" + - " \"top\" : [\n" + - " {\n" + - " \"sort\" : [\n" + - " 1.0\n" + - " ],\n" + - " \"metrics\" : {\n" + - " \"test\" : 1\n" + - " }\n" + - " }\n" + - " ]\n" + - " }\n" + - "}")); + assertThat( + Strings.toString(tm, true, true), + equalTo( + "{\n" + + " \"test\" : {\n" + + " \"top\" : [\n" + + " {\n" + + " \"sort\" : [\n" + + " 1.0\n" + + " ],\n" + + " \"metrics\" : {\n" + + " \"test\" : 1\n" + + " }\n" + + " }\n" + + " ]\n" + + " }\n" + + "}" + ) + ); } public void testToXContentDateMetricValue() throws IOException { InternalTopMetrics.MetricValue metricValue = new InternalTopMetrics.MetricValue( - strictDateTime(), SortValue.from(ZonedDateTime.parse("2007-12-03T10:15:30Z").toInstant().toEpochMilli())); + strictDateTime(), + SortValue.from(ZonedDateTime.parse("2007-12-03T10:15:30Z").toInstant().toEpochMilli()) + ); List top = singletonList( - new InternalTopMetrics.TopMetric(DocValueFormat.RAW, SortValue.from(1.0), singletonList(metricValue))); + new InternalTopMetrics.TopMetric(DocValueFormat.RAW, SortValue.from(1.0), singletonList(metricValue)) + ); InternalTopMetrics tm = new InternalTopMetrics("test", sortOrder, singletonList("test"), 1, top, null); - assertThat(Strings.toString(tm, true, true), equalTo( - "{\n" + - " \"test\" : {\n" + - " \"top\" : [\n" + - " {\n" + - " \"sort\" : [\n" + - " 1.0\n" + - " ],\n" + - " \"metrics\" : {\n" + - " \"test\" : \"2007-12-03T10:15:30.000Z\"\n" + - " }\n" + - " }\n" + - " ]\n" + - " }\n" + - "}")); + assertThat( + Strings.toString(tm, true, true), + equalTo( + "{\n" + + " \"test\" : {\n" + + " \"top\" : [\n" + + " {\n" + + " \"sort\" : [\n" + + " 1.0\n" + + " ],\n" + + " \"metrics\" : {\n" + + " \"test\" : \"2007-12-03T10:15:30.000Z\"\n" + + " }\n" + + " }\n" + + " ]\n" + + " }\n" + + "}" + ) + ); } public void testToXContentManyMetrics() throws IOException { - List top = singletonList(new InternalTopMetrics.TopMetric( - DocValueFormat.RAW, SortValue.from(1.0), List.of(metricOneDouble, metricOneLong, metricOneDouble))); + 
List top = singletonList( + new InternalTopMetrics.TopMetric( + DocValueFormat.RAW, + SortValue.from(1.0), + List.of(metricOneDouble, metricOneLong, metricOneDouble) + ) + ); InternalTopMetrics tm = new InternalTopMetrics("test", sortOrder, List.of("foo", "bar", "baz"), 1, top, null); - assertThat(Strings.toString(tm, true, true), equalTo( - "{\n" + - " \"test\" : {\n" + - " \"top\" : [\n" + - " {\n" + - " \"sort\" : [\n" + - " 1.0\n" + - " ],\n" + - " \"metrics\" : {\n" + - " \"foo\" : 1.0,\n" + - " \"bar\" : 1,\n" + - " \"baz\" : 1.0\n" + - " }\n" + - " }\n" + - " ]\n" + - " }\n" + - "}")); + assertThat( + Strings.toString(tm, true, true), + equalTo( + "{\n" + + " \"test\" : {\n" + + " \"top\" : [\n" + + " {\n" + + " \"sort\" : [\n" + + " 1.0\n" + + " ],\n" + + " \"metrics\" : {\n" + + " \"foo\" : 1.0,\n" + + " \"bar\" : 1,\n" + + " \"baz\" : 1.0\n" + + " }\n" + + " }\n" + + " ]\n" + + " }\n" + + "}" + ) + ); } public void testToXContentManyTopMetrics() throws IOException { List top = List.of( - new InternalTopMetrics.TopMetric(DocValueFormat.RAW, SortValue.from(1.0), singletonList(metricOneDouble)), - new InternalTopMetrics.TopMetric(DocValueFormat.RAW, SortValue.from(2.0), singletonList(metricOneLong))); + new InternalTopMetrics.TopMetric(DocValueFormat.RAW, SortValue.from(1.0), singletonList(metricOneDouble)), + new InternalTopMetrics.TopMetric(DocValueFormat.RAW, SortValue.from(2.0), singletonList(metricOneLong)) + ); InternalTopMetrics tm = new InternalTopMetrics("test", sortOrder, singletonList("test"), 2, top, null); - assertThat(Strings.toString(tm, true, true), equalTo( - "{\n" + - " \"test\" : {\n" + - " \"top\" : [\n" + - " {\n" + - " \"sort\" : [\n" + - " 1.0\n" + - " ],\n" + - " \"metrics\" : {\n" + - " \"test\" : 1.0\n" + - " }\n" + - " },\n" + - " {\n" + - " \"sort\" : [\n" + - " 2.0\n" + - " ],\n" + - " \"metrics\" : {\n" + - " \"test\" : 1\n" + - " }\n" + - " }\n" + - " ]\n" + - " }\n" + - "}")); + assertThat( + Strings.toString(tm, true, true), + equalTo( + "{\n" + + " \"test\" : {\n" + + " \"top\" : [\n" + + " {\n" + + " \"sort\" : [\n" + + " 1.0\n" + + " ],\n" + + " \"metrics\" : {\n" + + " \"test\" : 1.0\n" + + " }\n" + + " },\n" + + " {\n" + + " \"sort\" : [\n" + + " 2.0\n" + + " ],\n" + + " \"metrics\" : {\n" + + " \"test\" : 1\n" + + " }\n" + + " }\n" + + " ]\n" + + " }\n" + + "}" + ) + ); } public void testGetProperty() { @@ -259,24 +295,41 @@ private InternalTopMetrics resultWithAllTypes() { @Override protected List getNamedXContents() { - return CollectionUtils.appendToCopy(super.getNamedXContents(), new NamedXContentRegistry.Entry(Aggregation.class, - new ParseField(TopMetricsAggregationBuilder.NAME), (p, c) -> ParsedTopMetrics.PARSER.parse(p, (String) c))); + return CollectionUtils.appendToCopy( + super.getNamedXContents(), + new NamedXContentRegistry.Entry( + Aggregation.class, + new ParseField(TopMetricsAggregationBuilder.NAME), + (p, c) -> ParsedTopMetrics.PARSER.parse(p, (String) c) + ) + ); } @Override protected InternalTopMetrics createTestInstance(String name, Map metadata) { - return createTestInstance(name, metadata, InternalAggregationTestCase::randomNumericDocValueFormat, - InternalTopMetricsTests::randomSortValue); + return createTestInstance( + name, + metadata, + InternalAggregationTestCase::randomNumericDocValueFormat, + InternalTopMetricsTests::randomSortValue + ); } - private InternalTopMetrics createTestInstance(String name, - Map metadata, Supplier randomDocValueFormat, - Function sortValueSupplier) { + private InternalTopMetrics 
createTestInstance( + String name, + Map metadata, + Supplier randomDocValueFormat, + Function sortValueSupplier + ) { int metricCount = between(1, 5); List metricNames = randomMetricNames(metricCount); int size = between(1, 100); - List topMetrics = randomTopMetrics(randomDocValueFormat, between(0, size), metricCount, - sortValueSupplier); + List topMetrics = randomTopMetrics( + randomDocValueFormat, + between(0, size), + metricCount, + sortValueSupplier + ); return new InternalTopMetrics(name, sortOrder, metricNames, size, topMetrics, metadata); } @@ -288,29 +341,35 @@ protected InternalTopMetrics mutateInstance(InternalTopMetrics instance) throws int size = instance.getSize(); List topMetrics = instance.getTopMetrics(); switch (randomInt(4)) { - case 0: - name = randomAlphaOfLength(6); - break; - case 1: - sortOrder = sortOrder == SortOrder.ASC ? SortOrder.DESC : SortOrder.ASC; - Collections.reverse(topMetrics); - break; - case 2: - metricNames = new ArrayList<>(metricNames); - metricNames.set(randomInt(metricNames.size() - 1), randomAlphaOfLength(6)); - break; - case 3: - size = randomValueOtherThan(size, () -> between(1, 100)); - break; - case 4: - int fixedSize = size; - int fixedMetricsSize = metricNames.size(); - topMetrics = randomValueOtherThan(topMetrics, () -> randomTopMetrics( - InternalAggregationTestCase::randomNumericDocValueFormat, between(1, fixedSize), fixedMetricsSize, - InternalTopMetricsTests::randomSortValue)); - break; - default: - throw new IllegalArgumentException("bad mutation"); + case 0: + name = randomAlphaOfLength(6); + break; + case 1: + sortOrder = sortOrder == SortOrder.ASC ? SortOrder.DESC : SortOrder.ASC; + Collections.reverse(topMetrics); + break; + case 2: + metricNames = new ArrayList<>(metricNames); + metricNames.set(randomInt(metricNames.size() - 1), randomAlphaOfLength(6)); + break; + case 3: + size = randomValueOtherThan(size, () -> between(1, 100)); + break; + case 4: + int fixedSize = size; + int fixedMetricsSize = metricNames.size(); + topMetrics = randomValueOtherThan( + topMetrics, + () -> randomTopMetrics( + InternalAggregationTestCase::randomNumericDocValueFormat, + between(1, fixedSize), + fixedMetricsSize, + InternalTopMetricsTests::randomSortValue + ) + ); + break; + default: + throw new IllegalArgumentException("bad mutation"); } return new InternalTopMetrics(name, sortOrder, metricNames, size, topMetrics, instance.getMetadata()); } @@ -321,8 +380,12 @@ protected InternalTopMetrics mutateInstance(InternalTopMetrics instance) throws * implement {@link Object#equals(Object)}. */ public void testFromXContentDates() throws IOException { - InternalTopMetrics aggregation = createTestInstance(randomAlphaOfLength(3), - emptyMap(), InternalTopMetricsTests::strictDateTime, InternalTopMetricsTests::randomSortValue); + InternalTopMetrics aggregation = createTestInstance( + randomAlphaOfLength(3), + emptyMap(), + InternalTopMetricsTests::strictDateTime, + InternalTopMetricsTests::randomSortValue + ); ParsedAggregation parsedAggregation = parseAndAssert(aggregation, randomBoolean(), randomBoolean()); assertFromXContent(aggregation, parsedAggregation); } @@ -335,8 +398,9 @@ protected void assertFromXContent(InternalTopMetrics aggregation, ParsedAggregat for (int i = 0; i < parsed.getTopMetrics().size(); i++) { ParsedTopMetrics.TopMetrics parsedTop = parsed.getTopMetrics().get(i); InternalTopMetrics.TopMetric internalTop = aggregation.getTopMetrics().get(i); - Object expectedSort = internalTop.getSortFormat() == DocValueFormat.RAW ? 
- internalTop.getSortValue().getKey() : internalTop.getSortValue().format(internalTop.getSortFormat()); + Object expectedSort = internalTop.getSortFormat() == DocValueFormat.RAW + ? internalTop.getSortValue().getKey() + : internalTop.getSortValue().format(internalTop.getSortFormat()); assertThat(parsedTop.getSort(), equalTo(singletonList(expectedSort))); assertThat(parsedTop.getMetrics().keySet(), hasSize(aggregation.getMetricNames().size())); for (int m = 0; m < aggregation.getMetricNames().size(); m++) { @@ -389,18 +453,20 @@ protected void assertReduced(InternalTopMetrics reduced, List randomTopMetrics(Supplier randomDocValueFormat, int length, int metricCount, - Function sortValueSupplier) { - return IntStream.range(0, length) - .mapToObj(i -> { - DocValueFormat docValueFormat = randomDocValueFormat.get(); - return new InternalTopMetrics.TopMetric( - docValueFormat, sortValueSupplier.apply(docValueFormat), - randomMetricValues(randomDocValueFormat, metricCount, sortValueSupplier) - ); - }) - .sorted((lhs, rhs) -> sortOrder.reverseMul() * lhs.getSortValue().compareTo(rhs.getSortValue())) - .collect(toList()); + private List randomTopMetrics( + Supplier randomDocValueFormat, + int length, + int metricCount, + Function sortValueSupplier + ) { + return IntStream.range(0, length).mapToObj(i -> { + DocValueFormat docValueFormat = randomDocValueFormat.get(); + return new InternalTopMetrics.TopMetric( + docValueFormat, + sortValueSupplier.apply(docValueFormat), + randomMetricValues(randomDocValueFormat, metricCount, sortValueSupplier) + ); + }).sorted((lhs, rhs) -> sortOrder.reverseMul() * lhs.getSortValue().compareTo(rhs.getSortValue())).collect(toList()); } static List randomMetricNames(int metricCount) { @@ -411,22 +477,25 @@ static List randomMetricNames(int metricCount) { return new ArrayList<>(names); } - private List randomMetricValues(Supplier randomDocValueFormat, int metricCount, - Function sortValueSupplier) { - return IntStream.range(0, metricCount) - .mapToObj(i -> { - DocValueFormat format = randomDocValueFormat.get(); - return new InternalTopMetrics.MetricValue(format, sortValueSupplier.apply(format)); - }) - .collect(toList()); + private List randomMetricValues( + Supplier randomDocValueFormat, + int metricCount, + Function sortValueSupplier + ) { + return IntStream.range(0, metricCount).mapToObj(i -> { + DocValueFormat format = randomDocValueFormat.get(); + return new InternalTopMetrics.MetricValue(format, sortValueSupplier.apply(format)); + }).collect(toList()); } private static DocValueFormat strictDateTime() { return new DocValueFormat.DateTime( - DateFormatter.forPattern("strict_date_time"), ZoneId.of("UTC"), DateFieldMapper.Resolution.MILLISECONDS); + DateFormatter.forPattern("strict_date_time"), + ZoneId.of("UTC"), + DateFieldMapper.Resolution.MILLISECONDS + ); } - private static SortValue randomSortValue() { if (randomBoolean()) { return SortValue.from(randomLong()); @@ -435,17 +504,15 @@ private static SortValue randomSortValue() { } private static SortValue randomSortValue(DocValueFormat docValueFormat) { - if(docValueFormat instanceof DocValueFormat.DateTime){ + if (docValueFormat instanceof DocValueFormat.DateTime) { if (randomBoolean()) { return SortValue.from(randomLongBetween(DateUtils.MAX_MILLIS_BEFORE_MINUS_9999, DateUtils.MAX_MILLIS_BEFORE_9999)); } - return SortValue.from( - randomDoubleBetween(DateUtils.MAX_MILLIS_BEFORE_MINUS_9999, DateUtils.MAX_MILLIS_BEFORE_9999, true)); + return SortValue.from(randomDoubleBetween(DateUtils.MAX_MILLIS_BEFORE_MINUS_9999, 
DateUtils.MAX_MILLIS_BEFORE_9999, true)); } - return randomSortValue(); + return randomSortValue(); } - @Override protected Predicate excludePathsFromXContentInsertion() { return path -> path.endsWith(".metrics"); diff --git a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/topmetrics/TopMetricsAggregationBuilderTests.java b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/topmetrics/TopMetricsAggregationBuilderTests.java index 14478ea103444..be06f88c98ee8 100644 --- a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/topmetrics/TopMetricsAggregationBuilderTests.java +++ b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/topmetrics/TopMetricsAggregationBuilderTests.java @@ -7,10 +7,10 @@ package org.elasticsearch.xpack.analytics.topmetrics; -import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.Writeable.Reader; import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; @@ -36,15 +36,22 @@ public class TopMetricsAggregationBuilderTests extends AbstractSerializingTestCase { @Override protected NamedWriteableRegistry getNamedWriteableRegistry() { - return new NamedWriteableRegistry(Arrays.asList( - new NamedWriteableRegistry.Entry(SortBuilder.class, FieldSortBuilder.NAME, FieldSortBuilder::new))); + return new NamedWriteableRegistry( + Arrays.asList(new NamedWriteableRegistry.Entry(SortBuilder.class, FieldSortBuilder.NAME, FieldSortBuilder::new)) + ); } @Override protected NamedXContentRegistry xContentRegistry() { - return new NamedXContentRegistry(Arrays.asList( - new NamedXContentRegistry.Entry(BaseAggregationBuilder.class, new ParseField(TopMetricsAggregationBuilder.NAME), - (p, c) -> TopMetricsAggregationBuilder.PARSER.parse(p, (String) c)))); + return new NamedXContentRegistry( + Arrays.asList( + new NamedXContentRegistry.Entry( + BaseAggregationBuilder.class, + new ParseField(TopMetricsAggregationBuilder.NAME), + (p, c) -> TopMetricsAggregationBuilder.PARSER.parse(p, (String) c) + ) + ) + ); } @Override @@ -69,27 +76,24 @@ protected Reader instanceReader() { @Override protected TopMetricsAggregationBuilder createTestInstance() { List> sortBuilders = singletonList( - new FieldSortBuilder(randomAlphaOfLength(5)).order(randomFrom(SortOrder.values()))); - List metricFields = InternalTopMetricsTests.randomMetricNames(between(1, 5)).stream() - .map(name -> { - MultiValuesSourceFieldConfig.Builder metricField = new MultiValuesSourceFieldConfig.Builder(); - metricField.setFieldName(randomAlphaOfLength(5)).setMissing(1.0); - return metricField.build(); - }) - .collect(toList()); + new FieldSortBuilder(randomAlphaOfLength(5)).order(randomFrom(SortOrder.values())) + ); + List metricFields = InternalTopMetricsTests.randomMetricNames(between(1, 5)).stream().map(name -> { + MultiValuesSourceFieldConfig.Builder metricField = new MultiValuesSourceFieldConfig.Builder(); + metricField.setFieldName(randomAlphaOfLength(5)).setMissing(1.0); + return metricField.build(); + }).collect(toList()); return new TopMetricsAggregationBuilder(randomAlphaOfLength(5), sortBuilders, between(1, 100), metricFields); } public void testClientBuilder() throws IOException { - 
AbstractXContentTestCase.xContentTester( - this::createParser, this::createTestInstance, this::toXContentThroughClientBuilder, - p -> { - p.nextToken(); - AggregatorFactories.Builder b = AggregatorFactories.parseAggregators(p); - assertThat(b.getAggregatorFactories(), hasSize(1)); - assertThat(b.getPipelineAggregatorFactories(), empty()); - return (TopMetricsAggregationBuilder) b.getAggregatorFactories().iterator().next(); - } ).test(); + AbstractXContentTestCase.xContentTester(this::createParser, this::createTestInstance, this::toXContentThroughClientBuilder, p -> { + p.nextToken(); + AggregatorFactories.Builder b = AggregatorFactories.parseAggregators(p); + assertThat(b.getAggregatorFactories(), hasSize(1)); + assertThat(b.getPipelineAggregatorFactories(), empty()); + return (TopMetricsAggregationBuilder) b.getAggregatorFactories().iterator().next(); + }).test(); } private void toXContentThroughClientBuilder(TopMetricsAggregationBuilder serverBuilder, XContentBuilder builder) throws IOException { @@ -99,12 +103,14 @@ private void toXContentThroughClientBuilder(TopMetricsAggregationBuilder serverB } private org.elasticsearch.client.analytics.TopMetricsAggregationBuilder createClientBuilder( - TopMetricsAggregationBuilder serverBuilder) { + TopMetricsAggregationBuilder serverBuilder + ) { assertThat(serverBuilder.getSortBuilders(), hasSize(1)); return new org.elasticsearch.client.analytics.TopMetricsAggregationBuilder( - serverBuilder.getName(), - serverBuilder.getSortBuilders().get(0), - serverBuilder.getSize(), - serverBuilder.getMetricFields().stream().map(MultiValuesSourceFieldConfig::getFieldName).toArray(String[]::new)); + serverBuilder.getName(), + serverBuilder.getSortBuilders().get(0), + serverBuilder.getSize(), + serverBuilder.getMetricFields().stream().map(MultiValuesSourceFieldConfig::getFieldName).toArray(String[]::new) + ); } } diff --git a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/topmetrics/TopMetricsAggregatorMetricsTests.java b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/topmetrics/TopMetricsAggregatorMetricsTests.java index 1288d086edb63..0ab41367b6d50 100644 --- a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/topmetrics/TopMetricsAggregatorMetricsTests.java +++ b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/topmetrics/TopMetricsAggregatorMetricsTests.java @@ -10,9 +10,9 @@ import org.apache.lucene.index.SortedNumericDocValues; import org.apache.lucene.index.SortedSetDocValues; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.common.time.DateFormatter; import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.index.fielddata.SortedNumericDoubleValues; import org.elasticsearch.index.mapper.DateFieldMapper; import org.elasticsearch.search.DocValueFormat; diff --git a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/topmetrics/TopMetricsAggregatorTests.java b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/topmetrics/TopMetricsAggregatorTests.java index 4b157fa4d96eb..ac30d0e079d63 100644 --- a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/topmetrics/TopMetricsAggregatorTests.java +++ b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/topmetrics/TopMetricsAggregatorTests.java @@ -27,13 +27,13 @@ import org.apache.lucene.search.TermQuery; import 
org.apache.lucene.store.Directory; import org.apache.lucene.util.NumericUtils; -import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.common.breaker.CircuitBreaker; import org.elasticsearch.common.breaker.CircuitBreakingException; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.MockBigArrays; +import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.index.fielddata.ScriptDocValues; import org.elasticsearch.index.mapper.GeoPointFieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; @@ -86,20 +86,20 @@ import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; - public class TopMetricsAggregatorTests extends AggregatorTestCase { public void testNoDocs() throws IOException { - InternalTopMetrics result = collect(simpleBuilder(), new MatchAllDocsQuery(), writer -> {}, - doubleFields()); + InternalTopMetrics result = collect(simpleBuilder(), new MatchAllDocsQuery(), writer -> {}, doubleFields()); assertThat(result.getSortOrder(), equalTo(SortOrder.ASC)); assertThat(result.getTopMetrics(), equalTo(emptyList())); } public void testUnmappedMetric() throws IOException { - InternalTopMetrics result = collect(simpleBuilder(), new MatchAllDocsQuery(), writer -> { - writer.addDocument(singletonList(doubleField("s", 1.0))); - }, - numberFieldType(NumberType.DOUBLE, "s")); + InternalTopMetrics result = collect( + simpleBuilder(), + new MatchAllDocsQuery(), + writer -> { writer.addDocument(singletonList(doubleField("s", 1.0))); }, + numberFieldType(NumberType.DOUBLE, "s") + ); assertThat(result.getSortOrder(), equalTo(SortOrder.ASC)); assertThat(result.getTopMetrics(), hasSize(1)); assertThat(result.getTopMetrics().get(0).getSortValue(), equalTo(SortValue.from(1.0))); @@ -107,10 +107,12 @@ public void testUnmappedMetric() throws IOException { } public void testMissingValueForDoubleMetric() throws IOException { - InternalTopMetrics result = collect(simpleBuilder(), new MatchAllDocsQuery(), writer -> { - writer.addDocument(singletonList(doubleField("s", 1.0))); - }, - doubleFields()); + InternalTopMetrics result = collect( + simpleBuilder(), + new MatchAllDocsQuery(), + writer -> { writer.addDocument(singletonList(doubleField("s", 1.0))); }, + doubleFields() + ); assertThat(result.getSortOrder(), equalTo(SortOrder.ASC)); assertThat(result.getTopMetrics(), hasSize(1)); assertThat(result.getTopMetrics().get(0).getSortValue(), equalTo(SortValue.from(1.0))); @@ -118,10 +120,12 @@ public void testMissingValueForDoubleMetric() throws IOException { } public void testMissingValueForLongMetric() throws IOException { - InternalTopMetrics result = collect(simpleBuilder(), new MatchAllDocsQuery(), writer -> { - writer.addDocument(singletonList(longField("s", 1))); - }, - longFields()); + InternalTopMetrics result = collect( + simpleBuilder(), + new MatchAllDocsQuery(), + writer -> { writer.addDocument(singletonList(longField("s", 1))); }, + longFields() + ); assertThat(result.getSortOrder(), equalTo(SortOrder.ASC)); assertThat(result.getTopMetrics(), hasSize(1)); assertThat(result.getTopMetrics().get(0).getSortValue(), equalTo(SortValue.from(1))); @@ -129,29 +133,32 @@ public void testMissingValueForLongMetric() throws IOException { } public void testActualValueForDoubleMetric() throws IOException { - InternalTopMetrics result = collect(simpleBuilder(), new MatchAllDocsQuery(), writer -> { - 
writer.addDocument(Arrays.asList(doubleField("s", 1.0), doubleField("m", 2.0))); - }, - doubleFields()); + InternalTopMetrics result = collect( + simpleBuilder(), + new MatchAllDocsQuery(), + writer -> { writer.addDocument(Arrays.asList(doubleField("s", 1.0), doubleField("m", 2.0))); }, + doubleFields() + ); assertThat(result.getSortOrder(), equalTo(SortOrder.ASC)); assertThat(result.getTopMetrics(), equalTo(singletonList(top(1.0, 2.0)))); } public void testActualValueForLongMetric() throws IOException { - InternalTopMetrics result = collect(simpleBuilder(), new MatchAllDocsQuery(), writer -> { - writer.addDocument(Arrays.asList(longField("s", 1), longField("m", 2))); - }, - longFields()); + InternalTopMetrics result = collect( + simpleBuilder(), + new MatchAllDocsQuery(), + writer -> { writer.addDocument(Arrays.asList(longField("s", 1), longField("m", 2))); }, + longFields() + ); assertThat(result.getSortOrder(), equalTo(SortOrder.ASC)); assertThat(result.getTopMetrics(), equalTo(singletonList(top(1, 2)))); } private InternalTopMetrics collectFromDoubles(TopMetricsAggregationBuilder builder) throws IOException { return collect(builder, new MatchAllDocsQuery(), writer -> { - writer.addDocument(Arrays.asList(doubleField("s", 1.0), doubleField("m", 2.0))); - writer.addDocument(Arrays.asList(doubleField("s", 2.0), doubleField("m", 3.0))); - }, - doubleFields()); + writer.addDocument(Arrays.asList(doubleField("s", 1.0), doubleField("m", 2.0))); + writer.addDocument(Arrays.asList(doubleField("s", 2.0), doubleField("m", 3.0))); + }, doubleFields()); } public void testSortByDoubleAscending() throws IOException { @@ -181,10 +188,9 @@ public void testSortByDoubleTwoHits() throws IOException { public void testSortByFloatAscending() throws IOException { TopMetricsAggregationBuilder builder = simpleBuilder(new FieldSortBuilder("s").order(SortOrder.ASC)); InternalTopMetrics result = collect(builder, new MatchAllDocsQuery(), writer -> { - writer.addDocument(Arrays.asList(floatField("s", 1.0F), doubleField("m", 2.0))); - writer.addDocument(Arrays.asList(floatField("s", 2.0F), doubleField("m", 3.0))); - }, - floatAndDoubleField()); + writer.addDocument(Arrays.asList(floatField("s", 1.0F), doubleField("m", 2.0))); + writer.addDocument(Arrays.asList(floatField("s", 2.0F), doubleField("m", 3.0))); + }, floatAndDoubleField()); assertThat(result.getSortOrder(), equalTo(SortOrder.ASC)); assertThat(result.getTopMetrics(), equalTo(singletonList(top(1.0, 2.0d)))); } @@ -192,10 +198,9 @@ public void testSortByFloatAscending() throws IOException { public void testSortByFloatDescending() throws IOException { TopMetricsAggregationBuilder builder = simpleBuilder(new FieldSortBuilder("s").order(SortOrder.DESC)); InternalTopMetrics result = collect(builder, new MatchAllDocsQuery(), writer -> { - writer.addDocument(Arrays.asList(floatField("s", 1.0F), doubleField("m", 2.0))); - writer.addDocument(Arrays.asList(floatField("s", 2.0F), doubleField("m", 3.0))); - }, - floatAndDoubleField()); + writer.addDocument(Arrays.asList(floatField("s", 1.0F), doubleField("m", 2.0))); + writer.addDocument(Arrays.asList(floatField("s", 2.0F), doubleField("m", 3.0))); + }, floatAndDoubleField()); assertThat(result.getSortOrder(), equalTo(SortOrder.DESC)); assertThat(result.getTopMetrics(), equalTo(singletonList(top(2.0, 3.0)))); } @@ -203,10 +208,9 @@ public void testSortByFloatDescending() throws IOException { public void testSortByLongAscending() throws IOException { TopMetricsAggregationBuilder builder = simpleBuilder(new 
FieldSortBuilder("s").order(SortOrder.ASC)); InternalTopMetrics result = collect(builder, new MatchAllDocsQuery(), writer -> { - writer.addDocument(Arrays.asList(longField("s", 10), doubleField("m", 2.0))); - writer.addDocument(Arrays.asList(longField("s", 20), doubleField("m", 3.0))); - }, - longAndDoubleField()); + writer.addDocument(Arrays.asList(longField("s", 10), doubleField("m", 2.0))); + writer.addDocument(Arrays.asList(longField("s", 20), doubleField("m", 3.0))); + }, longAndDoubleField()); assertThat(result.getSortOrder(), equalTo(SortOrder.ASC)); assertThat(result.getTopMetrics(), equalTo(singletonList(top(10, 2.0)))); } @@ -214,10 +218,9 @@ public void testSortByLongAscending() throws IOException { public void testSortByLongDescending() throws IOException { TopMetricsAggregationBuilder builder = simpleBuilder(new FieldSortBuilder("s").order(SortOrder.DESC)); InternalTopMetrics result = collect(builder, new MatchAllDocsQuery(), writer -> { - writer.addDocument(Arrays.asList(longField("s", 10), doubleField("m", 2.0))); - writer.addDocument(Arrays.asList(longField("s", 20), doubleField("m", 3.0))); - }, - longAndDoubleField()); + writer.addDocument(Arrays.asList(longField("s", 10), doubleField("m", 2.0))); + writer.addDocument(Arrays.asList(longField("s", 20), doubleField("m", 3.0))); + }, longAndDoubleField()); assertThat(result.getSortOrder(), equalTo(SortOrder.DESC)); assertThat(result.getTopMetrics(), equalTo(singletonList(top(20, 3.0)))); } @@ -225,10 +228,9 @@ public void testSortByLongDescending() throws IOException { public void testSortByScoreDescending() throws IOException { TopMetricsAggregationBuilder builder = simpleBuilder(new ScoreSortBuilder().order(SortOrder.DESC)); InternalTopMetrics result = collect(builder, boostFoo(), writer -> { - writer.addDocument(Arrays.asList(textField("s", "foo"), doubleField("m", 2.0))); - writer.addDocument(Arrays.asList(textField("s", "bar"), doubleField("m", 3.0))); - }, - textAndDoubleField()); + writer.addDocument(Arrays.asList(textField("s", "foo"), doubleField("m", 2.0))); + writer.addDocument(Arrays.asList(textField("s", "bar"), doubleField("m", 3.0))); + }, textAndDoubleField()); assertThat(result.getSortOrder(), equalTo(SortOrder.DESC)); assertThat(result.getTopMetrics(), equalTo(singletonList(top(2.0, 2.0)))); } @@ -236,10 +238,9 @@ public void testSortByScoreDescending() throws IOException { public void testSortByScoreAscending() throws IOException { TopMetricsAggregationBuilder builder = simpleBuilder(new ScoreSortBuilder().order(SortOrder.ASC)); InternalTopMetrics result = collect(builder, boostFoo(), writer -> { - writer.addDocument(Arrays.asList(textField("s", "foo"), doubleField("m", 2.0))); - writer.addDocument(Arrays.asList(textField("s", "bar"), doubleField("m", 3.0))); - }, - textAndDoubleField()); + writer.addDocument(Arrays.asList(textField("s", "foo"), doubleField("m", 2.0))); + writer.addDocument(Arrays.asList(textField("s", "bar"), doubleField("m", 3.0))); + }, textAndDoubleField()); assertThat(result.getSortOrder(), equalTo(SortOrder.ASC)); assertThat(result.getTopMetrics(), equalTo(singletonList(top(1.0, 3.0)))); } @@ -247,10 +248,9 @@ public void testSortByScoreAscending() throws IOException { public void testSortByScriptDescending() throws IOException { TopMetricsAggregationBuilder builder = simpleBuilder(scriptSortOnS().order(SortOrder.DESC)); InternalTopMetrics result = collect(builder, new MatchAllDocsQuery(), writer -> { - writer.addDocument(Arrays.asList(doubleField("s", 2), doubleField("m", 2.0))); - 
writer.addDocument(Arrays.asList(doubleField("s", 1), doubleField("m", 3.0))); - }, - doubleFields()); + writer.addDocument(Arrays.asList(doubleField("s", 2), doubleField("m", 2.0))); + writer.addDocument(Arrays.asList(doubleField("s", 1), doubleField("m", 3.0))); + }, doubleFields()); assertThat(result.getSortOrder(), equalTo(SortOrder.DESC)); assertThat(result.getTopMetrics(), equalTo(singletonList(top(2.0, 2.0)))); } @@ -258,10 +258,9 @@ public void testSortByScriptDescending() throws IOException { public void testSortByScriptAscending() throws IOException { TopMetricsAggregationBuilder builder = simpleBuilder(scriptSortOnS().order(SortOrder.ASC)); InternalTopMetrics result = collect(builder, new MatchAllDocsQuery(), writer -> { - writer.addDocument(Arrays.asList(doubleField("s", 2), doubleField("m", 2.0))); - writer.addDocument(Arrays.asList(doubleField("s", 1), doubleField("m", 3.0))); - }, - doubleFields()); + writer.addDocument(Arrays.asList(doubleField("s", 2), doubleField("m", 2.0))); + writer.addDocument(Arrays.asList(doubleField("s", 1), doubleField("m", 3.0))); + }, doubleFields()); assertThat(result.getSortOrder(), equalTo(SortOrder.ASC)); assertThat(result.getTopMetrics(), equalTo(singletonList(top(1.0, 3.0)))); } @@ -270,20 +269,20 @@ public void testSortByStringScriptFails() throws IOException { Script script = new Script(ScriptType.INLINE, MockScriptEngine.NAME, "s", emptyMap()); TopMetricsAggregationBuilder builder = simpleBuilder(new ScriptSortBuilder(script, ScriptSortType.STRING)); Exception e = expectThrows(IllegalArgumentException.class, () -> collect(builder, boostFoo(), writer -> { - writer.addDocument(Arrays.asList(textField("s", "foo"), doubleField("m", 2.0))); - writer.addDocument(Arrays.asList(textField("s", "bar"), doubleField("m", 3.0))); - }, - textAndDoubleField())); - assertThat(e.getMessage(), equalTo( - "error building sort for [_script]: script sorting only supported on [numeric] scripts but was [string]")); + writer.addDocument(Arrays.asList(textField("s", "foo"), doubleField("m", 2.0))); + writer.addDocument(Arrays.asList(textField("s", "bar"), doubleField("m", 3.0))); + }, textAndDoubleField())); + assertThat( + e.getMessage(), + equalTo("error building sort for [_script]: script sorting only supported on [numeric] scripts but was [string]") + ); } private InternalTopMetrics collectFromNewYorkAndLA(TopMetricsAggregationBuilder builder) throws IOException { return collect(builder, new MatchAllDocsQuery(), writer -> { writer.addDocument(Arrays.asList(geoPointField("s", 40.7128, -74.0060), doubleField("m", 2.0))); writer.addDocument(Arrays.asList(geoPointField("s", 34.0522, -118.2437), doubleField("m", 3.0))); - }, - geoPointAndDoubleField()); + }, geoPointAndDoubleField()); } public void testSortByGeoDistancDescending() throws IOException { @@ -312,11 +311,10 @@ public void testInsideTerms() throws IOException { TopMetricsAggregationBuilder builder = simpleBuilder(new FieldSortBuilder("s").order(SortOrder.ASC)); TermsAggregationBuilder terms = new TermsAggregationBuilder("terms").field("c").subAggregation(builder); Terms result = (Terms) collect(terms, new MatchAllDocsQuery(), writer -> { - writer.addDocument(Arrays.asList(doubleField("c", 1.0), doubleField("s", 1.0), doubleField("m", 2.0))); - writer.addDocument(Arrays.asList(doubleField("c", 1.0), doubleField("s", 2.0), doubleField("m", 3.0))); - writer.addDocument(Arrays.asList(doubleField("c", 2.0), doubleField("s", 4.0), doubleField("m", 9.0))); - }, - numberFieldType(NumberType.DOUBLE, "c"), 
numberFieldType(NumberType.DOUBLE, "s"), numberFieldType(NumberType.DOUBLE, "m")); + writer.addDocument(Arrays.asList(doubleField("c", 1.0), doubleField("s", 1.0), doubleField("m", 2.0))); + writer.addDocument(Arrays.asList(doubleField("c", 1.0), doubleField("s", 2.0), doubleField("m", 3.0))); + writer.addDocument(Arrays.asList(doubleField("c", 2.0), doubleField("s", 4.0), doubleField("m", 9.0))); + }, numberFieldType(NumberType.DOUBLE, "c"), numberFieldType(NumberType.DOUBLE, "s"), numberFieldType(NumberType.DOUBLE, "m")); Terms.Bucket bucket1 = result.getBuckets().get(0); assertThat(bucket1.getKey(), equalTo(1.0)); InternalTopMetrics top1 = bucket1.getAggregations().get("test"); @@ -390,22 +388,33 @@ public void testTonsOfBucketsTriggersBreaker() throws IOException { public void testManyMetrics() throws IOException { List> sorts = singletonList(new FieldSortBuilder("s").order(SortOrder.ASC)); - TopMetricsAggregationBuilder builder = new TopMetricsAggregationBuilder("test", sorts, 1, - List.of( - new MultiValuesSourceFieldConfig.Builder().setFieldName("m1").build(), - new MultiValuesSourceFieldConfig.Builder().setFieldName("m2").build(), - new MultiValuesSourceFieldConfig.Builder().setFieldName("m3").build() - )); + TopMetricsAggregationBuilder builder = new TopMetricsAggregationBuilder( + "test", + sorts, + 1, + List.of( + new MultiValuesSourceFieldConfig.Builder().setFieldName("m1").build(), + new MultiValuesSourceFieldConfig.Builder().setFieldName("m2").build(), + new MultiValuesSourceFieldConfig.Builder().setFieldName("m3").build() + ) + ); InternalTopMetrics result = collect(builder, new MatchAllDocsQuery(), writer -> { - writer.addDocument(Arrays.asList(doubleField("s", 1.0), - doubleField("m1", 12.0), longField("m2", 22), doubleField("m3", 32.0))); - writer.addDocument(Arrays.asList(doubleField("s", 2.0), - doubleField("m1", 13.0), longField("m2", 23), doubleField("m3", 33.0))); - }, manyMetricsFields()); + writer.addDocument(Arrays.asList(doubleField("s", 1.0), doubleField("m1", 12.0), longField("m2", 22), doubleField("m3", 32.0))); + writer.addDocument(Arrays.asList(doubleField("s", 2.0), doubleField("m1", 13.0), longField("m2", 23), doubleField("m3", 33.0))); + }, manyMetricsFields()); assertThat(result.getSortOrder(), equalTo(SortOrder.ASC)); - assertThat(result.getTopMetrics(), equalTo(singletonList( - new InternalTopMetrics.TopMetric(DocValueFormat.RAW, SortValue.from(1.0), metricValues( - SortValue.from(12.0), SortValue.from(22), SortValue.from(32.0)))))); + assertThat( + result.getTopMetrics(), + equalTo( + singletonList( + new InternalTopMetrics.TopMetric( + DocValueFormat.RAW, + SortValue.from(1.0), + metricValues(SortValue.from(12.0), SortValue.from(22), SortValue.from(32.0)) + ) + ) + ) + ); } private TopMetricsAggregationBuilder simpleBuilder() { @@ -417,8 +426,12 @@ private TopMetricsAggregationBuilder simpleBuilder(SortBuilder sort) { } private TopMetricsAggregationBuilder simpleBuilder(SortBuilder sort, int size) { - return new TopMetricsAggregationBuilder("test", singletonList(sort), size, - singletonList(new MultiValuesSourceFieldConfig.Builder().setFieldName("m").build())); + return new TopMetricsAggregationBuilder( + "test", + singletonList(sort), + size, + singletonList(new MultiValuesSourceFieldConfig.Builder().setFieldName("m").build()) + ); } /** @@ -428,44 +441,41 @@ private TopMetricsAggregationBuilder simpleBuilder(SortBuilder sort, int size * very quite a bit but this is super predictable. 
*/ private Query boostFoo() { - return new BooleanQuery.Builder() - .add(new BooleanClause(new MatchAllDocsQuery(), Occur.MUST)) - .add(new BooleanClause(new BoostQuery(new ConstantScoreQuery(new TermQuery(new Term("s", "foo"))), 1.0f), Occur.SHOULD)) - .build(); + return new BooleanQuery.Builder().add(new BooleanClause(new MatchAllDocsQuery(), Occur.MUST)) + .add(new BooleanClause(new BoostQuery(new ConstantScoreQuery(new TermQuery(new Term("s", "foo"))), 1.0f), Occur.SHOULD)) + .build(); } private MappedFieldType[] doubleFields() { - return new MappedFieldType[] {numberFieldType(NumberType.DOUBLE, "s"), numberFieldType(NumberType.DOUBLE, "m")}; + return new MappedFieldType[] { numberFieldType(NumberType.DOUBLE, "s"), numberFieldType(NumberType.DOUBLE, "m") }; } private MappedFieldType[] longFields() { - return new MappedFieldType[] {numberFieldType(NumberType.LONG, "s"), numberFieldType(NumberType.LONG, "m")}; + return new MappedFieldType[] { numberFieldType(NumberType.LONG, "s"), numberFieldType(NumberType.LONG, "m") }; } private MappedFieldType[] manyMetricsFields() { return new MappedFieldType[] { - numberFieldType(NumberType.DOUBLE, "s"), - numberFieldType(NumberType.DOUBLE, "m1"), - numberFieldType(NumberType.LONG, "m2"), - numberFieldType(NumberType.DOUBLE, "m3"), - }; + numberFieldType(NumberType.DOUBLE, "s"), + numberFieldType(NumberType.DOUBLE, "m1"), + numberFieldType(NumberType.LONG, "m2"), + numberFieldType(NumberType.DOUBLE, "m3"), }; } - private MappedFieldType[] floatAndDoubleField() { - return new MappedFieldType[] {numberFieldType(NumberType.FLOAT, "s"), numberFieldType(NumberType.DOUBLE, "m")}; + return new MappedFieldType[] { numberFieldType(NumberType.FLOAT, "s"), numberFieldType(NumberType.DOUBLE, "m") }; } private MappedFieldType[] longAndDoubleField() { - return new MappedFieldType[] {numberFieldType(NumberType.LONG, "s"), numberFieldType(NumberType.DOUBLE, "m")}; + return new MappedFieldType[] { numberFieldType(NumberType.LONG, "s"), numberFieldType(NumberType.DOUBLE, "m") }; } private MappedFieldType[] textAndDoubleField() { - return new MappedFieldType[] {textFieldType("s"), numberFieldType(NumberType.DOUBLE, "m")}; + return new MappedFieldType[] { textFieldType("s"), numberFieldType(NumberType.DOUBLE, "m") }; } private MappedFieldType[] geoPointAndDoubleField() { - return new MappedFieldType[] {geoPointFieldType("s"), numberFieldType(NumberType.DOUBLE, "m")}; + return new MappedFieldType[] { geoPointFieldType("s"), numberFieldType(NumberType.DOUBLE, "m") }; } private MappedFieldType numberFieldType(NumberType numberType, String name) { @@ -500,18 +510,27 @@ private IndexableField geoPointField(String name, double lat, double lon) { return new LatLonDocValuesField(name, lat, lon); } - private InternalTopMetrics collect(TopMetricsAggregationBuilder builder, Query query, - CheckedConsumer buildIndex, MappedFieldType... fields) throws IOException { + private InternalTopMetrics collect( + TopMetricsAggregationBuilder builder, + Query query, + CheckedConsumer buildIndex, + MappedFieldType... 
fields + ) throws IOException { InternalTopMetrics result = (InternalTopMetrics) collect((AggregationBuilder) builder, query, buildIndex, fields); - List expectedFieldNames = builder.getMetricFields().stream() - .map(MultiValuesSourceFieldConfig::getFieldName) - .collect(toList()); + List expectedFieldNames = builder.getMetricFields() + .stream() + .map(MultiValuesSourceFieldConfig::getFieldName) + .collect(toList()); assertThat(result.getMetricNames(), equalTo(expectedFieldNames)); return result; } - private InternalAggregation collect(AggregationBuilder builder, Query query, - CheckedConsumer buildIndex, MappedFieldType... fields) throws IOException { + private InternalAggregation collect( + AggregationBuilder builder, + Query query, + CheckedConsumer buildIndex, + MappedFieldType... fields + ) throws IOException { try (Directory directory = newDirectory()) { try (RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory)) { buildIndex.accept(indexWriter); @@ -547,9 +566,7 @@ private List metricValues(long... metricValues) } private List metricValues(SortValue... metricValues) { - return Arrays.stream(metricValues) - .map(v -> new InternalTopMetrics.MetricValue(DocValueFormat.RAW, v)) - .collect(toList()); + return Arrays.stream(metricValues).map(v -> new InternalTopMetrics.MetricValue(DocValueFormat.RAW, v)).collect(toList()); } /** @@ -561,14 +578,12 @@ private ScriptSortBuilder scriptSortOnS() { @Override protected ScriptService getMockScriptService() { - MockScriptEngine scriptEngine = new MockScriptEngine(MockScriptEngine.NAME, - singletonMap("s", args -> { - @SuppressWarnings("unchecked") - Map> fields = (Map>) args.get("doc"); - ScriptDocValues.Doubles field = (ScriptDocValues.Doubles) fields.get("s"); - return field.getValue(); - }), - emptyMap()); + MockScriptEngine scriptEngine = new MockScriptEngine(MockScriptEngine.NAME, singletonMap("s", args -> { + @SuppressWarnings("unchecked") + Map> fields = (Map>) args.get("doc"); + ScriptDocValues.Doubles field = (ScriptDocValues.Doubles) fields.get("s"); + return field.getValue(); + }), emptyMap()); Map engines = singletonMap(scriptEngine.getType(), scriptEngine); return new ScriptService(Settings.EMPTY, engines, ScriptModule.CORE_CONTEXTS); } diff --git a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/ttest/InternalTTestTests.java b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/ttest/InternalTTestTests.java index ed012e983294d..38e8eb91cb76f 100644 --- a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/ttest/InternalTTestTests.java +++ b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/ttest/InternalTTestTests.java @@ -7,12 +7,12 @@ package org.elasticsearch.xpack.analytics.ttest; -import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.plugins.SearchPlugin; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.Aggregation; @@ -115,10 +115,12 @@ protected InternalTTest mutateInstance(InternalTTest instance) { @Override protected List getNamedXContents() { - return 
CollectionUtils.appendToCopy(super.getNamedXContents(), new NamedXContentRegistry.Entry(Aggregation.class, new ParseField( - TTestAggregationBuilder.NAME), (p, c) -> { - assumeTrue("There is no ParsedTTest yet", false); - return null; - })); + return CollectionUtils.appendToCopy( + super.getNamedXContents(), + new NamedXContentRegistry.Entry(Aggregation.class, new ParseField(TTestAggregationBuilder.NAME), (p, c) -> { + assumeTrue("There is no ParsedTTest yet", false); + return null; + }) + ); } } diff --git a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/ttest/TTestAggregationBuilderTests.java b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/ttest/TTestAggregationBuilderTests.java index de5981b9e2e7f..47a562432b3ac 100644 --- a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/ttest/TTestAggregationBuilderTests.java +++ b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/ttest/TTestAggregationBuilderTests.java @@ -7,11 +7,11 @@ package org.elasticsearch.xpack.analytics.ttest; -import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.script.Script; @@ -70,9 +70,7 @@ protected TTestAggregationBuilder createTestInstance() { if (tTestType != TTestType.PAIRED && randomBoolean()) { bConfig.setFilter(QueryBuilders.queryStringQuery(randomAlphaOfLength(10))); } - TTestAggregationBuilder aggregationBuilder = new TTestAggregationBuilder(aggregationName) - .a(aConfig.build()) - .b(bConfig.build()); + TTestAggregationBuilder aggregationBuilder = new TTestAggregationBuilder(aggregationName).a(aConfig.build()).b(bConfig.build()); if (randomBoolean()) { aggregationBuilder.tails(randomIntBetween(1, 2)); } @@ -89,19 +87,20 @@ protected Writeable.Reader instanceReader() { @Override protected NamedWriteableRegistry getNamedWriteableRegistry() { - return new NamedWriteableRegistry(new SearchModule(Settings.EMPTY, Collections.emptyList()) - .getNamedWriteables()); + return new NamedWriteableRegistry(new SearchModule(Settings.EMPTY, Collections.emptyList()).getNamedWriteables()); } @Override protected NamedXContentRegistry xContentRegistry() { List namedXContent = new ArrayList<>(); - namedXContent.add(new NamedXContentRegistry.Entry( - BaseAggregationBuilder.class, - new ParseField(TTestAggregationBuilder.NAME), - (p, n) -> TTestAggregationBuilder.PARSER.apply(p, (String) n))); + namedXContent.add( + new NamedXContentRegistry.Entry( + BaseAggregationBuilder.class, + new ParseField(TTestAggregationBuilder.NAME), + (p, n) -> TTestAggregationBuilder.PARSER.apply(p, (String) n) + ) + ); namedXContent.addAll(new SearchModule(Settings.EMPTY, Collections.emptyList()).getNamedXContents()); return new NamedXContentRegistry(namedXContent); } } - diff --git a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/ttest/TTestAggregatorTests.java b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/ttest/TTestAggregatorTests.java index 4da44dc88a015..70f59362c4081 100644 --- a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/ttest/TTestAggregatorTests.java +++ 
b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/ttest/TTestAggregatorTests.java @@ -13,9 +13,9 @@ import org.apache.lucene.index.RandomIndexWriter; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.Query; -import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.time.DateUtils; +import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.index.fielddata.ScriptDocValues; import org.elasticsearch.index.mapper.BooleanFieldMapper; import org.elasticsearch.index.mapper.DateFieldMapper; @@ -67,39 +67,50 @@ public class TTestAggregatorTests extends AggregatorTestCase { @Override protected AggregationBuilder createAggBuilderForTypeTest(MappedFieldType fieldType, String fieldName) { if (fieldType instanceof NumberFieldMapper.NumberFieldType) { - return new TTestAggregationBuilder("foo") - .a(new MultiValuesSourceFieldConfig.Builder().setFieldName(fieldName) - .setFilter(QueryBuilders.rangeQuery(fieldName).lt(10)).build()) - .b(new MultiValuesSourceFieldConfig.Builder().setFieldName(fieldName) - .setFilter(QueryBuilders.rangeQuery(fieldName).gte(10)).build()); + return new TTestAggregationBuilder("foo").a( + new MultiValuesSourceFieldConfig.Builder().setFieldName(fieldName) + .setFilter(QueryBuilders.rangeQuery(fieldName).lt(10)) + .build() + ) + .b( + new MultiValuesSourceFieldConfig.Builder().setFieldName(fieldName) + .setFilter(QueryBuilders.rangeQuery(fieldName).gte(10)) + .build() + ); } else if (fieldType.typeName().equals(DateFieldMapper.CONTENT_TYPE) || fieldType.typeName().equals(DateFieldMapper.DATE_NANOS_CONTENT_TYPE)) { - return new TTestAggregationBuilder("foo") - .a(new MultiValuesSourceFieldConfig.Builder().setFieldName(fieldName) - .setFilter(QueryBuilders.rangeQuery(fieldName).lt(DateUtils.toInstant(10))).build()) - .b(new MultiValuesSourceFieldConfig.Builder().setFieldName(fieldName) - .setFilter(QueryBuilders.rangeQuery(fieldName).gte(DateUtils.toInstant(10))).build()); - } else if (fieldType.typeName().equals(BooleanFieldMapper.CONTENT_TYPE)) { - return new TTestAggregationBuilder("foo") - .a(new MultiValuesSourceFieldConfig.Builder().setFieldName(fieldName) - .setFilter(QueryBuilders.rangeQuery(fieldName).lt("true")).build()) - .b(new MultiValuesSourceFieldConfig.Builder().setFieldName(fieldName) - .setFilter(QueryBuilders.rangeQuery(fieldName).gte("false")).build()); - } + return new TTestAggregationBuilder("foo").a( + new MultiValuesSourceFieldConfig.Builder().setFieldName(fieldName) + .setFilter(QueryBuilders.rangeQuery(fieldName).lt(DateUtils.toInstant(10))) + .build() + ) + .b( + new MultiValuesSourceFieldConfig.Builder().setFieldName(fieldName) + .setFilter(QueryBuilders.rangeQuery(fieldName).gte(DateUtils.toInstant(10))) + .build() + ); + } else if (fieldType.typeName().equals(BooleanFieldMapper.CONTENT_TYPE)) { + return new TTestAggregationBuilder("foo").a( + new MultiValuesSourceFieldConfig.Builder().setFieldName(fieldName) + .setFilter(QueryBuilders.rangeQuery(fieldName).lt("true")) + .build() + ) + .b( + new MultiValuesSourceFieldConfig.Builder().setFieldName(fieldName) + .setFilter(QueryBuilders.rangeQuery(fieldName).gte("false")) + .build() + ); + } // if it's "unsupported" just use matchall filters to avoid parsing issues - return new TTestAggregationBuilder("foo") - .a(new MultiValuesSourceFieldConfig.Builder().setFieldName(fieldName) - .setFilter(QueryBuilders.matchAllQuery()).build()) - .b(new 
MultiValuesSourceFieldConfig.Builder().setFieldName(fieldName) - .setFilter(QueryBuilders.matchAllQuery()).build()); + return new TTestAggregationBuilder("foo").a( + new MultiValuesSourceFieldConfig.Builder().setFieldName(fieldName).setFilter(QueryBuilders.matchAllQuery()).build() + ).b(new MultiValuesSourceFieldConfig.Builder().setFieldName(fieldName).setFilter(QueryBuilders.matchAllQuery()).build()); } @Override protected List getSupportedValuesSourceTypes() { - return List.of(CoreValuesSourceType.NUMERIC, - CoreValuesSourceType.BOOLEAN, - CoreValuesSourceType.DATE); + return List.of(CoreValuesSourceType.NUMERIC, CoreValuesSourceType.BOOLEAN, CoreValuesSourceType.DATE); } @Override @@ -110,7 +121,7 @@ protected ScriptService getMockScriptService() { LeafDocLookup leafDocLookup = (LeafDocLookup) vars.get("doc"); String fieldname = (String) vars.get("fieldname"); ScriptDocValues scriptDocValues = leafDocLookup.get(fieldname); - return ((Number) scriptDocValues.get(0)).doubleValue() + 0.5; + return ((Number) scriptDocValues.get(0)).doubleValue() + 0.5; }); scripts.put(TERM_FILTERING, vars -> { @@ -124,9 +135,7 @@ protected ScriptService getMockScriptService() { return null; }); - MockScriptEngine scriptEngine = new MockScriptEngine(MockScriptEngine.NAME, - scripts, - Collections.emptyMap()); + MockScriptEngine scriptEngine = new MockScriptEngine(MockScriptEngine.NAME, scripts, Collections.emptyMap()); Map engines = Collections.singletonMap(scriptEngine.getType(), scriptEngine); return new ScriptService(Settings.EMPTY, engines, ScriptModule.CORE_CONTEXTS); @@ -140,9 +149,12 @@ public void testNoMatchingField() throws IOException { } public void testNotEnoughRecords() throws IOException { - testCase(new MatchAllDocsQuery(), randomFrom(TTestType.values()), iw -> { - iw.addDocument(asList(new NumericDocValuesField("a", 102), new NumericDocValuesField("b", 89))); - }, tTest -> assertEquals(Double.NaN, tTest.getValue(), 0)); + testCase( + new MatchAllDocsQuery(), + randomFrom(TTestType.values()), + iw -> { iw.addDocument(asList(new NumericDocValuesField("a", 102), new NumericDocValuesField("b", 89))); }, + tTest -> assertEquals(Double.NaN, tTest.getValue(), 0) + ); } public void testSameValues() throws IOException { @@ -168,42 +180,52 @@ public void testMatchesSortedNumericDocValues() throws IOException { } public void testMultiplePairedValues() { - AggregationExecutionException ex = expectThrows(AggregationExecutionException.class, () -> - testCase(new MatchAllDocsQuery(), TTestType.PAIRED, iw -> { - iw.addDocument(asList(new SortedNumericDocValuesField("a", 102), new SortedNumericDocValuesField("a", 103), - new SortedNumericDocValuesField("b", 89))); + AggregationExecutionException ex = expectThrows( + AggregationExecutionException.class, + () -> testCase(new MatchAllDocsQuery(), TTestType.PAIRED, iw -> { + iw.addDocument( + asList( + new SortedNumericDocValuesField("a", 102), + new SortedNumericDocValuesField("a", 103), + new SortedNumericDocValuesField("b", 89) + ) + ); iw.addDocument(asList(new SortedNumericDocValuesField("a", 99), new SortedNumericDocValuesField("b", 93))); }, tTest -> fail("Should have thrown exception")) ); assertEquals( "Encountered more than one value for a single document. 
Use a script to combine multiple values per doc into a single value.", - ex.getMessage()); + ex.getMessage() + ); } public void testSameFieldAndNoFilters() { TTestType tTestType = randomFrom(TTestType.values()); MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("field", NumberFieldMapper.NumberType.INTEGER); - TTestAggregationBuilder aggregationBuilder = new TTestAggregationBuilder("t_test") - .a(new MultiValuesSourceFieldConfig.Builder().setFieldName("field").setMissing(100).build()) - .b(new MultiValuesSourceFieldConfig.Builder().setFieldName("field").setMissing(100).build()) - .testType(tTestType); + TTestAggregationBuilder aggregationBuilder = new TTestAggregationBuilder("t_test").a( + new MultiValuesSourceFieldConfig.Builder().setFieldName("field").setMissing(100).build() + ).b(new MultiValuesSourceFieldConfig.Builder().setFieldName("field").setMissing(100).build()).testType(tTestType); - IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, () -> - testCase(aggregationBuilder, new MatchAllDocsQuery(), iw -> { + IllegalArgumentException ex = expectThrows( + IllegalArgumentException.class, + () -> testCase(aggregationBuilder, new MatchAllDocsQuery(), iw -> { iw.addDocument(singleton(new SortedNumericDocValuesField("field", 102))); iw.addDocument(singleton(new SortedNumericDocValuesField("field", 99))); }, tTest -> fail("Should have thrown exception"), fieldType) ); - assertEquals( - "The same field [field] is used for both population but no filters are specified.", - ex.getMessage()); + assertEquals("The same field [field] is used for both population but no filters are specified.", ex.getMessage()); } public void testMultipleUnpairedValues() throws IOException { TTestType tTestType = randomFrom(TTestType.HETEROSCEDASTIC, TTestType.HOMOSCEDASTIC); testCase(new MatchAllDocsQuery(), tTestType, iw -> { - iw.addDocument(asList(new SortedNumericDocValuesField("a", 102), new SortedNumericDocValuesField("a", 103), - new SortedNumericDocValuesField("b", 89))); + iw.addDocument( + asList( + new SortedNumericDocValuesField("a", 102), + new SortedNumericDocValuesField("a", 103), + new SortedNumericDocValuesField("b", 89) + ) + ); iw.addDocument(asList(new SortedNumericDocValuesField("a", 99), new SortedNumericDocValuesField("b", 93))); }, tTest -> assertEquals(tTestType == TTestType.HETEROSCEDASTIC ? 0.0607303911 : 0.01718374671, tTest.getValue(), 0.000001)); } @@ -211,8 +233,13 @@ public void testMultipleUnpairedValues() throws IOException { public void testUnpairedValuesWithFilters() throws IOException { TTestType tTestType = randomFrom(TTestType.HETEROSCEDASTIC, TTestType.HOMOSCEDASTIC); testCase(new MatchAllDocsQuery(), tTestType, iw -> { - iw.addDocument(asList(new SortedNumericDocValuesField("a", 102), new SortedNumericDocValuesField("a", 103), - new SortedNumericDocValuesField("b", 89))); + iw.addDocument( + asList( + new SortedNumericDocValuesField("a", 102), + new SortedNumericDocValuesField("a", 103), + new SortedNumericDocValuesField("b", 89) + ) + ); iw.addDocument(asList(new SortedNumericDocValuesField("a", 99), new SortedNumericDocValuesField("b", 93))); }, tTest -> assertEquals(tTestType == TTestType.HETEROSCEDASTIC ? 0.0607303911 : 0.01718374671, tTest.getValue(), 0.000001)); } @@ -249,10 +276,9 @@ public void testUnmappedWithMissingField() throws IOException { boolean missB = missA == false || randomBoolean(); // at least one of the fields should be missing MappedFieldType fieldType1 = new NumberFieldMapper.NumberFieldType(missA ? 
"not_a" : "a", NumberFieldMapper.NumberType.INTEGER); MappedFieldType fieldType2 = new NumberFieldMapper.NumberFieldType(missB ? "not_b" : "b", NumberFieldMapper.NumberType.INTEGER); - TTestAggregationBuilder aggregationBuilder = new TTestAggregationBuilder("t_test") - .a(new MultiValuesSourceFieldConfig.Builder().setFieldName("a").setMissing(100).build()) - .b(new MultiValuesSourceFieldConfig.Builder().setFieldName("b").setMissing(100).build()) - .testType(tTestType); + TTestAggregationBuilder aggregationBuilder = new TTestAggregationBuilder("t_test").a( + new MultiValuesSourceFieldConfig.Builder().setFieldName("a").setMissing(100).build() + ).b(new MultiValuesSourceFieldConfig.Builder().setFieldName("b").setMissing(100).build()).testType(tTestType); testCase(aggregationBuilder, new MatchAllDocsQuery(), iw -> { iw.addDocument(asList(new NumericDocValuesField("a", 102), new NumericDocValuesField("b", 89))); iw.addDocument(asList(new NumericDocValuesField("a", 99), new NumericDocValuesField("b", 93))); @@ -309,21 +335,24 @@ public void testUnsupportedType() { } else { fieldType2 = new NumberFieldMapper.NumberFieldType("b", NumberFieldMapper.NumberType.INTEGER); } - TTestAggregationBuilder aggregationBuilder = new TTestAggregationBuilder("t_test") - .a(new MultiValuesSourceFieldConfig.Builder().setFieldName("a").build()) - .b(new MultiValuesSourceFieldConfig.Builder().setFieldName("b").build()) - .testType(tTestType); - - IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, () -> - testCase(aggregationBuilder, new MatchAllDocsQuery(), iw -> { - iw.addDocument(asList(new SortedNumericDocValuesField("a", 102), new SortedNumericDocValuesField("a", 103), - new SortedNumericDocValuesField("b", 89))); + TTestAggregationBuilder aggregationBuilder = new TTestAggregationBuilder("t_test").a( + new MultiValuesSourceFieldConfig.Builder().setFieldName("a").build() + ).b(new MultiValuesSourceFieldConfig.Builder().setFieldName("b").build()).testType(tTestType); + + IllegalArgumentException ex = expectThrows( + IllegalArgumentException.class, + () -> testCase(aggregationBuilder, new MatchAllDocsQuery(), iw -> { + iw.addDocument( + asList( + new SortedNumericDocValuesField("a", 102), + new SortedNumericDocValuesField("a", 103), + new SortedNumericDocValuesField("b", 89) + ) + ); iw.addDocument(asList(new SortedNumericDocValuesField("a", 99), new SortedNumericDocValuesField("b", 93))); }, tTest -> fail("Should have thrown exception"), fieldType1, fieldType2) ); - assertEquals( - "Expected numeric type on field [" + (wrongA ? "a" : "b") + "], but got [keyword]", - ex.getMessage()); + assertEquals("Expected numeric type on field [" + (wrongA ? 
"a" : "b") + "], but got [keyword]", ex.getMessage()); } public void testBadMissingField() { @@ -342,8 +371,9 @@ public void testBadMissingField() { } TTestAggregationBuilder aggregationBuilder = new TTestAggregationBuilder("t_test").a(a.build()).b(b.build()).testType(tTestType); - NumberFormatException ex = expectThrows(NumberFormatException.class, () -> - testCase(aggregationBuilder, new MatchAllDocsQuery(), iw -> { + NumberFormatException ex = expectThrows( + NumberFormatException.class, + () -> testCase(aggregationBuilder, new MatchAllDocsQuery(), iw -> { iw.addDocument(asList(new SortedNumericDocValuesField("a", 102), new SortedNumericDocValuesField("b", 89))); iw.addDocument(asList(new SortedNumericDocValuesField("a", 99), new SortedNumericDocValuesField("b", 93))); }, tTest -> fail("Should have thrown exception"), fieldType1, fieldType2) @@ -351,7 +381,6 @@ public void testBadMissingField() { assertEquals("For input string: \"bad_number\"", ex.getMessage()); } - public void testUnmappedWithBadMissingField() { TTestType tTestType = randomFrom(TTestType.values()); boolean missA = randomBoolean(); @@ -373,8 +402,9 @@ public void testUnmappedWithBadMissingField() { } TTestAggregationBuilder aggregationBuilder = new TTestAggregationBuilder("t_test").a(a.build()).b(b.build()).testType(tTestType); - NumberFormatException ex = expectThrows(NumberFormatException.class, () -> - testCase(aggregationBuilder, new MatchAllDocsQuery(), iw -> { + NumberFormatException ex = expectThrows( + NumberFormatException.class, + () -> testCase(aggregationBuilder, new MatchAllDocsQuery(), iw -> { iw.addDocument(asList(new SortedNumericDocValuesField("a", 102), new SortedNumericDocValuesField("b", 89))); iw.addDocument(asList(new SortedNumericDocValuesField("a", 99), new SortedNumericDocValuesField("b", 93))); }, tTest -> fail("Should have thrown exception"), fieldType1, fieldType2) @@ -387,31 +417,43 @@ public void testEmptyBucket() throws IOException { MappedFieldType fieldType1 = new NumberFieldMapper.NumberFieldType("a", NumberFieldMapper.NumberType.INTEGER); MappedFieldType fieldType2 = new NumberFieldMapper.NumberFieldType("b", NumberFieldMapper.NumberType.INTEGER); MappedFieldType fieldTypePart = new NumberFieldMapper.NumberFieldType("part", NumberFieldMapper.NumberType.INTEGER); - HistogramAggregationBuilder histogram = new HistogramAggregationBuilder("histo").field("part").interval(10).minDocCount(0) - .subAggregation(new TTestAggregationBuilder("t_test") - .a(new MultiValuesSourceFieldConfig.Builder().setFieldName("a").build()) - .b(new MultiValuesSourceFieldConfig.Builder().setFieldName("b").build()) - .testType(tTestType)); + HistogramAggregationBuilder histogram = new HistogramAggregationBuilder("histo").field("part") + .interval(10) + .minDocCount(0) + .subAggregation( + new TTestAggregationBuilder("t_test").a(new MultiValuesSourceFieldConfig.Builder().setFieldName("a").build()) + .b(new MultiValuesSourceFieldConfig.Builder().setFieldName("b").build()) + .testType(tTestType) + ); testCase(histogram, new MatchAllDocsQuery(), iw -> { - iw.addDocument(asList(new NumericDocValuesField("a", 102), new NumericDocValuesField("b", 89), - new NumericDocValuesField("part", 1))); - iw.addDocument(asList(new NumericDocValuesField("a", 99), new NumericDocValuesField("b", 93), - new NumericDocValuesField("part", 1))); - iw.addDocument(asList(new NumericDocValuesField("a", 111), new NumericDocValuesField("b", 72), - new NumericDocValuesField("part", 1))); - iw.addDocument(asList(new NumericDocValuesField("a", 
97), new NumericDocValuesField("b", 98), - new NumericDocValuesField("part", 21))); - iw.addDocument(asList(new NumericDocValuesField("a", 101), new NumericDocValuesField("b", 102), - new NumericDocValuesField("part", 21))); - iw.addDocument(asList(new NumericDocValuesField("a", 99), new NumericDocValuesField("b", 98), - new NumericDocValuesField("part", 21))); + iw.addDocument( + asList(new NumericDocValuesField("a", 102), new NumericDocValuesField("b", 89), new NumericDocValuesField("part", 1)) + ); + iw.addDocument( + asList(new NumericDocValuesField("a", 99), new NumericDocValuesField("b", 93), new NumericDocValuesField("part", 1)) + ); + iw.addDocument( + asList(new NumericDocValuesField("a", 111), new NumericDocValuesField("b", 72), new NumericDocValuesField("part", 1)) + ); + iw.addDocument( + asList(new NumericDocValuesField("a", 97), new NumericDocValuesField("b", 98), new NumericDocValuesField("part", 21)) + ); + iw.addDocument( + asList(new NumericDocValuesField("a", 101), new NumericDocValuesField("b", 102), new NumericDocValuesField("part", 21)) + ); + iw.addDocument( + asList(new NumericDocValuesField("a", 99), new NumericDocValuesField("b", 98), new NumericDocValuesField("part", 21)) + ); }, (Consumer) histo -> { assertEquals(3, histo.getBuckets().size()); assertNotNull(histo.getBuckets().get(0).getAggregations().asMap().get("t_test")); InternalTTest tTest = (InternalTTest) histo.getBuckets().get(0).getAggregations().asMap().get("t_test"); - assertEquals(tTestType == TTestType.PAIRED ? 0.1939778614 : - tTestType == TTestType.HOMOSCEDASTIC ? 0.05878871029 : 0.07529006595, tTest.getValue(), 0.000001); + assertEquals( + tTestType == TTestType.PAIRED ? 0.1939778614 : tTestType == TTestType.HOMOSCEDASTIC ? 0.05878871029 : 0.07529006595, + tTest.getValue(), + 0.000001 + ); assertNotNull(histo.getBuckets().get(1).getAggregations().asMap().get("t_test")); tTest = (InternalTTest) histo.getBuckets().get(1).getAggregations().asMap().get("t_test"); @@ -419,8 +461,11 @@ public void testEmptyBucket() throws IOException { assertNotNull(histo.getBuckets().get(2).getAggregations().asMap().get("t_test")); tTest = (InternalTTest) histo.getBuckets().get(2).getAggregations().asMap().get("t_test"); - assertEquals(tTestType == TTestType.PAIRED ? 0.6666666667 : - tTestType == TTestType.HOMOSCEDASTIC ? 0.8593081179 : 0.8594865044, tTest.getValue(), 0.000001); + assertEquals( + tTestType == TTestType.PAIRED ? 0.6666666667 : tTestType == TTestType.HOMOSCEDASTIC ? 
0.8593081179 : 0.8594865044, + tTest.getValue(), + 0.000001 + ); }, fieldType1, fieldType2, fieldTypePart); } @@ -430,31 +475,35 @@ public void testFormatter() throws IOException { TTestType tTestType = randomFrom(TTestType.values()); MappedFieldType fieldType1 = new NumberFieldMapper.NumberFieldType("a", NumberFieldMapper.NumberType.INTEGER); MappedFieldType fieldType2 = new NumberFieldMapper.NumberFieldType("b", NumberFieldMapper.NumberType.INTEGER); - TTestAggregationBuilder aggregationBuilder = new TTestAggregationBuilder("t_test") - .a(new MultiValuesSourceFieldConfig.Builder().setFieldName("a").build()) - .b(new MultiValuesSourceFieldConfig.Builder().setFieldName("b").build()) - .testType(tTestType).format("0.00%"); + TTestAggregationBuilder aggregationBuilder = new TTestAggregationBuilder("t_test").a( + new MultiValuesSourceFieldConfig.Builder().setFieldName("a").build() + ).b(new MultiValuesSourceFieldConfig.Builder().setFieldName("b").build()).testType(tTestType).format("0.00%"); testCase(aggregationBuilder, new MatchAllDocsQuery(), iw -> { iw.addDocument(asList(new NumericDocValuesField("a", 102), new NumericDocValuesField("b", 89))); iw.addDocument(asList(new NumericDocValuesField("a", 99), new NumericDocValuesField("b", 93))); iw.addDocument(asList(new NumericDocValuesField("a", 111), new NumericDocValuesField("b", 72))); }, (Consumer) tTest -> { - assertEquals(tTestType == TTestType.PAIRED ? 0.1939778614 : - tTestType == TTestType.HOMOSCEDASTIC ? 0.05878871029 : 0.07529006595, tTest.getValue(), 0.000001); - assertEquals(tTestType == TTestType.PAIRED ? "19.40%" : - tTestType == TTestType.HOMOSCEDASTIC ? "5.88%" : "7.53%", tTest.getValueAsString()); + assertEquals( + tTestType == TTestType.PAIRED ? 0.1939778614 : tTestType == TTestType.HOMOSCEDASTIC ? 0.05878871029 : 0.07529006595, + tTest.getValue(), + 0.000001 + ); + assertEquals( + tTestType == TTestType.PAIRED ? "19.40%" : tTestType == TTestType.HOMOSCEDASTIC ? "5.88%" : "7.53%", + tTest.getValueAsString() + ); }, fieldType1, fieldType2); } public void testGetProperty() throws IOException { MappedFieldType fieldType1 = new NumberFieldMapper.NumberFieldType("a", NumberFieldMapper.NumberType.INTEGER); MappedFieldType fieldType2 = new NumberFieldMapper.NumberFieldType("b", NumberFieldMapper.NumberType.INTEGER); - GlobalAggregationBuilder globalBuilder = new GlobalAggregationBuilder("global") - .subAggregation(new TTestAggregationBuilder("t_test") - .a(new MultiValuesSourceFieldConfig.Builder().setFieldName("a").build()) + GlobalAggregationBuilder globalBuilder = new GlobalAggregationBuilder("global").subAggregation( + new TTestAggregationBuilder("t_test").a(new MultiValuesSourceFieldConfig.Builder().setFieldName("a").build()) .b(new MultiValuesSourceFieldConfig.Builder().setFieldName("b").build()) - .testType(TTestType.PAIRED)); + .testType(TTestType.PAIRED) + ); testCase(globalBuilder, new MatchAllDocsQuery(), iw -> { iw.addDocument(asList(new NumericDocValuesField("a", 102), new NumericDocValuesField("b", 89))); @@ -478,26 +527,30 @@ public void testScript() throws IOException { MultiValuesSourceFieldConfig a = new MultiValuesSourceFieldConfig.Builder().setFieldName("field").build(); MultiValuesSourceFieldConfig b = new MultiValuesSourceFieldConfig.Builder().setScript( - new Script(ScriptType.INLINE, MockScriptEngine.NAME, ADD_HALF_SCRIPT, Collections.singletonMap("fieldname", "field"))).build(); - TTestAggregationBuilder aggregationBuilder = new TTestAggregationBuilder("t_test"). - a(fieldInA ? a : b).b(fieldInA ? 
b : a).testType(tTestType); + new Script(ScriptType.INLINE, MockScriptEngine.NAME, ADD_HALF_SCRIPT, Collections.singletonMap("fieldname", "field")) + ).build(); + TTestAggregationBuilder aggregationBuilder = new TTestAggregationBuilder("t_test").a(fieldInA ? a : b) + .b(fieldInA ? b : a) + .testType(tTestType); testCase(aggregationBuilder, new MatchAllDocsQuery(), iw -> { iw.addDocument(singleton(new NumericDocValuesField("field", 1))); iw.addDocument(singleton(new NumericDocValuesField("field", 2))); iw.addDocument(singleton(new NumericDocValuesField("field", 3))); - }, (Consumer) tTest -> { - assertEquals(tTestType == TTestType.PAIRED ? 0 : 0.5733922538, tTest.getValue(), 0.000001); - }, fieldType); + }, + (Consumer) tTest -> { + assertEquals(tTestType == TTestType.PAIRED ? 0 : 0.5733922538, tTest.getValue(), 0.000001); + }, + fieldType + ); } public void testPaired() throws IOException { MappedFieldType fieldType1 = new NumberFieldMapper.NumberFieldType("a", NumberFieldMapper.NumberType.INTEGER); MappedFieldType fieldType2 = new NumberFieldMapper.NumberFieldType("b", NumberFieldMapper.NumberType.INTEGER); - TTestAggregationBuilder aggregationBuilder = new TTestAggregationBuilder("t_test") - .a(new MultiValuesSourceFieldConfig.Builder().setFieldName("a").build()) - .b(new MultiValuesSourceFieldConfig.Builder().setFieldName("b").build()) - .testType(TTestType.PAIRED); + TTestAggregationBuilder aggregationBuilder = new TTestAggregationBuilder("t_test").a( + new MultiValuesSourceFieldConfig.Builder().setFieldName("a").build() + ).b(new MultiValuesSourceFieldConfig.Builder().setFieldName("b").build()).testType(TTestType.PAIRED); int tails = randomIntBetween(1, 2); if (tails == 1 || randomBoolean()) { aggregationBuilder.tails(tails); @@ -509,18 +562,15 @@ public void testPaired() throws IOException { iw.addDocument(asList(new NumericDocValuesField("a", 97), new NumericDocValuesField("b", 98))); iw.addDocument(asList(new NumericDocValuesField("a", 101), new NumericDocValuesField("b", 102))); iw.addDocument(asList(new NumericDocValuesField("a", 99), new NumericDocValuesField("b", 98))); - }, (Consumer) ttest -> { - assertEquals(0.09571844217 * tails, ttest.getValue(), 0.00001); - }, fieldType1, fieldType2); + }, (Consumer) ttest -> { assertEquals(0.09571844217 * tails, ttest.getValue(), 0.00001); }, fieldType1, fieldType2); } public void testHomoscedastic() throws IOException { MappedFieldType fieldType1 = new NumberFieldMapper.NumberFieldType("a", NumberFieldMapper.NumberType.INTEGER); MappedFieldType fieldType2 = new NumberFieldMapper.NumberFieldType("b", NumberFieldMapper.NumberType.INTEGER); - TTestAggregationBuilder aggregationBuilder = new TTestAggregationBuilder("t_test") - .a(new MultiValuesSourceFieldConfig.Builder().setFieldName("a").build()) - .b(new MultiValuesSourceFieldConfig.Builder().setFieldName("b").build()) - .testType(TTestType.HOMOSCEDASTIC); + TTestAggregationBuilder aggregationBuilder = new TTestAggregationBuilder("t_test").a( + new MultiValuesSourceFieldConfig.Builder().setFieldName("a").build() + ).b(new MultiValuesSourceFieldConfig.Builder().setFieldName("b").build()).testType(TTestType.HOMOSCEDASTIC); int tails = randomIntBetween(1, 2); if (tails == 1 || randomBoolean()) { aggregationBuilder.tails(tails); @@ -532,17 +582,15 @@ public void testHomoscedastic() throws IOException { iw.addDocument(asList(new NumericDocValuesField("a", 97), new NumericDocValuesField("b", 98))); iw.addDocument(asList(new NumericDocValuesField("a", 101), new NumericDocValuesField("b", 
102))); iw.addDocument(asList(new NumericDocValuesField("a", 99), new NumericDocValuesField("b", 98))); - }, (Consumer) ttest -> { - assertEquals(0.03928288693 * tails, ttest.getValue(), 0.00001); - }, fieldType1, fieldType2); + }, (Consumer) ttest -> { assertEquals(0.03928288693 * tails, ttest.getValue(), 0.00001); }, fieldType1, fieldType2); } public void testHeteroscedastic() throws IOException { MappedFieldType fieldType1 = new NumberFieldMapper.NumberFieldType("a", NumberFieldMapper.NumberType.INTEGER); MappedFieldType fieldType2 = new NumberFieldMapper.NumberFieldType("b", NumberFieldMapper.NumberType.INTEGER); - TTestAggregationBuilder aggregationBuilder = new TTestAggregationBuilder("t_test") - .a(new MultiValuesSourceFieldConfig.Builder().setFieldName("a").build()) - .b(new MultiValuesSourceFieldConfig.Builder().setFieldName("b").build()); + TTestAggregationBuilder aggregationBuilder = new TTestAggregationBuilder("t_test").a( + new MultiValuesSourceFieldConfig.Builder().setFieldName("a").build() + ).b(new MultiValuesSourceFieldConfig.Builder().setFieldName("b").build()); if (randomBoolean()) { aggregationBuilder.testType(TTestType.HETEROSCEDASTIC); } @@ -557,17 +605,16 @@ public void testHeteroscedastic() throws IOException { iw.addDocument(asList(new NumericDocValuesField("a", 97), new NumericDocValuesField("b", 98))); iw.addDocument(asList(new NumericDocValuesField("a", 101), new NumericDocValuesField("b", 102))); iw.addDocument(asList(new NumericDocValuesField("a", 99), new NumericDocValuesField("b", 98))); - }, (Consumer) ttest -> { - assertEquals(0.04538666214 * tails, ttest.getValue(), 0.00001); - }, fieldType1, fieldType2); + }, (Consumer) ttest -> { assertEquals(0.04538666214 * tails, ttest.getValue(), 0.00001); }, fieldType1, fieldType2); } public void testFiltered() throws IOException { TTestType tTestType = randomFrom(TTestType.values()); MappedFieldType fieldType1 = new NumberFieldMapper.NumberFieldType("a", NumberFieldMapper.NumberType.INTEGER); MappedFieldType fieldType2 = new NumberFieldMapper.NumberFieldType("b", NumberFieldMapper.NumberType.INTEGER); - TTestAggregationBuilder aggregationBuilder = new TTestAggregationBuilder("t_test") - .a(new MultiValuesSourceFieldConfig.Builder().setFieldName("a").setFilter(QueryBuilders.termQuery("b", 1)).build()) + TTestAggregationBuilder aggregationBuilder = new TTestAggregationBuilder("t_test").a( + new MultiValuesSourceFieldConfig.Builder().setFieldName("a").setFilter(QueryBuilders.termQuery("b", 1)).build() + ) .b(new MultiValuesSourceFieldConfig.Builder().setFieldName("a").setFilter(QueryBuilders.termQuery("b", 2)).build()) .testType(tTestType); int tails = randomIntBetween(1, 2); @@ -597,9 +644,16 @@ public void testFiltered() throws IOException { iw.addDocument(asList(new NumericDocValuesField("a", 198), new IntPoint("b", 3))); }; if (tTestType == TTestType.PAIRED) { - IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, () -> - testCase(aggregationBuilder, new MatchAllDocsQuery(), buildIndex, tTest -> fail("Should have thrown exception"), - fieldType1, fieldType2) + IllegalArgumentException ex = expectThrows( + IllegalArgumentException.class, + () -> testCase( + aggregationBuilder, + new MatchAllDocsQuery(), + buildIndex, + tTest -> fail("Should have thrown exception"), + fieldType1, + fieldType2 + ) ); assertEquals("Paired t-test doesn't support filters", ex.getMessage()); } else { @@ -622,15 +676,15 @@ public void testFilterByFilterOrScript() throws IOException { boolean filterTermOne = 
randomBoolean(); - MultiValuesSourceFieldConfig.Builder a = new MultiValuesSourceFieldConfig.Builder().setFieldName("field").setFilter( - QueryBuilders.termQuery("term", filterTermOne? 1 : 2) - ); + MultiValuesSourceFieldConfig.Builder a = new MultiValuesSourceFieldConfig.Builder().setFieldName("field") + .setFilter(QueryBuilders.termQuery("term", filterTermOne ? 1 : 2)); MultiValuesSourceFieldConfig.Builder b = new MultiValuesSourceFieldConfig.Builder().setScript( - new Script(ScriptType.INLINE, MockScriptEngine.NAME, TERM_FILTERING, Collections.singletonMap("term", filterTermOne? 2 : 1)) + new Script(ScriptType.INLINE, MockScriptEngine.NAME, TERM_FILTERING, Collections.singletonMap("term", filterTermOne ? 2 : 1)) ); - TTestAggregationBuilder aggregationBuilder = new TTestAggregationBuilder("t_test"). - a(fieldInA ? a.build() : b.build()).b(fieldInA ? b.build() : a.build()).testType(tTestType); + TTestAggregationBuilder aggregationBuilder = new TTestAggregationBuilder("t_test").a(fieldInA ? a.build() : b.build()) + .b(fieldInA ? b.build() : a.build()) + .testType(tTestType); testCase(aggregationBuilder, new MatchAllDocsQuery(), iw -> { iw.addDocument(asList(new NumericDocValuesField("field", 1), new IntPoint("term", 1), new NumericDocValuesField("term", 1))); @@ -640,20 +694,21 @@ public void testFilterByFilterOrScript() throws IOException { iw.addDocument(asList(new NumericDocValuesField("field", 4), new IntPoint("term", 2), new NumericDocValuesField("term", 2))); iw.addDocument(asList(new NumericDocValuesField("field", 5), new IntPoint("term", 2), new NumericDocValuesField("term", 2))); iw.addDocument(asList(new NumericDocValuesField("field", 6), new IntPoint("term", 2), new NumericDocValuesField("term", 2))); - }, (Consumer) tTest -> { - assertEquals(0.02131164113, tTest.getValue(), 0.000001); - }, fieldType1, fieldType2); + }, (Consumer) tTest -> { assertEquals(0.02131164113, tTest.getValue(), 0.000001); }, fieldType1, fieldType2); } - private void testCase(Query query, TTestType type, - CheckedConsumer buildIndex, - Consumer verify) throws IOException { + private void testCase( + Query query, + TTestType type, + CheckedConsumer buildIndex, + Consumer verify + ) throws IOException { MappedFieldType fieldType1 = new NumberFieldMapper.NumberFieldType("a", NumberFieldMapper.NumberType.INTEGER); MappedFieldType fieldType2 = new NumberFieldMapper.NumberFieldType("b", NumberFieldMapper.NumberType.INTEGER); - TTestAggregationBuilder aggregationBuilder = new TTestAggregationBuilder("t_test") - .a(new MultiValuesSourceFieldConfig.Builder().setFieldName("a").build()) - .b(new MultiValuesSourceFieldConfig.Builder().setFieldName("b").build()); + TTestAggregationBuilder aggregationBuilder = new TTestAggregationBuilder("t_test").a( + new MultiValuesSourceFieldConfig.Builder().setFieldName("a").build() + ).b(new MultiValuesSourceFieldConfig.Builder().setFieldName("b").build()); if (type != TTestType.HETEROSCEDASTIC || randomBoolean()) { aggregationBuilder.testType(type); } diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/Rollup.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/Rollup.java index c775da4aecb4f..3858a99d88868 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/Rollup.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/Rollup.java @@ -43,11 +43,11 @@ import org.elasticsearch.xpack.core.rollup.action.GetRollupIndexCapsAction; import 
org.elasticsearch.xpack.core.rollup.action.GetRollupJobsAction; import org.elasticsearch.xpack.core.rollup.action.PutRollupJobAction; +import org.elasticsearch.xpack.core.rollup.action.RollupAction; import org.elasticsearch.xpack.core.rollup.action.RollupIndexerAction; import org.elasticsearch.xpack.core.rollup.action.RollupSearchAction; import org.elasticsearch.xpack.core.rollup.action.StartRollupJobAction; import org.elasticsearch.xpack.core.rollup.action.StopRollupJobAction; -import org.elasticsearch.xpack.core.rollup.action.RollupAction; import org.elasticsearch.xpack.core.scheduler.SchedulerEngine; import org.elasticsearch.xpack.rollup.action.TransportDeleteRollupJobAction; import org.elasticsearch.xpack.rollup.action.TransportGetRollupCapsAction; @@ -101,29 +101,44 @@ public Rollup(Settings settings) { } @Override - public Collection createComponents(Client client, ClusterService clusterService, ThreadPool threadPool, - ResourceWatcherService resourceWatcherService, ScriptService scriptService, - NamedXContentRegistry xContentRegistry, Environment environment, - NodeEnvironment nodeEnvironment, NamedWriteableRegistry namedWriteableRegistry, - IndexNameExpressionResolver expressionResolver, - Supplier repositoriesServiceSupplier) { + public Collection createComponents( + Client client, + ClusterService clusterService, + ThreadPool threadPool, + ResourceWatcherService resourceWatcherService, + ScriptService scriptService, + NamedXContentRegistry xContentRegistry, + Environment environment, + NodeEnvironment nodeEnvironment, + NamedWriteableRegistry namedWriteableRegistry, + IndexNameExpressionResolver expressionResolver, + Supplier repositoriesServiceSupplier + ) { return emptyList(); } @Override - public List getRestHandlers(Settings settings, RestController restController, ClusterSettings clusterSettings, - IndexScopedSettings indexScopedSettings, SettingsFilter settingsFilter, - IndexNameExpressionResolver indexNameExpressionResolver, - Supplier nodesInCluster) { - List handlers = new ArrayList<>(Arrays.asList( - new RestRollupSearchAction(), - new RestPutRollupJobAction(), - new RestStartRollupJobAction(), - new RestStopRollupJobAction(), - new RestDeleteRollupJobAction(), - new RestGetRollupJobsAction(), - new RestGetRollupCapsAction(), - new RestGetRollupIndexCapsAction())); + public List getRestHandlers( + Settings settings, + RestController restController, + ClusterSettings clusterSettings, + IndexScopedSettings indexScopedSettings, + SettingsFilter settingsFilter, + IndexNameExpressionResolver indexNameExpressionResolver, + Supplier nodesInCluster + ) { + List handlers = new ArrayList<>( + Arrays.asList( + new RestRollupSearchAction(), + new RestPutRollupJobAction(), + new RestStartRollupJobAction(), + new RestStopRollupJobAction(), + new RestDeleteRollupJobAction(), + new RestGetRollupJobsAction(), + new RestGetRollupCapsAction(), + new RestGetRollupIndexCapsAction() + ) + ); if (RollupV2.isEnabled()) { handlers.add(new RestRollupAction()); @@ -134,17 +149,20 @@ public List getRestHandlers(Settings settings, RestController restC @Override public List> getActions() { - List> actions = new ArrayList<>(Arrays.asList( - new ActionHandler<>(RollupSearchAction.INSTANCE, TransportRollupSearchAction.class), - new ActionHandler<>(PutRollupJobAction.INSTANCE, TransportPutRollupJobAction.class), - new ActionHandler<>(StartRollupJobAction.INSTANCE, TransportStartRollupAction.class), - new ActionHandler<>(StopRollupJobAction.INSTANCE, TransportStopRollupAction.class), - new 
ActionHandler<>(DeleteRollupJobAction.INSTANCE, TransportDeleteRollupJobAction.class), - new ActionHandler<>(GetRollupJobsAction.INSTANCE, TransportGetRollupJobAction.class), - new ActionHandler<>(GetRollupCapsAction.INSTANCE, TransportGetRollupCapsAction.class), - new ActionHandler<>(GetRollupIndexCapsAction.INSTANCE, TransportGetRollupIndexCapsAction.class), - new ActionHandler<>(XPackUsageFeatureAction.ROLLUP, RollupUsageTransportAction.class), - new ActionHandler<>(XPackInfoFeatureAction.ROLLUP, RollupInfoTransportAction.class))); + List> actions = new ArrayList<>( + Arrays.asList( + new ActionHandler<>(RollupSearchAction.INSTANCE, TransportRollupSearchAction.class), + new ActionHandler<>(PutRollupJobAction.INSTANCE, TransportPutRollupJobAction.class), + new ActionHandler<>(StartRollupJobAction.INSTANCE, TransportStartRollupAction.class), + new ActionHandler<>(StopRollupJobAction.INSTANCE, TransportStopRollupAction.class), + new ActionHandler<>(DeleteRollupJobAction.INSTANCE, TransportDeleteRollupJobAction.class), + new ActionHandler<>(GetRollupJobsAction.INSTANCE, TransportGetRollupJobAction.class), + new ActionHandler<>(GetRollupCapsAction.INSTANCE, TransportGetRollupCapsAction.class), + new ActionHandler<>(GetRollupIndexCapsAction.INSTANCE, TransportGetRollupIndexCapsAction.class), + new ActionHandler<>(XPackUsageFeatureAction.ROLLUP, RollupUsageTransportAction.class), + new ActionHandler<>(XPackInfoFeatureAction.ROLLUP, RollupInfoTransportAction.class) + ) + ); if (RollupV2.isEnabled()) { actions.add(new ActionHandler<>(RollupIndexerAction.INSTANCE, TransportRollupIndexerAction.class)); @@ -156,18 +174,26 @@ public List getRestHandlers(Settings settings, RestController restC @Override public List> getExecutorBuilders(Settings settings) { - FixedExecutorBuilder indexing = new FixedExecutorBuilder(settings, Rollup.TASK_THREAD_POOL_NAME, - 1, -1, "xpack.rollup.task_thread_pool", false); + FixedExecutorBuilder indexing = new FixedExecutorBuilder( + settings, + Rollup.TASK_THREAD_POOL_NAME, + 1, + -1, + "xpack.rollup.task_thread_pool", + false + ); return Collections.singletonList(indexing); } @Override - public List> getPersistentTasksExecutor(ClusterService clusterService, - ThreadPool threadPool, - Client client, - SettingsModule settingsModule, - IndexNameExpressionResolver expressionResolver) { + public List> getPersistentTasksExecutor( + ClusterService clusterService, + ThreadPool threadPool, + Client client, + SettingsModule settingsModule, + IndexNameExpressionResolver expressionResolver + ) { schedulerEngine.set(new SchedulerEngine(settings, getClock())); return Collections.singletonList(new RollupJobTask.RollupJobPersistentTasksExecutor(client, schedulerEngine.get(), threadPool)); } diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/RollupJobIdentifierUtils.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/RollupJobIdentifierUtils.java index 1d8b18a470a05..3e083f88d9bd3 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/RollupJobIdentifierUtils.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/RollupJobIdentifierUtils.java @@ -81,11 +81,16 @@ private static void doFindBestJobs(AggregationBuilder source, List agg : fieldCaps.getAggs()) { if (agg.get(RollupField.AGG).equals(DateHistogramAggregationBuilder.NAME)) { - DateHistogramInterval interval = new DateHistogramInterval((String)agg.get(RollupField.INTERVAL)); + DateHistogramInterval interval = new 
DateHistogramInterval((String) agg.get(RollupField.INTERVAL)); ZoneId thisTimezone = ZoneId.of(((String) agg.get(DateHistogramGroupConfig.TIME_ZONE)), ZoneId.SHORT_IDS); ZoneId sourceTimeZone = source.timeZone() == null @@ -120,11 +125,14 @@ private static void checkDateHisto(DateHistogramAggregationBuilder source, List< */ DateHistogramInterval configCalendarInterval = agg.get(CALENDAR_INTERVAL) != null - ? new DateHistogramInterval((String) agg.get(CALENDAR_INTERVAL)) : null; + ? new DateHistogramInterval((String) agg.get(CALENDAR_INTERVAL)) + : null; DateHistogramInterval configFixedInterval = agg.get(FIXED_INTERVAL) != null - ? new DateHistogramInterval((String) agg.get(FIXED_INTERVAL)) : null; + ? new DateHistogramInterval((String) agg.get(FIXED_INTERVAL)) + : null; DateHistogramInterval configLegacyInterval = agg.get(INTERVAL) != null - ? new DateHistogramInterval((String) agg.get(INTERVAL)) : null; + ? new DateHistogramInterval((String) agg.get(INTERVAL)) + : null; // If histo used calendar_interval explicitly if (source.getCalendarInterval() != null) { @@ -165,8 +173,9 @@ private static void checkDateHisto(DateHistogramAggregationBuilder source, List< } else { // This _should not_ happen, but if miraculously it does we need to just quit - throw new IllegalArgumentException("An interval of some variety must be configured on " + - "the date_histogram aggregation."); + throw new IllegalArgumentException( + "An interval of some variety must be configured on " + "the date_histogram aggregation." + ); } // If we get here nothing matched, and we can break out break; @@ -176,8 +185,13 @@ private static void checkDateHisto(DateHistogramAggregationBuilder source, List< } if (localCaps.isEmpty()) { - throw new IllegalArgumentException("There is not a rollup job that has a [" + source.getWriteableName() + "] agg on field [" + - source.field() + "] which also satisfies all requirements of query."); + throw new IllegalArgumentException( + "There is not a rollup job that has a [" + + source.getWriteableName() + + "] agg on field [" + + source.field() + + "] which also satisfies all requirements of query." + ); } // We are a leaf, save our best caps @@ -203,13 +217,12 @@ static String retrieveInterval(Map agg) { return interval; } - static boolean validateCalendarInterval(DateHistogramInterval requestInterval, - DateHistogramInterval configInterval) { + static boolean validateCalendarInterval(DateHistogramInterval requestInterval, DateHistogramInterval configInterval) { if (requestInterval == null || configInterval == null) { return false; } - // The request must be gte the config. The CALENDAR_ORDERING map values are integers representing + // The request must be gte the config. 
The CALENDAR_ORDERING map values are integers representing // relative orders between the calendar units Rounding.DateTimeUnit requestUnit = DateHistogramAggregationBuilder.DATE_FIELD_UNITS.get(requestInterval.toString()); if (requestUnit == null) { @@ -233,10 +246,8 @@ static boolean validateFixedInterval(DateHistogramInterval requestInterval, Date } // Both are fixed, good to convert to millis now - long configIntervalMillis = TimeValue.parseTimeValue(configInterval.toString(), - "date_histo.config.interval").getMillis(); - long requestIntervalMillis = TimeValue.parseTimeValue(requestInterval.toString(), - "date_histo.request.interval").getMillis(); + long configIntervalMillis = TimeValue.parseTimeValue(configInterval.toString(), "date_histo.config.interval").getMillis(); + long requestIntervalMillis = TimeValue.parseTimeValue(requestInterval.toString(), "date_histo.request.interval").getMillis(); // Must be a multiple and gte the config return requestIntervalMillis >= configIntervalMillis && requestIntervalMillis % configIntervalMillis == 0; @@ -264,8 +275,13 @@ private static void checkHisto(HistogramAggregationBuilder source, List source, List jobCaps, - Set bestCaps) { + private static void checkVSLeaf( + ValuesSourceAggregationBuilder.LeafOnly source, + List jobCaps, + Set bestCaps + ) { ArrayList localCaps = new ArrayList<>(); for (RollupJobCaps cap : jobCaps) { RollupJobCaps.RollupFieldCaps fieldCaps = cap.getFieldCaps().get(source.field()); @@ -329,8 +353,13 @@ private static void checkVSLeaf(ValuesSourceAggregationBuilder.LeafOnly sou } if (localCaps.isEmpty()) { - throw new IllegalArgumentException("There is not a rollup job that has a [" + source.getWriteableName() + "] agg with name [" + - source.getName() + "] which also satisfies all requirements of query."); + throw new IllegalArgumentException( + "There is not a rollup job that has a [" + + source.getWriteableName() + + "] agg with name [" + + source.getName() + + "] which also satisfies all requirements of query." + ); } // Metrics are always leaves so go ahead and add to best caps @@ -410,8 +439,8 @@ private static Comparator getComparator() { // If dates are the same, the "smaller" job is the one with a larger histo avg histo weight. 
// Not bullet proof, but heuristically we prefer: - // - one job with interval 100 (avg 100) over one job with interval 10 (avg 10) - // - one job with interval 100 (avg 100) over one job with ten histos @ interval 10 (avg 10) + // - one job with interval 100 (avg 100) over one job with interval 10 (avg 10) + // - one job with interval 100 (avg 100) over one job with ten histos @ interval 10 (avg 10) // because in both cases the larger intervals likely generate fewer documents // // The exception is if one of jobs had no histo (avg 0) then we prefer that diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/RollupRequestTranslator.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/RollupRequestTranslator.java index cb7c6051f34ca..75aa06e15b754 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/RollupRequestTranslator.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/RollupRequestTranslator.java @@ -6,7 +6,6 @@ */ package org.elasticsearch.xpack.rollup; - import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput; @@ -28,7 +27,6 @@ import java.util.List; import java.util.function.Supplier; - /** * This class provides a number of static utilities that help convert a non-rollup * aggregation into an aggregation that conforms to the rollup conventions @@ -128,13 +126,18 @@ public static List translateAggregation(AggregationBuilder s } else if (source.getWriteableName().equals(HistogramAggregationBuilder.NAME)) { return translateHistogram((HistogramAggregationBuilder) source, registry); } else if (RollupField.SUPPORTED_METRICS.contains(source.getWriteableName())) { - return translateVSLeaf((ValuesSourceAggregationBuilder.LeafOnly)source, registry); + return translateVSLeaf((ValuesSourceAggregationBuilder.LeafOnly) source, registry); } else if (source.getWriteableName().equals(TermsAggregationBuilder.NAME)) { - return translateTerms((TermsAggregationBuilder)source, registry); + return translateTerms((TermsAggregationBuilder) source, registry); } else { - throw new IllegalArgumentException("Unable to translate aggregation tree into Rollup. Aggregation [" - + source.getName() + "] is of type [" + source.getClass().getSimpleName() + "] which is " + - "currently unsupported."); + throw new IllegalArgumentException( + "Unable to translate aggregation tree into Rollup. Aggregation [" + + source.getName() + + "] is of type [" + + source.getClass().getSimpleName() + + "] which is " + + "currently unsupported." 
+ ); } } @@ -193,12 +196,13 @@ public static List translateAggregation(AggregationBuilder s * * */ - private static List translateDateHistogram(DateHistogramAggregationBuilder source, - NamedWriteableRegistry registry) { + private static List translateDateHistogram( + DateHistogramAggregationBuilder source, + NamedWriteableRegistry registry + ) { return translateVSAggBuilder(source, registry, () -> { - DateHistogramAggregationBuilder rolledDateHisto - = new DateHistogramAggregationBuilder(source.getName()); + DateHistogramAggregationBuilder rolledDateHisto = new DateHistogramAggregationBuilder(source.getName()); if (source.getCalendarInterval() != null) { rolledDateHisto.calendarInterval(source.getCalendarInterval()); @@ -233,12 +237,10 @@ private static List translateDateHistogram(DateHistogramAggr * {@link #translateDateHistogram(DateHistogramAggregationBuilder, NamedWriteableRegistry)} for * a complete list of conventions, examples, etc */ - private static List translateHistogram(HistogramAggregationBuilder source, - NamedWriteableRegistry registry) { + private static List translateHistogram(HistogramAggregationBuilder source, NamedWriteableRegistry registry) { return translateVSAggBuilder(source, registry, () -> { - HistogramAggregationBuilder rolledHisto - = new HistogramAggregationBuilder(source.getName()); + HistogramAggregationBuilder rolledHisto = new HistogramAggregationBuilder(source.getName()); rolledHisto.interval(source.interval()); rolledHisto.offset(source.offset()); @@ -308,12 +310,10 @@ private static List translateHistogram(HistogramAggregationB * * */ - private static List translateTerms(TermsAggregationBuilder source, - NamedWriteableRegistry registry) { + private static List translateTerms(TermsAggregationBuilder source, NamedWriteableRegistry registry) { return translateVSAggBuilder(source, registry, () -> { - TermsAggregationBuilder rolledTerms - = new TermsAggregationBuilder(source.getName()); + TermsAggregationBuilder rolledTerms = new TermsAggregationBuilder(source.getName()); if (source.userValueTypeHint() != null) { rolledTerms.userValueTypeHint(source.userValueTypeHint()); } @@ -351,8 +351,11 @@ private static List translateTerms(TermsAggregationBuilder s * @param The type of ValueSourceAggBuilder that we are working with * @return the translated multi-bucket ValueSourceAggBuilder */ - private static > List - translateVSAggBuilder(T source, NamedWriteableRegistry registry, Supplier factory) { + private static > List translateVSAggBuilder( + T source, + NamedWriteableRegistry registry, + Supplier factory + ) { T rolled = factory.get(); @@ -367,8 +370,11 @@ private static List translateTerms(TermsAggregationBuilder s // Count is derived from a sum, e.g. 
// "my_date_histo._count": { "sum": { "field": "foo.date_histogram._count" } } } - rolled.subAggregation(new SumAggregationBuilder(RollupField.formatCountAggName(source.getName())) - .field(RollupField.formatFieldName(source, RollupField.COUNT_FIELD))); + rolled.subAggregation( + new SumAggregationBuilder(RollupField.formatCountAggName(source.getName())).field( + RollupField.formatFieldName(source, RollupField.COUNT_FIELD) + ) + ); return Collections.singletonList(rolled); } @@ -443,8 +449,10 @@ private static List translateTerms(TermsAggregationBuilder s * most of the leafs to easily clone them * @return The translated leaf aggregation */ - private static List translateVSLeaf(ValuesSourceAggregationBuilder.LeafOnly metric, - NamedWriteableRegistry registry) { + private static List translateVSLeaf( + ValuesSourceAggregationBuilder.LeafOnly metric, + NamedWriteableRegistry registry + ) { List rolledMetrics; @@ -461,8 +469,11 @@ private static List translateVSLeaf(ValuesSourceAggregationB // Count is derived from a sum, e.g. // "the_avg._count": { "sum" : { "field" : "some_field.avg._count" }} - rolledMetrics.add(new SumAggregationBuilder(RollupField.formatCountAggName(metric.getName())) - .field(RollupField.formatFieldName(metric, RollupField.COUNT_FIELD))); + rolledMetrics.add( + new SumAggregationBuilder(RollupField.formatCountAggName(metric.getName())).field( + RollupField.formatFieldName(metric, RollupField.COUNT_FIELD) + ) + ); return rolledMetrics; } @@ -473,13 +484,14 @@ private static List translateVSLeaf(ValuesSourceAggregationB try { output.writeString(metric.getType()); metric.writeTo(output); - try (StreamInput stream = output.bytes().streamInput(); - NamedWriteableAwareStreamInput in = - new NamedWriteableAwareStreamInput(stream, registry)) { + try ( + StreamInput stream = output.bytes().streamInput(); + NamedWriteableAwareStreamInput in = new NamedWriteableAwareStreamInput(stream, registry) + ) { - ValuesSourceAggregationBuilder serialized - = ((ValuesSourceAggregationBuilder)in.readNamedWriteable(AggregationBuilder.class)) - .field(RollupField.formatFieldName(metric, RollupField.VALUE)); + ValuesSourceAggregationBuilder serialized = ((ValuesSourceAggregationBuilder) in.readNamedWriteable( + AggregationBuilder.class + )).field(RollupField.formatFieldName(metric, RollupField.VALUE)); return Collections.singletonList(serialized); } diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/RollupResponseTranslator.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/RollupResponseTranslator.java index 1ccd48c512ea2..98796ea064403 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/RollupResponseTranslator.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/RollupResponseTranslator.java @@ -46,7 +46,6 @@ import java.util.Objects; import java.util.stream.Collectors; - /** * This class contains static utilities that combine the responses from an msearch * with rollup + non-rollup agg trees into a single, regular search response @@ -74,8 +73,10 @@ public static SearchResponse verifyResponse(MultiSearchResponse.Item normalRespo * See {@link #combineResponses(MultiSearchResponse.Item[], InternalAggregation.ReduceContext)} for more details * on the translation conventions */ - public static SearchResponse translateResponse(MultiSearchResponse.Item[] rolledMsearch, - InternalAggregation.ReduceContext reduceContext) throws Exception { + public static SearchResponse translateResponse( + 
MultiSearchResponse.Item[] rolledMsearch, + InternalAggregation.ReduceContext reduceContext + ) throws Exception { assert rolledMsearch.length > 0; List responses = new ArrayList<>(); @@ -85,9 +86,12 @@ public static SearchResponse translateResponse(MultiSearchResponse.Item[] rolled // If an index was deleted after execution, give a hint to the user that this is a transient error if (e instanceof IndexNotFoundException) { - throw new ResourceNotFoundException("Index [" + ((IndexNotFoundException) e).getIndex().getName() - + "] was not found, likely because it was deleted while the request was in-flight. " + - "Rollup does not support partial search results, please try the request again."); + throw new ResourceNotFoundException( + "Index [" + + ((IndexNotFoundException) e).getIndex().getName() + + "] was not found, likely because it was deleted while the request was in-flight. " + + "Rollup does not support partial search results, please try the request again." + ); } // Otherwise just throw @@ -198,8 +202,10 @@ public static SearchResponse translateResponse(MultiSearchResponse.Item[] rolled * * @param msearchResponses The responses from the msearch, where the first response is the live-index response */ - public static SearchResponse combineResponses(MultiSearchResponse.Item[] msearchResponses, - InternalAggregation.ReduceContext reduceContext) throws Exception { + public static SearchResponse combineResponses( + MultiSearchResponse.Item[] msearchResponses, + InternalAggregation.ReduceContext reduceContext + ) throws Exception { assert msearchResponses.length >= 2; @@ -212,9 +218,14 @@ public static SearchResponse combineResponses(MultiSearchResponse.Item[] msearch // If an index was deleted after execution, give a hint to the user that this is a transient error if (e instanceof IndexNotFoundException) { - throw new ResourceNotFoundException("Index [" + ((IndexNotFoundException) e).getIndex() + "] was not found, " + - "likely because it was deleted while the request was in-flight. Rollup does not support " + - "partial search results, please try the request again.", e); + throw new ResourceNotFoundException( + "Index [" + + ((IndexNotFoundException) e).getIndex() + + "] was not found, " + + "likely because it was deleted while the request was in-flight. Rollup does not support " + + "partial search results, please try the request again.", + e + ); } // Otherwise just throw @@ -230,7 +241,7 @@ public static SearchResponse combineResponses(MultiSearchResponse.Item[] msearch first = false; } - // If we only have a live index left, just return it directly. We know it can't be an error already + // If we only have a live index left, just return it directly. We know it can't be an error already if (rolledResponses.isEmpty() && liveResponse != null) { return liveResponse; } else if (rolledResponses.isEmpty()) { @@ -240,12 +251,15 @@ public static SearchResponse combineResponses(MultiSearchResponse.Item[] msearch return doCombineResponse(liveResponse, rolledResponses, reduceContext); } - private static SearchResponse doCombineResponse(SearchResponse liveResponse, List rolledResponses, - InternalAggregation.ReduceContext reduceContext) { + private static SearchResponse doCombineResponse( + SearchResponse liveResponse, + List rolledResponses, + InternalAggregation.ReduceContext reduceContext + ) { final InternalAggregations liveAggs = liveResponse != null - ? (InternalAggregations)liveResponse.getAggregations() - : InternalAggregations.EMPTY; + ? 
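The combine path relies on a positional convention in the msearch response: slot 0 is the live-index response and every later slot is a rollup-index response, and a deleted index is surfaced as a transient "retry" error rather than a partial result. A small standalone sketch of that convention (illustrative types, not the MultiSearchResponse classes):

    import java.util.ArrayList;
    import java.util.List;

    public class CombineConventionSketch {
        record Item(String index, boolean failed, boolean indexNotFound) {}

        static List<Item> splitRolled(Item[] items) {
            if (items.length < 2) {
                throw new IllegalArgumentException("expected a live response plus at least one rollup response");
            }
            List<Item> rolled = new ArrayList<>();
            for (int i = 1; i < items.length; i++) {   // item 0 is the live-index response
                Item item = items[i];
                if (item.failed() && item.indexNotFound()) {
                    throw new IllegalStateException("Index [" + item.index()
                        + "] was deleted in-flight; rollup does not support partial results, retry the request");
                }
                rolled.add(item);
            }
            return rolled;
        }
    }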
(InternalAggregations) liveResponse.getAggregations() + : InternalAggregations.EMPTY; int missingRollupAggs = rolledResponses.stream().mapToInt(searchResponse -> { if (searchResponse == null @@ -261,7 +275,7 @@ private static SearchResponse doCombineResponse(SearchResponse liveResponse, Lis // Return an empty response, but make sure we include all the shard, failure, etc stats return mergeFinalResponse(liveResponse, rolledResponses, InternalAggregations.EMPTY); } else if (missingRollupAggs > 0 && missingRollupAggs != rolledResponses.size()) { - // We were missing some but not all the aggs, unclear how to handle this. Bail. + // We were missing some but not all the aggs, unclear how to handle this. Bail. throw new RuntimeException("Expected to find aggregations in rollup response, but none found."); } @@ -270,19 +284,23 @@ private static SearchResponse doCombineResponse(SearchResponse liveResponse, Lis // it was a result from another shard InternalAggregations currentTree = InternalAggregations.EMPTY; InternalAggregation.ReduceContext finalReduceContext = InternalAggregation.ReduceContext.forFinalReduction( - reduceContext.bigArrays(), reduceContext.scriptService(), b -> {}, PipelineTree.EMPTY); + reduceContext.bigArrays(), + reduceContext.scriptService(), + b -> {}, + PipelineTree.EMPTY + ); for (SearchResponse rolledResponse : rolledResponses) { List unrolledAggs = new ArrayList<>(rolledResponse.getAggregations().asList().size()); for (Aggregation agg : rolledResponse.getAggregations()) { // We expect a filter agg here because the rollup convention is that all translated aggs - // will start with a filter, containing various agg-specific predicates. If there + // will start with a filter, containing various agg-specific predicates. If there // *isn't* a filter agg here, something has gone very wrong! 
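The "missing rollup aggs" bookkeeping above makes a three-way decision: all rollup responses empty, some empty, or none empty. A standalone sketch of that decision over plain maps (illustrative, not the InternalAggregations machinery):

    import java.util.List;
    import java.util.Map;

    public class MissingAggsSketch {
        static Map<String, Long> combine(List<Map<String, Long>> rolledAggs) {
            long missing = rolledAggs.stream().filter(aggs -> aggs == null || aggs.isEmpty()).count();
            if (missing == rolledAggs.size()) {
                return Map.of();    // nothing rolled up matched: return an empty agg tree (shard stats kept elsewhere)
            }
            if (missing > 0) {
                // Some responses carried aggs and some did not; unclear how to merge, so bail.
                throw new IllegalStateException("Expected aggregations in every rollup response");
            }
            // ... otherwise each response is unrolled and merged into the running tree
            return rolledAggs.get(0);
        }
    }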
if ((agg instanceof InternalFilter) == false) { - throw new RuntimeException("Expected [" +agg.getName() - + "] to be a FilterAggregation, but was [" - + agg.getClass().getSimpleName() + "]"); + throw new RuntimeException( + "Expected [" + agg.getName() + "] to be a FilterAggregation, but was [" + agg.getClass().getSimpleName() + "]" + ); } - unrolledAggs.addAll(unrollAgg(((InternalFilter)agg).getAggregations(), liveAggs, currentTree)); + unrolledAggs.addAll(unrollAgg(((InternalFilter) agg).getAggregations(), liveAggs, currentTree)); } // Iteratively merge in each new set of unrolled aggs, so that we can identify/fix overlapping doc_counts @@ -301,13 +319,16 @@ private static SearchResponse doCombineResponse(SearchResponse liveResponse, Lis return mergeFinalResponse(liveResponse, rolledResponses, currentTree); } - private static SearchResponse mergeFinalResponse(SearchResponse liveResponse, List rolledResponses, - InternalAggregations aggs) { + private static SearchResponse mergeFinalResponse( + SearchResponse liveResponse, + List rolledResponses, + InternalAggregations aggs + ) { int totalShards = rolledResponses.stream().mapToInt(SearchResponse::getTotalShards).sum(); int sucessfulShards = rolledResponses.stream().mapToInt(SearchResponse::getSuccessfulShards).sum(); int skippedShards = rolledResponses.stream().mapToInt(SearchResponse::getSkippedShards).sum(); - long took = rolledResponses.stream().mapToLong(r -> r.getTook().getMillis()).sum() ; + long took = rolledResponses.stream().mapToLong(r -> r.getTook().getMillis()).sum(); boolean isTimedOut = rolledResponses.stream().anyMatch(SearchResponse::isTimedOut); boolean isTerminatedEarly = rolledResponses.stream() @@ -325,12 +346,27 @@ private static SearchResponse mergeFinalResponse(SearchResponse liveResponse, Li numReducePhases += liveResponse.getNumReducePhases(); } - InternalSearchResponse combinedInternal = new InternalSearchResponse(SearchHits.empty(), aggs, null, null, - isTimedOut, isTerminatedEarly, numReducePhases); + InternalSearchResponse combinedInternal = new InternalSearchResponse( + SearchHits.empty(), + aggs, + null, + null, + isTimedOut, + isTerminatedEarly, + numReducePhases + ); // Shard failures are ignored atm, so returning an empty array is fine - return new SearchResponse(combinedInternal, null, totalShards, sucessfulShards, skippedShards, - took, ShardSearchFailure.EMPTY_ARRAY, rolledResponses.get(0).getClusters()); + return new SearchResponse( + combinedInternal, + null, + totalShards, + sucessfulShards, + skippedShards, + took, + ShardSearchFailure.EMPTY_ARRAY, + rolledResponses.get(0).getClusters() + ); } /** @@ -342,26 +378,27 @@ private static SearchResponse mergeFinalResponse(SearchResponse liveResponse, Li * * @return An unrolled aggregation that mimics the structure of `base`, allowing reduction */ - private static List unrollAgg(InternalAggregations rolled, InternalAggregations original, - InternalAggregations currentTree) { - return rolled.asList().stream() - .filter(subAgg -> subAgg.getName().endsWith("." + RollupField.COUNT_FIELD) == false) - .map(agg -> { - // During the translation process, some aggregations' doc_counts are stored in accessory - // `sum` metric aggs, so we may need to extract that. Unfortunately, structure of multibucket vs - // leaf metric is slightly different; multibucket count is stored per-bucket in a sub-agg, while - // metric is "next" to the metric as a sibling agg. 
- // - // So we only look for a count if this is not a multibucket, as multibuckets will handle - // the doc_count themselves on a per-bucket basis. - // - long count = -1; - if (agg instanceof InternalMultiBucketAggregation == false) { - count = getAggCount(agg, rolled.getAsMap()); - } + private static List unrollAgg( + InternalAggregations rolled, + InternalAggregations original, + InternalAggregations currentTree + ) { + return rolled.asList().stream().filter(subAgg -> subAgg.getName().endsWith("." + RollupField.COUNT_FIELD) == false).map(agg -> { + // During the translation process, some aggregations' doc_counts are stored in accessory + // `sum` metric aggs, so we may need to extract that. Unfortunately, structure of multibucket vs + // leaf metric is slightly different; multibucket count is stored per-bucket in a sub-agg, while + // metric is "next" to the metric as a sibling agg. + // + // So we only look for a count if this is not a multibucket, as multibuckets will handle + // the doc_count themselves on a per-bucket basis. + // + long count = -1; + if (agg instanceof InternalMultiBucketAggregation == false) { + count = getAggCount(agg, rolled.getAsMap()); + } - return unrollAgg((InternalAggregation)agg, original.get(agg.getName()), currentTree.get(agg.getName()), count); - }).collect(Collectors.toList()); + return unrollAgg((InternalAggregation) agg, original.get(agg.getName()), currentTree.get(agg.getName()), count); + }).collect(Collectors.toList()); } /** @@ -373,18 +410,30 @@ private static List unrollAgg(InternalAggregations rolled, * * @return An unrolled aggregation that mimics the structure of base, allowing reduction */ - protected static InternalAggregation unrollAgg(InternalAggregation rolled, InternalAggregation originalAgg, - InternalAggregation currentTree, long count) { + protected static InternalAggregation unrollAgg( + InternalAggregation rolled, + InternalAggregation originalAgg, + InternalAggregation currentTree, + long count + ) { if (rolled instanceof InternalMultiBucketAggregation) { - return unrollMultiBucket((InternalMultiBucketAggregation) rolled, (InternalMultiBucketAggregation) originalAgg, - (InternalMultiBucketAggregation) currentTree); + return unrollMultiBucket( + (InternalMultiBucketAggregation) rolled, + (InternalMultiBucketAggregation) originalAgg, + (InternalMultiBucketAggregation) currentTree + ); } else if (rolled instanceof SingleValue) { return unrollMetric((SingleValue) rolled, count); } else { - throw new RuntimeException("Unable to unroll aggregation tree. Aggregation [" - + rolled.getName() + "] is of type [" + rolled.getClass().getSimpleName() + "] which is " + - "currently unsupported."); + throw new RuntimeException( + "Unable to unroll aggregation tree. Aggregation [" + + rolled.getName() + + "] is of type [" + + rolled.getClass().getSimpleName() + + "] which is " + + "currently unsupported." + ); } } @@ -393,25 +442,33 @@ protected static InternalAggregation unrollAgg(InternalAggregation rolled, Inter * called by other internal methods in this class, rather than directly calling the per-type methods. 
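A standalone sketch of the filtering described above: accessory "<name>._count" sum aggs are rollup bookkeeping, so they are skipped during unrolling, and for a leaf metric the doc count is looked up from its sibling first. The agg names and values here are illustrative, not real response objects.

    import java.util.LinkedHashMap;
    import java.util.Map;

    public class UnrollFilterSketch {
        public static void main(String[] args) {
            Map<String, Double> rolled = new LinkedHashMap<>();
            rolled.put("the_avg", 70.0);          // value of the translated sum
            rolled.put("the_avg._count", 10.0);   // accessory count, not a true sub-agg

            for (Map.Entry<String, Double> agg : rolled.entrySet()) {
                if (agg.getKey().endsWith("._count")) {
                    continue;                      // bookkeeping only, never unrolled directly
                }
                Double count = rolled.get(agg.getKey() + "._count");
                long docCount = count == null ? -1 : count.longValue();   // -1 means "no accessory count"
                System.out.println(agg.getKey() + " unrolls with count " + docCount);
            }
        }
    }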
*/ @SuppressWarnings({ "unchecked", "rawtypes" }) - private static InternalAggregation unrollMultiBucket(InternalMultiBucketAggregation rolled, InternalMultiBucketAggregation original, - InternalMultiBucketAggregation currentTree) { + private static InternalAggregation unrollMultiBucket( + InternalMultiBucketAggregation rolled, + InternalMultiBucketAggregation original, + InternalMultiBucketAggregation currentTree + ) { // The only thing unique between all the multibucket agg is the type of bucket they // need, so this if/else simply creates specialized closures that return the appropriate - // bucket type. Otherwise the heavy-lifting is in + // bucket type. Otherwise the heavy-lifting is in // {@link #unrollMultiBucket(InternalMultiBucketAggregation, InternalMultiBucketAggregation, TriFunction)} if (rolled instanceof InternalDateHistogram) { return unrollMultiBucket(rolled, original, currentTree, (bucket, bucketCount, subAggs) -> { long key = ((InternalDateHistogram) rolled).getKey(bucket).longValue(); - DocValueFormat formatter = ((InternalDateHistogram.Bucket)bucket).getFormatter(); + DocValueFormat formatter = ((InternalDateHistogram.Bucket) bucket).getFormatter(); assert bucketCount >= 0; - return new InternalDateHistogram.Bucket(key, bucketCount, - ((InternalDateHistogram.Bucket) bucket).getKeyed(), formatter, subAggs); + return new InternalDateHistogram.Bucket( + key, + bucketCount, + ((InternalDateHistogram.Bucket) bucket).getKeyed(), + formatter, + subAggs + ); }); } else if (rolled instanceof InternalHistogram) { return unrollMultiBucket(rolled, original, currentTree, (bucket, bucketCount, subAggs) -> { long key = ((InternalHistogram) rolled).getKey(bucket).longValue(); - DocValueFormat formatter = ((InternalHistogram.Bucket)bucket).getFormatter(); + DocValueFormat formatter = ((InternalHistogram.Bucket) bucket).getFormatter(); assert bucketCount >= 0; return new InternalHistogram.Bucket(key, bucketCount, ((InternalHistogram.Bucket) bucket).getKeyed(), formatter, subAggs); }); @@ -420,20 +477,25 @@ private static InternalAggregation unrollMultiBucket(InternalMultiBucketAggregat BytesRef key = new BytesRef(bucket.getKeyAsString().getBytes(StandardCharsets.UTF_8)); assert bucketCount >= 0; - //TODO expose getFormatter(), keyed upstream in Core + // TODO expose getFormatter(), keyed upstream in Core return new StringTerms.Bucket(key, bucketCount, subAggs, false, 0, DocValueFormat.RAW); }); } else if (rolled instanceof LongTerms) { return unrollMultiBucket(rolled, original, currentTree, (bucket, bucketCount, subAggs) -> { - long key = (long)bucket.getKey(); + long key = (long) bucket.getKey(); assert bucketCount >= 0; - //TODO expose getFormatter(), keyed upstream in Core + // TODO expose getFormatter(), keyed upstream in Core return new LongTerms.Bucket(key, bucketCount, subAggs, false, 0, DocValueFormat.RAW); }); } else { - throw new RuntimeException("Unable to unroll aggregation tree. Aggregation [" - + rolled.getName() + "] is of type [" + rolled.getClass().getSimpleName() + "] which is " + - "currently unsupported."); + throw new RuntimeException( + "Unable to unroll aggregation tree. Aggregation [" + + rolled.getName() + + "] is of type [" + + rolled.getClass().getSimpleName() + + "] which is " + + "currently unsupported." 
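The per-type branches above differ only in how a bucket gets rebuilt, so each branch hands a small bucket factory (a TriFunction) to the shared unroll loop. A plain-Java sketch of that pattern with illustrative bucket types (not the InternalDateHistogram/InternalHistogram classes):

    import java.util.List;
    import java.util.stream.Collectors;

    public class BucketFactorySketch {
        @FunctionalInterface
        interface TriFunction<A, B, C, R> { R apply(A a, B b, C c); }

        record SourceBucket(long key, long docCount, String subAggs) {}
        record DateBucket(long key, long docCount, String subAggs) {}
        record HistoBucket(double key, long docCount, String subAggs) {}

        // The shared loop: walk the rolled buckets and let the caller decide how to re-create each one.
        static <R> List<R> unroll(List<SourceBucket> buckets, TriFunction<SourceBucket, Long, String, R> factory) {
            return buckets.stream()
                .map(b -> factory.apply(b, b.docCount(), b.subAggs()))
                .collect(Collectors.toList());
        }

        public static void main(String[] args) {
            List<SourceBucket> rolled = List.of(new SourceBucket(1000L, 3, "subs"));
            List<DateBucket> dates = unroll(rolled, (b, count, subs) -> new DateBucket(b.key(), count, subs));
            List<HistoBucket> histos = unroll(rolled, (b, count, subs) -> new HistoBucket((double) b.key(), count, subs));
            System.out.println(dates + " " + histos);
        }
    }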
+ ); } } @@ -445,11 +507,15 @@ private static InternalAggregation unrollMultiBucket(InternalMultiBucketAggregat * @param source The rolled aggregation that we wish to unroll * @param bucketFactory A Trifunction which generates new buckets for the given type of multibucket */ - private static , - B extends InternalBucket, - T extends InternalMultiBucketAggregation> - InternalAggregation unrollMultiBucket(T source, T original, T currentTree, - TriFunction bucketFactory) { + private static < + A extends InternalMultiBucketAggregation, + B extends InternalBucket, + T extends InternalMultiBucketAggregation> InternalAggregation unrollMultiBucket( + T source, + T original, + T currentTree, + TriFunction bucketFactory + ) { Map originalKeys = new HashMap<>(); Map currentKeys = new HashMap<>(); @@ -464,33 +530,36 @@ InternalAggregation unrollMultiBucket(T source, T original, T currentTree, // Iterate over the buckets in the multibucket List buckets = source.getBuckets() - .stream() - .filter(b -> originalKeys.containsKey(b.getKey()) == false) // If the original has this key, ignore the rolled version - .map(bucket -> { - - // Grab the value from the count agg (if it exists), which represents this bucket's doc_count - long bucketCount = getAggCount(source, bucket.getAggregations().getAsMap()); + .stream() + .filter(b -> originalKeys.containsKey(b.getKey()) == false) // If the original has this key, ignore the rolled version + .map(bucket -> { - // Don't generate buckets if the doc count is zero - if (bucketCount == 0) { - return null; - } + // Grab the value from the count agg (if it exists), which represents this bucket's doc_count + long bucketCount = getAggCount(source, bucket.getAggregations().getAsMap()); - // current, partially merged tree contains this key. Defer to the existing doc_count if it is non-zero - if (currentKeys.containsKey(bucket.getKey()) && currentKeys.get(bucket.getKey()).getDocCount() != 0) { - // Unlike above where we return null if doc_count is zero, we return a doc_count: 0 bucket - // here because it may have sub-aggs that need merging, whereas above the bucket was just empty/null - bucketCount = 0; - } + // Don't generate buckets if the doc count is zero + if (bucketCount == 0) { + return null; + } - // Then iterate over the subAggs in the bucket - InternalAggregations subAggs = unrollSubAggsFromMulti(bucket, originalKeys.get(bucket.getKey()), - currentKeys.get(bucket.getKey())); + // current, partially merged tree contains this key. 
Defer to the existing doc_count if it is non-zero + if (currentKeys.containsKey(bucket.getKey()) && currentKeys.get(bucket.getKey()).getDocCount() != 0) { + // Unlike above where we return null if doc_count is zero, we return a doc_count: 0 bucket + // here because it may have sub-aggs that need merging, whereas above the bucket was just empty/null + bucketCount = 0; + } - return bucketFactory.apply(bucket, bucketCount, subAggs); - }) - .filter(Objects::nonNull) - .collect(Collectors.toList()); + // Then iterate over the subAggs in the bucket + InternalAggregations subAggs = unrollSubAggsFromMulti( + bucket, + originalKeys.get(bucket.getKey()), + currentKeys.get(bucket.getKey()) + ); + + return bucketFactory.apply(bucket, bucketCount, subAggs); + }) + .filter(Objects::nonNull) + .collect(Collectors.toList()); return source.create(buckets); } @@ -501,8 +570,10 @@ InternalAggregation unrollMultiBucket(T source, T original, T currentTree, */ private static InternalAggregations unrollSubAggsFromMulti(InternalBucket bucket, InternalBucket original, InternalBucket currentTree) { // Iterate over the subAggs in each bucket - return InternalAggregations.from(bucket.getAggregations() - .asList().stream() + return InternalAggregations.from( + bucket.getAggregations() + .asList() + .stream() // Avoid any rollup count metrics, as that's not a true "sub-agg" but rather agg // added by the rollup for accounting purposes (e.g. doc_count) .filter(subAgg -> subAgg.getName().endsWith("." + RollupField.COUNT_FIELD) == false) @@ -521,7 +592,9 @@ private static InternalAggregations unrollSubAggsFromMulti(InternalBucket bucket } return unrollAgg((InternalAggregation) subAgg, originalSubAgg, currentSubAgg, count); - }).collect(Collectors.toList())); + }) + .collect(Collectors.toList()) + ); } private static InternalAggregation unrollMetric(SingleValue metric, long count) { @@ -536,14 +609,24 @@ private static InternalAggregation unrollMetric(SingleValue metric, long count) // If count is anything other than -1, this sum is actually an avg if (count != -1) { // Note: Avgs have a slightly different name to prevent collision with empty bucket defaults - return new InternalAvg(metric.getName().replace("." + RollupField.VALUE, ""), metric.value(), count, DocValueFormat.RAW, - metric.getMetadata()); + return new InternalAvg( + metric.getName().replace("." + RollupField.VALUE, ""), + metric.value(), + count, + DocValueFormat.RAW, + metric.getMetadata() + ); } return metric; } else { - throw new RuntimeException("Unable to unroll metric. Aggregation [" - + metric.getName() + "] is of type [" + metric.getClass().getSimpleName() + "] which is " + - "currently unsupported."); + throw new RuntimeException( + "Unable to unroll metric. Aggregation [" + + metric.getName() + + "] is of type [" + + metric.getClass().getSimpleName() + + "] which is " + + "currently unsupported." 
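The per-bucket rules above boil down to three checks before a rolled bucket is emitted. A standalone sketch over plain maps of bucket key to doc_count (illustrative, not the InternalMultiBucketAggregation code):

    import java.util.LinkedHashMap;
    import java.util.Map;

    public class UnrollBucketsSketch {
        static Map<String, Long> unrollBuckets(Map<String, Long> rolled, Map<String, Long> live, Map<String, Long> current) {
            Map<String, Long> out = new LinkedHashMap<>();
            rolled.forEach((key, docCount) -> {
                if (live.containsKey(key)) {
                    return;                        // 1. the live index already answered this bucket, drop the rolled one
                }
                if (docCount == 0) {
                    return;                        // 2. empty rolled bucket, nothing to contribute
                }
                // 3. the partially merged tree already has a non-zero count for this key, so emit
                //    the bucket with doc_count 0 (its sub-aggs may still need merging).
                long dc = current.getOrDefault(key, 0L) != 0 ? 0L : docCount;
                out.put(key, dc);
            });
            return out;
        }
    }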
+ ); } } @@ -551,8 +634,9 @@ private static long getAggCount(Aggregation agg, Map aggMap String countPath = null; if (agg.getType().equals(DateHistogramAggregationBuilder.NAME) - || agg.getType().equals(HistogramAggregationBuilder.NAME) - || agg.getType().equals(StringTerms.NAME) || agg.getType().equals(LongTerms.NAME)) { + || agg.getType().equals(HistogramAggregationBuilder.NAME) + || agg.getType().equals(StringTerms.NAME) + || agg.getType().equals(LongTerms.NAME)) { countPath = RollupField.formatCountAggName(agg.getName()); } else if (agg.getType().equals(SumAggregationBuilder.NAME)) { // Note: Avgs have a slightly different name to prevent collision with empty bucket defaults @@ -562,7 +646,7 @@ private static long getAggCount(Aggregation agg, Map aggMap if (countPath != null && aggMap.get(countPath) != null) { // we always set the count fields to Sum aggs, so this is safe assert aggMap.get(countPath) instanceof InternalSum; - return (long)((InternalSum) aggMap.get(countPath)).getValue(); + return (long) ((InternalSum) aggMap.get(countPath)).getValue(); } return -1; diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/RollupUsageTransportAction.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/RollupUsageTransportAction.java index fccb94ef71391..a15dc19bb4abf 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/RollupUsageTransportAction.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/RollupUsageTransportAction.java @@ -24,16 +24,31 @@ public class RollupUsageTransportAction extends XPackUsageFeatureTransportAction { @Inject - public RollupUsageTransportAction(TransportService transportService, ClusterService clusterService, ThreadPool threadPool, - ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver) { - super(XPackUsageFeatureAction.ROLLUP.name(), transportService, clusterService, - threadPool, actionFilters, indexNameExpressionResolver); + public RollupUsageTransportAction( + TransportService transportService, + ClusterService clusterService, + ThreadPool threadPool, + ActionFilters actionFilters, + IndexNameExpressionResolver indexNameExpressionResolver + ) { + super( + XPackUsageFeatureAction.ROLLUP.name(), + transportService, + clusterService, + threadPool, + actionFilters, + indexNameExpressionResolver + ); } @Override - protected void masterOperation(Task task, XPackUsageRequest request, ClusterState state, - ActionListener listener) { - // TODO expose the currently running rollup tasks on this node? Unclear the best way to do that + protected void masterOperation( + Task task, + XPackUsageRequest request, + ClusterState state, + ActionListener listener + ) { + // TODO expose the currently running rollup tasks on this node? 
Unclear the best way to do that RollupFeatureSetUsage usage = new RollupFeatureSetUsage(); listener.onResponse(new XPackUsageFeatureResponse(usage)); } diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/RollupIndexCaps.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/RollupIndexCaps.java index 1d31d025303dd..390743f0584d8 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/RollupIndexCaps.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/RollupIndexCaps.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.rollup.action; import org.elasticsearch.cluster.metadata.Metadata; -import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -16,6 +15,7 @@ import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.ObjectParser; +import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.xcontent.ToXContentFragment; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentHelper; @@ -41,8 +41,11 @@ public class RollupIndexCaps implements Writeable, ToXContentFragment { private static ParseField META_FIELD = new ParseField("_meta"); private static ParseField ROLLUP_FIELD = new ParseField(RollupField.ROLLUP_META); // Note: we ignore unknown fields since there may be unrelated metadata - private static final ObjectParser METADATA_PARSER - = new ObjectParser<>(GetRollupCapsAction.NAME, true, RollupIndexCaps::new); + private static final ObjectParser METADATA_PARSER = new ObjectParser<>( + GetRollupCapsAction.NAME, + true, + RollupIndexCaps::new + ); static { /* Rollup index metadata layout is: @@ -61,9 +64,11 @@ public class RollupIndexCaps implements Writeable, ToXContentFragment { } } */ - METADATA_PARSER.declareField((parser, rollupIndexCaps, aVoid) - -> rollupIndexCaps.setJobs(DocParser.DOC_PARSER.apply(parser, aVoid).jobs), - DOC_FIELD, ObjectParser.ValueType.OBJECT); + METADATA_PARSER.declareField( + (parser, rollupIndexCaps, aVoid) -> rollupIndexCaps.setJobs(DocParser.DOC_PARSER.apply(parser, aVoid).jobs), + DOC_FIELD, + ObjectParser.ValueType.OBJECT + ); } /** @@ -72,16 +77,19 @@ public class RollupIndexCaps implements Writeable, ToXContentFragment { private static class DocParser { public List jobs; // Ignore unknown fields because there could be unrelated doc types - private static final ConstructingObjectParser DOC_PARSER - = new ConstructingObjectParser<>("_rollup_doc_parser", true, a -> { + private static final ConstructingObjectParser DOC_PARSER = new ConstructingObjectParser<>( + "_rollup_doc_parser", + true, + a -> { List j = new ArrayList<>(); - for (Object o : (List)a[0]) { + for (Object o : (List) a[0]) { if (o instanceof RollupJobConfig) { j.add((RollupJobConfig) o); } } return new DocParser(j); - }); + } + ); static { DOC_PARSER.declareField(constructorArg(), MetaParser.META_PARSER::apply, META_FIELD, ObjectParser.ValueType.OBJECT); @@ -97,8 +105,11 @@ private static class DocParser { */ private static class MetaParser { // Ignore unknown fields because there could be unrelated _meta values - private static final ObjectParser, Void> META_PARSER - = new ObjectParser<>("_rollup_meta_parser", true, 
ArrayList::new); + private static final ObjectParser, Void> META_PARSER = new ObjectParser<>( + "_rollup_meta_parser", + true, + ArrayList::new + ); static { META_PARSER.declareField((parser, jobs, aVoid) -> { // "job-1" @@ -109,16 +120,17 @@ private static class MetaParser { } } - private List jobCaps = Collections.emptyList(); private String rollupIndexName; - private RollupIndexCaps() { } + private RollupIndexCaps() {} public RollupIndexCaps(String rollupIndexName, List jobs) { this.rollupIndexName = rollupIndexName; this.jobCaps = Objects.requireNonNull(jobs, "List of Rollup Jobs cannot be null") - .stream().map(RollupJobCaps::new).collect(Collectors.toList()); + .stream() + .map(RollupJobCaps::new) + .collect(Collectors.toList()); } RollupIndexCaps(StreamInput in) throws IOException { @@ -131,8 +143,9 @@ protected List getJobCaps() { } List getJobCapsByIndexPattern(String index) { - return jobCaps.stream().filter(cap -> index.equals(Metadata.ALL) || - cap.getIndexPattern().equals(index)).collect(Collectors.toList()); + return jobCaps.stream() + .filter(cap -> index.equals(Metadata.ALL) || cap.getIndexPattern().equals(index)) + .collect(Collectors.toList()); } void setJobs(List jobs) { @@ -150,11 +163,14 @@ public List getRollupIndices() { static RollupIndexCaps parseMetadataXContent(BytesReference source, String indexName) { XContentParser parser; try { - parser = XContentHelper.createParser(NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, - source, XContentType.JSON); + parser = XContentHelper.createParser( + NamedXContentRegistry.EMPTY, + LoggingDeprecationHandler.INSTANCE, + source, + XContentType.JSON + ); } catch (Exception e) { - throw new RuntimeException("Unable to parse mapping metadata for index [" - + indexName + "]", e); + throw new RuntimeException("Unable to parse mapping metadata for index [" + indexName + "]", e); } return METADATA_PARSER.apply(parser, null); } @@ -185,8 +201,7 @@ public boolean equals(Object other) { RollupIndexCaps that = (RollupIndexCaps) other; - return Objects.equals(this.rollupIndexName, that.rollupIndexName) - && Objects.equals(this.jobCaps, that.jobCaps); + return Objects.equals(this.rollupIndexName, that.rollupIndexName) && Objects.equals(this.jobCaps, that.jobCaps); } @Override diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportDeleteRollupJobAction.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportDeleteRollupJobAction.java index 24f9415254e1d..5493edd10d2f7 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportDeleteRollupJobAction.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportDeleteRollupJobAction.java @@ -29,13 +29,24 @@ import java.util.List; -public class TransportDeleteRollupJobAction extends TransportTasksAction { +public class TransportDeleteRollupJobAction extends TransportTasksAction< + RollupJobTask, + DeleteRollupJobAction.Request, + DeleteRollupJobAction.Response, + DeleteRollupJobAction.Response> { @Inject public TransportDeleteRollupJobAction(TransportService transportService, ActionFilters actionFilters, ClusterService clusterService) { - super(DeleteRollupJobAction.NAME, clusterService, transportService, actionFilters, DeleteRollupJobAction.Request::new, - DeleteRollupJobAction.Response::new, DeleteRollupJobAction.Response::new, ThreadPool.Names.SAME); + super( + DeleteRollupJobAction.NAME, + clusterService, + transportService, + 
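Once the job configs are parsed out of the index _meta, caps lookups are a simple filter: a request for "_all" returns every job, otherwise only jobs whose source index pattern matches the requested index. A plain-Java sketch with illustrative cap objects (not the RollupJobCaps class):

    import java.util.List;
    import java.util.stream.Collectors;

    public class CapsByPatternSketch {
        record JobCap(String jobId, String indexPattern) {}

        static List<JobCap> byIndexPattern(List<JobCap> caps, String index) {
            return caps.stream()
                .filter(cap -> "_all".equals(index) || cap.indexPattern().equals(index))
                .collect(Collectors.toList());
        }

        public static void main(String[] args) {
            List<JobCap> caps = List.of(new JobCap("job-1", "sensor-*"), new JobCap("job-2", "logs-*"));
            System.out.println(byIndexPattern(caps, "_all"));      // both jobs
            System.out.println(byIndexPattern(caps, "sensor-*"));  // job-1 only
        }
    }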
actionFilters, + DeleteRollupJobAction.Request::new, + DeleteRollupJobAction.Response::new, + DeleteRollupJobAction.Response::new, + ThreadPool.Names.SAME + ); } @Override @@ -60,31 +71,51 @@ protected void doExecute(Task task, DeleteRollupJobAction.Request request, Actio if (nodes.getMasterNode() == null) { listener.onFailure(new MasterNotDiscoveredException()); } else { - transportService.sendRequest(nodes.getMasterNode(), actionName, request, - new ActionListenerResponseHandler<>(listener, DeleteRollupJobAction.Response::new)); + transportService.sendRequest( + nodes.getMasterNode(), + actionName, + request, + new ActionListenerResponseHandler<>(listener, DeleteRollupJobAction.Response::new) + ); } } } @Override - protected void taskOperation(DeleteRollupJobAction.Request request, RollupJobTask jobTask, - ActionListener listener) { + protected void taskOperation( + DeleteRollupJobAction.Request request, + RollupJobTask jobTask, + ActionListener listener + ) { assert jobTask.getConfig().getId().equals(request.getId()); IndexerState state = ((RollupJobStatus) jobTask.getStatus()).getIndexerState(); - if (state.equals(IndexerState.STOPPED) ) { + if (state.equals(IndexerState.STOPPED)) { jobTask.onCancelled(); listener.onResponse(new DeleteRollupJobAction.Response(true)); } else { - listener.onFailure(new IllegalStateException("Could not delete job [" + request.getId() + "] because " + - "indexer state is [" + state + "]. Job must be [" + IndexerState.STOPPED + "] before deletion.")); + listener.onFailure( + new IllegalStateException( + "Could not delete job [" + + request.getId() + + "] because " + + "indexer state is [" + + state + + "]. Job must be [" + + IndexerState.STOPPED + + "] before deletion." + ) + ); } } @Override - protected DeleteRollupJobAction.Response newResponse(DeleteRollupJobAction.Request request, List tasks, - List taskOperationFailures, - List failedNodeExceptions) { + protected DeleteRollupJobAction.Response newResponse( + DeleteRollupJobAction.Request request, + List tasks, + List taskOperationFailures, + List failedNodeExceptions + ) { // There should theoretically only be one task running the rollup job // If there are more, in production it should be ok as long as they are acknowledge shutting down. // But in testing we'd like to know there were more than one hence the assert diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportGetRollupCapsAction.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportGetRollupCapsAction.java index 36dd1cce5f312..9e0ddae5b527c 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportGetRollupCapsAction.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportGetRollupCapsAction.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.rollup.action; import com.carrotsearch.hppc.cursors.ObjectObjectCursor; + import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; @@ -46,7 +47,7 @@ protected void doExecute(Task task, GetRollupCapsAction.Request request, ActionL } static Map getCaps(String indexPattern, ImmutableOpenMap indices) { - Map > allCaps = new TreeMap<>(); + Map> allCaps = new TreeMap<>(); for (ObjectObjectCursor entry : indices) { // Does this index have rollup metadata? 
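The delete path above enforces a single precondition: the job's indexer must already be STOPPED, otherwise the delete is rejected so in-flight work is not torn down. A minimal sketch of that check; the enum here is illustrative, not the shared IndexerState type.

    public class DeletePreconditionSketch {
        enum IndexerState { STARTED, INDEXING, STOPPING, STOPPED, ABORTING }

        static void deleteJob(String jobId, IndexerState state) {
            if (state != IndexerState.STOPPED) {
                throw new IllegalStateException("Could not delete job [" + jobId + "] because indexer state is ["
                    + state + "]. Job must be [STOPPED] before deletion.");
            }
            // ... otherwise cancel the task and acknowledge the delete
        }
    }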
@@ -62,11 +63,10 @@ static Map getCaps(String indexPattern, ImmutableOpen } jobCaps.forEach(jobCap -> { - String pattern = indexPattern.equals(Metadata.ALL) - ? jobCap.getIndexPattern() : indexPattern; + String pattern = indexPattern.equals(Metadata.ALL) ? jobCap.getIndexPattern() : indexPattern; // Do we already have an entry for this index pattern? - List indexCaps = allCaps.get(pattern); + List indexCaps = allCaps.get(pattern); if (indexCaps == null) { indexCaps = new ArrayList<>(); } @@ -79,8 +79,7 @@ static Map getCaps(String indexPattern, ImmutableOpen // Convert the mutable lists into the RollableIndexCaps return allCaps.entrySet() .stream() - .collect(Collectors.toMap(Map.Entry::getKey, - e -> new RollableIndexCaps(e.getKey(), e.getValue()))); + .collect(Collectors.toMap(Map.Entry::getKey, e -> new RollableIndexCaps(e.getKey(), e.getValue()))); } static Optional findRollupIndexCaps(String indexName, IndexMetadata indexMetadata) { diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportGetRollupIndexCapsAction.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportGetRollupIndexCapsAction.java index 3efbe47b4dd9d..82d803a0f19d2 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportGetRollupIndexCapsAction.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportGetRollupIndexCapsAction.java @@ -28,57 +28,64 @@ import java.util.stream.Collectors; import java.util.stream.StreamSupport; -public class TransportGetRollupIndexCapsAction extends HandledTransportAction { private final ClusterService clusterService; private final IndexNameExpressionResolver resolver; @Inject - public TransportGetRollupIndexCapsAction(TransportService transportService, ClusterService clusterService, - ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver) { + public TransportGetRollupIndexCapsAction( + TransportService transportService, + ClusterService clusterService, + ActionFilters actionFilters, + IndexNameExpressionResolver indexNameExpressionResolver + ) { super(GetRollupIndexCapsAction.NAME, transportService, actionFilters, GetRollupIndexCapsAction.Request::new); this.clusterService = clusterService; this.resolver = indexNameExpressionResolver; } @Override - protected void doExecute(Task task, GetRollupIndexCapsAction.Request request, - ActionListener listener) { + protected void doExecute( + Task task, + GetRollupIndexCapsAction.Request request, + ActionListener listener + ) { - String[] indices = resolver.concreteIndexNames(clusterService.state(), - request.indicesOptions(), request); - Map allCaps = getCapsByRollupIndex(Arrays.asList(indices), - clusterService.state().getMetadata().indices()); + String[] indices = resolver.concreteIndexNames(clusterService.state(), request.indicesOptions(), request); + Map allCaps = getCapsByRollupIndex( + Arrays.asList(indices), + clusterService.state().getMetadata().indices() + ); listener.onResponse(new GetRollupIndexCapsAction.Response(allCaps)); } - static Map getCapsByRollupIndex(List resolvedIndexNames, - ImmutableOpenMap indices) { - Map > allCaps = new TreeMap<>(); + static Map getCapsByRollupIndex( + List resolvedIndexNames, + ImmutableOpenMap indices + ) { + Map> allCaps = new TreeMap<>(); - StreamSupport.stream(indices.spliterator(), false) - .filter(entry -> resolvedIndexNames.contains(entry.key)) - .forEach(entry -> { - // Does this index have rollup metadata? 
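getCaps groups job caps into a sorted map keyed by index pattern, substituting the caller's pattern when it is not "_all". A standalone sketch of that grouping (illustrative record type, computeIfAbsent in place of the get/null-check/put in the real code):

    import java.util.ArrayList;
    import java.util.List;
    import java.util.Map;
    import java.util.TreeMap;

    public class GroupCapsSketch {
        record JobCap(String indexPattern, String jobId) {}

        static Map<String, List<String>> groupByPattern(List<JobCap> jobs, String requested) {
            // TreeMap keeps the response keys in a stable, sorted order.
            Map<String, List<String>> allCaps = new TreeMap<>();
            for (JobCap job : jobs) {
                String pattern = "_all".equals(requested) ? job.indexPattern() : requested;
                allCaps.computeIfAbsent(pattern, p -> new ArrayList<>()).add(job.jobId());
            }
            return allCaps;
        }
    }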
- TransportGetRollupCapsAction.findRollupIndexCaps(entry.key, entry.value) - .ifPresent(cap -> { - cap.getJobCaps().forEach(jobCap -> { - // Do we already have an entry for this index? - List indexCaps = allCaps.get(jobCap.getRollupIndex()); - if (indexCaps == null) { - indexCaps = new ArrayList<>(); - } - indexCaps.add(jobCap); - allCaps.put(jobCap.getRollupIndex(), indexCaps); - }); - }); + StreamSupport.stream(indices.spliterator(), false).filter(entry -> resolvedIndexNames.contains(entry.key)).forEach(entry -> { + // Does this index have rollup metadata? + TransportGetRollupCapsAction.findRollupIndexCaps(entry.key, entry.value).ifPresent(cap -> { + cap.getJobCaps().forEach(jobCap -> { + // Do we already have an entry for this index? + List indexCaps = allCaps.get(jobCap.getRollupIndex()); + if (indexCaps == null) { + indexCaps = new ArrayList<>(); + } + indexCaps.add(jobCap); + allCaps.put(jobCap.getRollupIndex(), indexCaps); + }); }); + }); // Convert the mutable lists into the RollableIndexCaps return allCaps.entrySet() .stream() - .collect(Collectors.toMap(Map.Entry::getKey, - e -> new RollableIndexCaps(e.getKey(), e.getValue()))); + .collect(Collectors.toMap(Map.Entry::getKey, e -> new RollableIndexCaps(e.getKey(), e.getValue()))); } } diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportGetRollupJobAction.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportGetRollupJobAction.java index 15cc02cdddd9f..69372dfdf8019 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportGetRollupJobAction.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportGetRollupJobAction.java @@ -32,13 +32,24 @@ import java.util.List; import java.util.stream.Collectors; -public class TransportGetRollupJobAction extends TransportTasksAction { +public class TransportGetRollupJobAction extends TransportTasksAction< + RollupJobTask, + GetRollupJobsAction.Request, + GetRollupJobsAction.Response, + GetRollupJobsAction.Response> { @Inject public TransportGetRollupJobAction(TransportService transportService, ActionFilters actionFilters, ClusterService clusterService) { - super(GetRollupJobsAction.NAME, clusterService, transportService, actionFilters, GetRollupJobsAction.Request::new, - GetRollupJobsAction.Response::new, GetRollupJobsAction.Response::new, ThreadPool.Names.SAME); + super( + GetRollupJobsAction.NAME, + clusterService, + transportService, + actionFilters, + GetRollupJobsAction.Request::new, + GetRollupJobsAction.Response::new, + GetRollupJobsAction.Response::new, + ThreadPool.Names.SAME + ); } @Override @@ -62,8 +73,12 @@ protected void doExecute(Task task, GetRollupJobsAction.Request request, ActionL if (nodes.getMasterNode() == null) { listener.onFailure(new MasterNotDiscoveredException()); } else { - transportService.sendRequest(nodes.getMasterNode(), actionName, request, - new ActionListenerResponseHandler<>(listener, GetRollupJobsAction.Response::new)); + transportService.sendRequest( + nodes.getMasterNode(), + actionName, + request, + new ActionListenerResponseHandler<>(listener, GetRollupJobsAction.Response::new) + ); } } } @@ -80,8 +95,8 @@ static boolean stateHasRollupJobs(GetRollupJobsAction.Request request, ClusterSt // persistent tasks and see if at least once has a RollupJob param if (request.getId().equals(Metadata.ALL)) { hasRollupJobs = pTasksMeta.tasks() - .stream() - .anyMatch(persistentTask -> 
persistentTask.getTaskName().equals(RollupField.TASK_NAME)); + .stream() + .anyMatch(persistentTask -> persistentTask.getTaskName().equals(RollupField.TASK_NAME)); } else if (pTasksMeta.getTask(request.getId()) != null) { // If we're looking for a single job, we can just check directly @@ -92,16 +107,22 @@ static boolean stateHasRollupJobs(GetRollupJobsAction.Request request, ClusterSt } @Override - protected void taskOperation(GetRollupJobsAction.Request request, RollupJobTask jobTask, - ActionListener listener) { + protected void taskOperation( + GetRollupJobsAction.Request request, + RollupJobTask jobTask, + ActionListener listener + ) { List jobs = Collections.emptyList(); assert jobTask.getConfig().getId().equals(request.getId()) || request.getId().equals(Metadata.ALL); // Little extra insurance, make sure we only return jobs that aren't cancelled if (jobTask.isCancelled() == false) { - GetRollupJobsAction.JobWrapper wrapper = new GetRollupJobsAction.JobWrapper(jobTask.getConfig(), jobTask.getStats(), - (RollupJobStatus) jobTask.getStatus()); + GetRollupJobsAction.JobWrapper wrapper = new GetRollupJobsAction.JobWrapper( + jobTask.getConfig(), + jobTask.getStats(), + (RollupJobStatus) jobTask.getStatus() + ); jobs = Collections.singletonList(wrapper); } @@ -109,11 +130,16 @@ protected void taskOperation(GetRollupJobsAction.Request request, RollupJobTask } @Override - protected GetRollupJobsAction.Response newResponse(GetRollupJobsAction.Request request, List tasks, - List taskOperationFailures, - List failedNodeExceptions) { - List jobs = tasks.stream().map(GetRollupJobsAction.Response::getJobs) - .flatMap(Collection::stream).collect(Collectors.toList()); + protected GetRollupJobsAction.Response newResponse( + GetRollupJobsAction.Request request, + List tasks, + List taskOperationFailures, + List failedNodeExceptions + ) { + List jobs = tasks.stream() + .map(GetRollupJobsAction.Response::getJobs) + .flatMap(Collection::stream) + .collect(Collectors.toList()); return new GetRollupJobsAction.Response(jobs, taskOperationFailures, failedNodeExceptions); } diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportPutRollupJobAction.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportPutRollupJobAction.java index 885dde4dad0ba..c2b7c4b97ab8b 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportPutRollupJobAction.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportPutRollupJobAction.java @@ -32,14 +32,14 @@ import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.MappingMetadata; import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.logging.DeprecationCategory; import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.time.DateUtils; -import org.elasticsearch.core.TimeValue; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.core.CheckedConsumer; +import org.elasticsearch.core.TimeValue; import org.elasticsearch.persistent.PersistentTasksCustomMetadata; import org.elasticsearch.persistent.PersistentTasksService; import org.elasticsearch.rest.RestStatus; @@ -69,24 +69,40 @@ public class TransportPutRollupJobAction extends 
AcknowledgedTransportMasterNode private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(TransportPutRollupJobAction.class); @Inject - public TransportPutRollupJobAction(TransportService transportService, ThreadPool threadPool, - ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, - ClusterService clusterService, - PersistentTasksService persistentTasksService, Client client) { - super(PutRollupJobAction.NAME, transportService, clusterService, threadPool, actionFilters, - PutRollupJobAction.Request::new, indexNameExpressionResolver, ThreadPool.Names.SAME); + public TransportPutRollupJobAction( + TransportService transportService, + ThreadPool threadPool, + ActionFilters actionFilters, + IndexNameExpressionResolver indexNameExpressionResolver, + ClusterService clusterService, + PersistentTasksService persistentTasksService, + Client client + ) { + super( + PutRollupJobAction.NAME, + transportService, + clusterService, + threadPool, + actionFilters, + PutRollupJobAction.Request::new, + indexNameExpressionResolver, + ThreadPool.Names.SAME + ); this.persistentTasksService = persistentTasksService; this.client = client; } @Override - protected void masterOperation(Task task, PutRollupJobAction.Request request, ClusterState clusterState, - ActionListener listener) { + protected void masterOperation( + Task task, + PutRollupJobAction.Request request, + ClusterState clusterState, + ActionListener listener + ) { XPackPlugin.checkReadyForXPackCustomMetadata(clusterState); checkForDeprecatedTZ(request); - FieldCapabilitiesRequest fieldCapsRequest = new FieldCapabilitiesRequest() - .indices(request.getConfig().getIndexPattern()) + FieldCapabilitiesRequest fieldCapsRequest = new FieldCapabilitiesRequest().indices(request.getConfig().getIndexPattern()) .fields(request.getConfig().getAllFields().toArray(new String[0])); fieldCapsRequest.setParentTask(clusterService.localNode().getId(), task.getId()); @@ -106,9 +122,19 @@ static void checkForDeprecatedTZ(PutRollupJobAction.Request request) { String timeZone = request.getConfig().getGroupConfig().getDateHistogram().getTimeZone(); String modernTZ = DateUtils.DEPRECATED_LONG_TIMEZONES.get(timeZone); if (modernTZ != null) { - deprecationLogger.deprecate(DeprecationCategory.PARSING, "deprecated_timezone", - "Creating Rollup job [" + request.getConfig().getId() + "] with timezone [" - + timeZone + "], but [" + timeZone + "] has been deprecated by the IANA. Use [" + modernTZ +"] instead."); + deprecationLogger.deprecate( + DeprecationCategory.PARSING, + "deprecated_timezone", + "Creating Rollup job [" + + request.getConfig().getId() + + "] with timezone [" + + timeZone + + "], but [" + + timeZone + + "] has been deprecated by the IANA. Use [" + + modernTZ + + "] instead." 
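checkForDeprecatedTZ is a straight table lookup: if the job's timezone appears in the deprecated-long-timezone map, a deprecation warning naming the modern IANA id is emitted, but the job is still created. A plain-Java sketch of that check; the single mapping entry below is an example only, not the actual DateUtils.DEPRECATED_LONG_TIMEZONES table.

    import java.util.Map;

    public class DeprecatedTzSketch {
        static final Map<String, String> DEPRECATED = Map.of("Canada/Mountain", "America/Edmonton"); // example entry

        static void warnIfDeprecated(String jobId, String timeZone) {
            String modern = DEPRECATED.get(timeZone);
            if (modern != null) {
                System.err.println("Creating Rollup job [" + jobId + "] with timezone [" + timeZone
                    + "], but [" + timeZone + "] has been deprecated by the IANA. Use [" + modern + "] instead.");
            }
        }
    }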
+ ); } } @@ -118,8 +144,13 @@ private static RollupJob createRollupJob(RollupJobConfig config, ThreadPool thre return new RollupJob(config, filteredHeaders); } - static void createIndex(RollupJob job, ActionListener listener, - PersistentTasksService persistentTasksService, Client client, Logger logger) { + static void createIndex( + RollupJob job, + ActionListener listener, + PersistentTasksService persistentTasksService, + Client client, + Logger logger + ) { CreateIndexRequest request = new CreateIndexRequest(job.getConfig().getRollupIndex()); try { @@ -130,56 +161,64 @@ static void createIndex(RollupJob job, ActionListener list return; } - client.execute(CreateIndexAction.INSTANCE, request, - ActionListener.wrap(createIndexResponse -> startPersistentTask(job, listener, persistentTasksService), e -> { - if (e instanceof ResourceAlreadyExistsException) { - logger.debug("Rolled index already exists for rollup job [" + job.getConfig().getId() + "], updating metadata."); - updateMapping(job, listener, persistentTasksService, client, logger); - } else { - String msg = "Could not create index for rollup job [" + job.getConfig().getId() + "]"; - logger.error(msg); - listener.onFailure(new RuntimeException(msg, e)); - } - })); + client.execute( + CreateIndexAction.INSTANCE, + request, + ActionListener.wrap(createIndexResponse -> startPersistentTask(job, listener, persistentTasksService), e -> { + if (e instanceof ResourceAlreadyExistsException) { + logger.debug("Rolled index already exists for rollup job [" + job.getConfig().getId() + "], updating metadata."); + updateMapping(job, listener, persistentTasksService, client, logger); + } else { + String msg = "Could not create index for rollup job [" + job.getConfig().getId() + "]"; + logger.error(msg); + listener.onFailure(new RuntimeException(msg, e)); + } + }) + ); } private static XContentBuilder createMappings(RollupJobConfig config) throws IOException { return XContentBuilder.builder(XContentType.JSON.xContent()) .startObject() - .startObject("mappings") - .startObject("_doc") - .startObject("_meta") - .field(Rollup.ROLLUP_TEMPLATE_VERSION_FIELD, Version.CURRENT.toString()) - .startObject("_rollup") - .field(config.getId(), config) - .endObject() - .endObject() - .startArray("dynamic_templates") - .startObject() - .startObject("strings") - .field("match_mapping_type", "string") - .startObject("mapping") - .field("type", "keyword") - .endObject() - .endObject() - .endObject() - .startObject() - .startObject("date_histograms") - .field("path_match", "*.date_histogram.timestamp") - .startObject("mapping") - .field("type", "date") - .endObject() - .endObject() - .endObject() - .endArray() - .endObject() - .endObject() + .startObject("mappings") + .startObject("_doc") + .startObject("_meta") + .field(Rollup.ROLLUP_TEMPLATE_VERSION_FIELD, Version.CURRENT.toString()) + .startObject("_rollup") + .field(config.getId(), config) + .endObject() + .endObject() + .startArray("dynamic_templates") + .startObject() + .startObject("strings") + .field("match_mapping_type", "string") + .startObject("mapping") + .field("type", "keyword") + .endObject() + .endObject() + .endObject() + .startObject() + .startObject("date_histograms") + .field("path_match", "*.date_histogram.timestamp") + .startObject("mapping") + .field("type", "date") + .endObject() + .endObject() + .endObject() + .endArray() + .endObject() + .endObject() .endObject(); } @SuppressWarnings("unchecked") - static void updateMapping(RollupJob job, ActionListener listener, - PersistentTasksService 
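createMappings above produces a mapping whose _meta block records the rollup template version plus the job config under _rollup, and whose dynamic templates map strings to keyword and *.date_histogram.timestamp fields to date. A sketch of the same shape built as nested maps instead of an XContentBuilder; the "rollup-version" key name is an assumption for the version field, and the job body is a placeholder.

    import java.util.List;
    import java.util.Map;

    public class RollupMappingSketch {
        static Map<String, Object> mappings(String jobId, Object jobConfig, String version) {
            return Map.of("mappings", Map.of("_doc", Map.of(
                "_meta", Map.of(
                    "rollup-version", version,              // assumed key name for the template version field
                    "_rollup", Map.of(jobId, jobConfig)     // job config stored under its own id
                ),
                "dynamic_templates", List.of(
                    Map.of("strings", Map.of(
                        "match_mapping_type", "string",
                        "mapping", Map.of("type", "keyword"))),
                    Map.of("date_histograms", Map.of(
                        "path_match", "*.date_histogram.timestamp",
                        "mapping", Map.of("type", "date")))
                )
            )));
        }
    }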
persistentTasksService, Client client, Logger logger) { + static void updateMapping( + RollupJob job, + ActionListener listener, + PersistentTasksService persistentTasksService, + Client client, + Logger logger + ) { final String indexName = job.getConfig().getRollupIndex(); @@ -187,8 +226,10 @@ static void updateMapping(RollupJob job, ActionListener li MappingMetadata mappings = getMappingResponse.getMappings().get(indexName); Object m = mappings.getSourceAsMap().get("_meta"); if (m == null) { - String msg = "Rollup data cannot be added to existing indices that contain non-rollup data (expected " + - "to find _meta key in mapping of rollup index [" + indexName + "] but not found)."; + String msg = "Rollup data cannot be added to existing indices that contain non-rollup data (expected " + + "to find _meta key in mapping of rollup index [" + + indexName + + "] but not found)."; logger.error(msg); listener.onFailure(new RuntimeException(msg)); return; @@ -196,26 +237,31 @@ static void updateMapping(RollupJob job, ActionListener li Map metadata = (Map) m; if (metadata.get(RollupField.ROLLUP_META) == null) { - String msg = "Rollup data cannot be added to existing indices that contain non-rollup data (expected " + - "to find rollup meta key [" + RollupField.ROLLUP_META + "] in mapping of rollup index [" - + indexName + "] but not found)."; + String msg = "Rollup data cannot be added to existing indices that contain non-rollup data (expected " + + "to find rollup meta key [" + + RollupField.ROLLUP_META + + "] in mapping of rollup index [" + + indexName + + "] but not found)."; logger.error(msg); listener.onFailure(new RuntimeException(msg)); return; } - Map rollupMeta = (Map)((Map) m).get(RollupField.ROLLUP_META); + Map rollupMeta = (Map) ((Map) m).get(RollupField.ROLLUP_META); - String stringVersion = (String)((Map) m).get(Rollup.ROLLUP_TEMPLATE_VERSION_FIELD); + String stringVersion = (String) ((Map) m).get(Rollup.ROLLUP_TEMPLATE_VERSION_FIELD); if (stringVersion == null) { - listener.onFailure(new IllegalStateException("Could not determine version of existing rollup metadata for index [" - + indexName + "]")); + listener.onFailure( + new IllegalStateException("Could not determine version of existing rollup metadata for index [" + indexName + "]") + ); return; } if (rollupMeta.get(job.getConfig().getId()) != null) { - String msg = "Cannot create rollup job [" + job.getConfig().getId() - + "] because job was previously created (existing metadata)."; + String msg = "Cannot create rollup job [" + + job.getConfig().getId() + + "] because job was previously created (existing metadata)."; logger.error(msg); listener.onFailure(new ElasticsearchStatusException(msg, RestStatus.CONFLICT)); return; @@ -227,56 +273,74 @@ static void updateMapping(RollupJob job, ActionListener li newMapping.put("_meta", metadata); PutMappingRequest request = new PutMappingRequest(indexName); request.source(newMapping); - client.execute(PutMappingAction.INSTANCE, request, - ActionListener.wrap(putMappingResponse -> startPersistentTask(job, listener, persistentTasksService), - listener::onFailure)); + client.execute( + PutMappingAction.INSTANCE, + request, + ActionListener.wrap(putMappingResponse -> startPersistentTask(job, listener, persistentTasksService), listener::onFailure) + ); }; GetMappingsRequest request = new GetMappingsRequest(); - client.execute(GetMappingsAction.INSTANCE, request, ActionListener.wrap(getMappingResponseHandler, - e -> { - String msg = "Could not update mappings for rollup job [" + 
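updateMapping runs a chain of validations against the existing index's _meta before merging the new job in: _meta must exist, the rollup meta key must exist, the template version must be readable, and the job id must not already be present. A standalone sketch of that chain over a plain map (illustrative, not the GetMappings/PutMappings flow; the "rollup-version" key is assumed as above):

    import java.util.Map;

    public class UpdateMappingChecksSketch {
        @SuppressWarnings("unchecked")
        static void validate(Map<String, Object> meta, String jobId, String indexName) {
            if (meta == null) {
                throw new IllegalStateException("index [" + indexName + "] contains non-rollup data (no _meta key)");
            }
            Object rollupMeta = meta.get("_rollup");
            if (rollupMeta == null) {
                throw new IllegalStateException("index [" + indexName + "] contains non-rollup data (no _rollup key)");
            }
            if (meta.get("rollup-version") == null) {
                throw new IllegalStateException("could not determine rollup metadata version for [" + indexName + "]");
            }
            if (((Map<String, Object>) rollupMeta).containsKey(jobId)) {
                throw new IllegalStateException("job [" + jobId + "] was previously created (existing metadata)");
            }
            // ... otherwise the job config is added under _meta._rollup and the mapping is updated
        }
    }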
job.getConfig().getId() + "]"; - logger.error(msg); - listener.onFailure(new RuntimeException(msg, e)); - })); + client.execute(GetMappingsAction.INSTANCE, request, ActionListener.wrap(getMappingResponseHandler, e -> { + String msg = "Could not update mappings for rollup job [" + job.getConfig().getId() + "]"; + logger.error(msg); + listener.onFailure(new RuntimeException(msg, e)); + })); } - static void startPersistentTask(RollupJob job, ActionListener listener, - PersistentTasksService persistentTasksService) { + static void startPersistentTask( + RollupJob job, + ActionListener listener, + PersistentTasksService persistentTasksService + ) { assertNoAuthorizationHeader(job.getHeaders()); - persistentTasksService.sendStartRequest(job.getConfig().getId(), RollupField.TASK_NAME, job, - ActionListener.wrap( - rollupConfigPersistentTask -> waitForRollupStarted(job, listener, persistentTasksService), - e -> { - if (e instanceof ResourceAlreadyExistsException) { - e = new ElasticsearchStatusException("Cannot create job [" + job.getConfig().getId() + - "] because it has already been created (task exists)", RestStatus.CONFLICT, e); - } - listener.onFailure(e); - })); + persistentTasksService.sendStartRequest( + job.getConfig().getId(), + RollupField.TASK_NAME, + job, + ActionListener.wrap(rollupConfigPersistentTask -> waitForRollupStarted(job, listener, persistentTasksService), e -> { + if (e instanceof ResourceAlreadyExistsException) { + e = new ElasticsearchStatusException( + "Cannot create job [" + job.getConfig().getId() + "] because it has already been created (task exists)", + RestStatus.CONFLICT, + e + ); + } + listener.onFailure(e); + }) + ); } - - private static void waitForRollupStarted(RollupJob job, ActionListener listener, - PersistentTasksService persistentTasksService) { - persistentTasksService.waitForPersistentTaskCondition(job.getConfig().getId(), Objects::nonNull, job.getConfig().getTimeout(), - new PersistentTasksService.WaitForPersistentTaskListener() { - @Override - public void onResponse(PersistentTasksCustomMetadata.PersistentTask task) { - listener.onResponse(AcknowledgedResponse.TRUE); - } - - @Override - public void onFailure(Exception e) { - listener.onFailure(e); - } - - @Override - public void onTimeout(TimeValue timeout) { - listener.onFailure(new ElasticsearchException("Creation of task for Rollup Job ID [" - + job.getConfig().getId() + "] timed out after [" + timeout + "]")); - } - }); + private static void waitForRollupStarted( + RollupJob job, + ActionListener listener, + PersistentTasksService persistentTasksService + ) { + persistentTasksService.waitForPersistentTaskCondition( + job.getConfig().getId(), + Objects::nonNull, + job.getConfig().getTimeout(), + new PersistentTasksService.WaitForPersistentTaskListener() { + @Override + public void onResponse(PersistentTasksCustomMetadata.PersistentTask task) { + listener.onResponse(AcknowledgedResponse.TRUE); + } + + @Override + public void onFailure(Exception e) { + listener.onFailure(e); + } + + @Override + public void onTimeout(TimeValue timeout) { + listener.onFailure( + new ElasticsearchException( + "Creation of task for Rollup Job ID [" + job.getConfig().getId() + "] timed out after [" + timeout + "]" + ) + ); + } + } + ); } @Override diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportRollupSearchAction.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportRollupSearchAction.java index 8087fcce2a64f..7af3eb7c8881a 100644 --- 
a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportRollupSearchAction.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportRollupSearchAction.java @@ -84,9 +84,16 @@ public class TransportRollupSearchAction extends TransportAction { InternalAggregation.ReduceContext context = InternalAggregation.ReduceContext.forPartialReduction( - bigArrays, scriptService, () -> PipelineAggregator.PipelineTree.EMPTY); + bigArrays, + scriptService, + () -> PipelineAggregator.PipelineTree.EMPTY + ); listener.onResponse(processResponses(rollupSearchContext, msearchResponse, context)); }, listener::onFailure)); } - static SearchResponse processResponses(RollupSearchContext rollupContext, MultiSearchResponse msearchResponse, - InternalAggregation.ReduceContext reduceContext) throws Exception { + static SearchResponse processResponses( + RollupSearchContext rollupContext, + MultiSearchResponse msearchResponse, + InternalAggregation.ReduceContext reduceContext + ) throws Exception { if (rollupContext.hasLiveIndices() && rollupContext.hasRollupIndices()) { // Both return RollupResponseTranslator.combineResponses(msearchResponse.getResponses(), reduceContext); @@ -157,12 +176,12 @@ static MultiSearchRequest createMSearchRequest(SearchRequest request, NamedWrite AggregatorFactories.Builder sourceAgg = request.source().aggregations(); // If there are no aggs in the request, our translation won't create any msearch. - // So just add an dummy request to the msearch and return. This is a bit silly + // So just add an dummy request to the msearch and return. This is a bit silly // but maintains how the regular search API behaves if (sourceAgg == null || sourceAgg.count() == 0) { // Note: we can't apply any query rewriting or filtering on the query because there - // are no validated caps, so we have no idea what job is intended here. The only thing + // are no validated caps, so we have no idea what job is intended here. The only thing // this affects is doc count, since hits and aggs will both be empty it doesn't really matter. 
msearch.add(new SearchRequest(context.getRollupIndices(), request.source())); return msearch; @@ -171,7 +190,7 @@ static MultiSearchRequest createMSearchRequest(SearchRequest request, NamedWrite // Find our list of "best" job caps Set validatedCaps = new HashSet<>(); sourceAgg.getAggregatorFactories() - .forEach(agg -> validatedCaps.addAll(RollupJobIdentifierUtils.findBestJobs(agg, context.getJobCaps()))); + .forEach(agg -> validatedCaps.addAll(RollupJobIdentifierUtils.findBestJobs(agg, context.getJobCaps()))); List jobIds = validatedCaps.stream().map(RollupJobCaps::getJobID).collect(Collectors.toList()); for (AggregationBuilder agg : sourceAgg.getAggregatorFactories()) { @@ -185,8 +204,7 @@ static MultiSearchRequest createMSearchRequest(SearchRequest request, NamedWrite BoolQueryBuilder boolQuery = new BoolQueryBuilder(); filterConditions.forEach(boolQuery::must); - FilterAggregationBuilder filterAgg = new FilterAggregationBuilder(RollupField.FILTER + "_" + agg.getName(), - boolQuery); + FilterAggregationBuilder filterAgg = new FilterAggregationBuilder(RollupField.FILTER + "_" + agg.getName(), boolQuery); translatedAgg.forEach(filterAgg::subAggregation); rolledSearchSource.aggregation(filterAgg); } @@ -203,12 +221,17 @@ static MultiSearchRequest createMSearchRequest(SearchRequest request, NamedWrite } // filter the rewritten query by JobID - copiedSource.query(new BoolQueryBuilder() - .must(rewritten) + copiedSource.query( + new BoolQueryBuilder().must(rewritten) .filter(new TermQueryBuilder(RollupField.formatMetaField(RollupField.ID.getPreferredName()), id)) // Both versions are acceptable right now since they are compatible at search time - .filter(new TermsQueryBuilder(RollupField.formatMetaField(RollupField.VERSION_FIELD), - new long[]{Rollup.ROLLUP_VERSION_V1, Rollup.ROLLUP_VERSION_V2}))); + .filter( + new TermsQueryBuilder( + RollupField.formatMetaField(RollupField.VERSION_FIELD), + new long[] { Rollup.ROLLUP_VERSION_V1, Rollup.ROLLUP_VERSION_V2 } + ) + ) + ); // And add a new msearch per JobID msearch.add(new SearchRequest(context.getRollupIndices(), copiedSource)); @@ -224,8 +247,11 @@ static MultiSearchRequest createMSearchRequest(SearchRequest request, NamedWrite * reading it in again using a {@link Writeable.Reader}. 
The stream that is wrapped around the {@link StreamInput} * potentially need to use a {@link NamedWriteableRegistry}, so this needs to be provided too */ - private static SearchSourceBuilder copyWriteable(SearchSourceBuilder original, NamedWriteableRegistry namedWriteableRegistry, - Writeable.Reader reader) throws IOException { + private static SearchSourceBuilder copyWriteable( + SearchSourceBuilder original, + NamedWriteableRegistry namedWriteableRegistry, + Writeable.Reader reader + ) throws IOException { Writeable.Writer writer = (out, value) -> value.writeTo(out); try (BytesStreamOutput output = new BytesStreamOutput()) { output.setVersion(Version.CURRENT); @@ -240,8 +266,7 @@ private static SearchSourceBuilder copyWriteable(SearchSourceBuilder original, N static void validateSearchRequest(SearchRequest request) { // Rollup does not support hits at the moment if (request.source().size() != 0) { - throw new IllegalArgumentException("Rollup does not support returning search hits, please try again " + - "with [size: 0]."); + throw new IllegalArgumentException("Rollup does not support returning search hits, please try again " + "with [size: 0]."); } if (request.source().postFilter() != null) { @@ -271,16 +296,18 @@ static QueryBuilder rewriteQuery(QueryBuilder builder, Set jobCap } if (builder.getWriteableName().equals(BoolQueryBuilder.NAME)) { BoolQueryBuilder rewrittenBool = new BoolQueryBuilder(); - ((BoolQueryBuilder)builder).must().forEach(query -> rewrittenBool.must(rewriteQuery(query, jobCaps))); - ((BoolQueryBuilder)builder).mustNot().forEach(query -> rewrittenBool.mustNot(rewriteQuery(query, jobCaps))); - ((BoolQueryBuilder)builder).should().forEach(query -> rewrittenBool.should(rewriteQuery(query, jobCaps))); - ((BoolQueryBuilder)builder).filter().forEach(query -> rewrittenBool.filter(rewriteQuery(query, jobCaps))); + ((BoolQueryBuilder) builder).must().forEach(query -> rewrittenBool.must(rewriteQuery(query, jobCaps))); + ((BoolQueryBuilder) builder).mustNot().forEach(query -> rewrittenBool.mustNot(rewriteQuery(query, jobCaps))); + ((BoolQueryBuilder) builder).should().forEach(query -> rewrittenBool.should(rewriteQuery(query, jobCaps))); + ((BoolQueryBuilder) builder).filter().forEach(query -> rewrittenBool.filter(rewriteQuery(query, jobCaps))); return rewrittenBool; } else if (builder.getWriteableName().equals(ConstantScoreQueryBuilder.NAME)) { - return new ConstantScoreQueryBuilder(rewriteQuery(((ConstantScoreQueryBuilder)builder).innerQuery(), jobCaps)); + return new ConstantScoreQueryBuilder(rewriteQuery(((ConstantScoreQueryBuilder) builder).innerQuery(), jobCaps)); } else if (builder.getWriteableName().equals(BoostingQueryBuilder.NAME)) { - return new BoostingQueryBuilder(rewriteQuery(((BoostingQueryBuilder)builder).negativeQuery(), jobCaps), - rewriteQuery(((BoostingQueryBuilder)builder).positiveQuery(), jobCaps)); + return new BoostingQueryBuilder( + rewriteQuery(((BoostingQueryBuilder) builder).negativeQuery(), jobCaps), + rewriteQuery(((BoostingQueryBuilder) builder).positiveQuery(), jobCaps) + ); } else if (builder.getWriteableName().equals(DisMaxQueryBuilder.NAME)) { DisMaxQueryBuilder rewritten = new DisMaxQueryBuilder(); ((DisMaxQueryBuilder) builder).innerQueries().forEach(query -> rewritten.add(rewriteQuery(query, jobCaps))); @@ -290,8 +317,7 @@ static QueryBuilder rewriteQuery(QueryBuilder builder, Set jobCap String fieldName = range.fieldName(); String rewrittenFieldName = rewriteFieldName(jobCaps, RangeQueryBuilder.NAME, fieldName); - RangeQueryBuilder 
rewritten = new RangeQueryBuilder(rewrittenFieldName) - .from(range.from()) + RangeQueryBuilder rewritten = new RangeQueryBuilder(rewrittenFieldName).from(range.from()) .to(range.to()) .includeLower(range.includeLower()) .includeUpper(range.includeUpper()); @@ -305,12 +331,12 @@ static QueryBuilder rewriteQuery(QueryBuilder builder, Set jobCap } else if (builder.getWriteableName().equals(TermQueryBuilder.NAME)) { TermQueryBuilder term = (TermQueryBuilder) builder; String fieldName = term.fieldName(); - String rewrittenFieldName = rewriteFieldName(jobCaps, TermQueryBuilder.NAME, fieldName); + String rewrittenFieldName = rewriteFieldName(jobCaps, TermQueryBuilder.NAME, fieldName); return new TermQueryBuilder(rewrittenFieldName, term.value()); } else if (builder.getWriteableName().equals(TermsQueryBuilder.NAME)) { TermsQueryBuilder terms = (TermsQueryBuilder) builder; String fieldName = terms.fieldName(); - String rewrittenFieldName = rewriteFieldName(jobCaps, TermQueryBuilder.NAME, fieldName); + String rewrittenFieldName = rewriteFieldName(jobCaps, TermQueryBuilder.NAME, fieldName); return new TermsQueryBuilder(rewrittenFieldName, terms.getValues()); } else if (builder.getWriteableName().equals(MatchAllQueryBuilder.NAME)) { // no-op @@ -326,10 +352,11 @@ private static String rewriteFieldName(Set jobCaps, String builde .filter(caps -> caps.getFieldCaps().keySet().contains(fieldName)) .map(caps -> { RollupJobCaps.RollupFieldCaps fieldCaps = caps.getFieldCaps().get(fieldName); - return fieldCaps.getAggs().stream() + return fieldCaps.getAggs() + .stream() // For now, we only allow filtering on grouping fields .filter(agg -> { - String type = (String)agg.get(RollupField.AGG); + String type = (String) agg.get(RollupField.AGG); // make sure it's one of the three groups return type.equals(TermsAggregationBuilder.NAME) || type.equals(DateHistogramAggregationBuilder.NAME) @@ -338,9 +365,9 @@ private static String rewriteFieldName(Set jobCaps, String builde // Rewrite the field name to our convention (e.g. "foo" -> "date_histogram.foo.timestamp") .map(agg -> { if (agg.get(RollupField.AGG).equals(DateHistogramAggregationBuilder.NAME)) { - return RollupField.formatFieldName(fieldName, (String)agg.get(RollupField.AGG), RollupField.TIMESTAMP); + return RollupField.formatFieldName(fieldName, (String) agg.get(RollupField.AGG), RollupField.TIMESTAMP); } else { - return RollupField.formatFieldName(fieldName, (String)agg.get(RollupField.AGG), RollupField.VALUE); + return RollupField.formatFieldName(fieldName, (String) agg.get(RollupField.AGG), RollupField.VALUE); } }) .collect(Collectors.toList()); @@ -348,11 +375,17 @@ private static String rewriteFieldName(Set jobCaps, String builde .distinct() .collect(ArrayList::new, List::addAll, List::addAll); if (rewrittenFieldNames.isEmpty()) { - throw new IllegalArgumentException("Field [" + fieldName + "] in [" + builderName - + "] query is not available in selected rollup indices, cannot query."); + throw new IllegalArgumentException( + "Field [" + fieldName + "] in [" + builderName + "] query is not available in selected rollup indices, cannot query." + ); } else if (rewrittenFieldNames.size() > 1) { - throw new IllegalArgumentException("Ambiguous field name resolution when mapping to rolled fields. Field name [" + - fieldName + "] was mapped to: [" + Strings.collectionToDelimitedString(rewrittenFieldNames, ",") + "]."); + throw new IllegalArgumentException( + "Ambiguous field name resolution when mapping to rolled fields. 
Field name [" + + fieldName + + "] was mapped to: [" + + Strings.collectionToDelimitedString(rewrittenFieldNames, ",") + + "]." + ); } else { return rewrittenFieldNames.get(0); } @@ -366,7 +399,7 @@ static RollupSearchContext separateIndices(String[] indices, ImmutableOpenMap rollup = new ArrayList<>(); List normal = new ArrayList<>(); - Set jobCaps = new HashSet<>(); + Set jobCaps = new HashSet<>(); Arrays.stream(indices).forEach(i -> { if (i.equals(Metadata.ALL)) { throw new IllegalArgumentException("Searching _all via RollupSearch endpoint is not supported at this time."); @@ -381,8 +414,11 @@ static RollupSearchContext separateIndices(String[] indices, ImmutableOpenMap 0; if (rollup.size() > 1) { - throw new IllegalArgumentException("RollupSearch currently only supports searching one rollup index at a time. " + - "Found the following rollup indices: " + rollup); + throw new IllegalArgumentException( + "RollupSearch currently only supports searching one rollup index at a time. " + + "Found the following rollup indices: " + + rollup + ); } return new RollupSearchContext(normal.toArray(new String[0]), rollup.toArray(new String[0]), jobCaps); } @@ -408,12 +444,13 @@ public void onFailure(Exception e) { channel.sendResponse(e); } catch (Exception e1) { logger.warn( - (org.apache.logging.log4j.util.Supplier) - () -> new ParameterizedMessage( - "Failed to send error response for action [{}] and request [{}]", - actionName, - request), - e1); + (org.apache.logging.log4j.util.Supplier) () -> new ParameterizedMessage( + "Failed to send error response for action [{}] and request [{}]", + actionName, + request + ), + e1 + ); } } }); @@ -453,4 +490,3 @@ Set getJobCaps() { } } - diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportStartRollupAction.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportStartRollupAction.java index 3514f3e00c438..3a47a798b8312 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportStartRollupAction.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportStartRollupAction.java @@ -22,45 +22,60 @@ import java.util.List; import java.util.function.Consumer; -public class TransportStartRollupAction extends TransportTasksAction { +public class TransportStartRollupAction extends TransportTasksAction< + RollupJobTask, + StartRollupJobAction.Request, + StartRollupJobAction.Response, + StartRollupJobAction.Response> { @Inject public TransportStartRollupAction(TransportService transportService, ActionFilters actionFilters, ClusterService clusterService) { - super(StartRollupJobAction.NAME, clusterService, transportService, actionFilters, StartRollupJobAction.Request::new, - StartRollupJobAction.Response::new, StartRollupJobAction.Response::new, ThreadPool.Names.SAME); + super( + StartRollupJobAction.NAME, + clusterService, + transportService, + actionFilters, + StartRollupJobAction.Request::new, + StartRollupJobAction.Response::new, + StartRollupJobAction.Response::new, + ThreadPool.Names.SAME + ); } - @Override protected void processTasks(StartRollupJobAction.Request request, Consumer operation) { TransportTaskHelper.doProcessTasks(request.getId(), operation, taskManager); } - @Override - protected void taskOperation(StartRollupJobAction.Request request, - RollupJobTask jobTask, - ActionListener listener) { + protected void taskOperation( + StartRollupJobAction.Request request, + RollupJobTask jobTask, + ActionListener 
listener + ) { if (jobTask.getConfig().getId().equals(request.getId())) { jobTask.start(listener); } else { - listener.onFailure(new RuntimeException("ID of rollup task [" + jobTask.getConfig().getId() - + "] does not match request's ID [" + request.getId() + "]")); + listener.onFailure( + new RuntimeException( + "ID of rollup task [" + jobTask.getConfig().getId() + "] does not match request's ID [" + request.getId() + "]" + ) + ); } } @Override - protected StartRollupJobAction.Response newResponse(StartRollupJobAction.Request request, List tasks, - List taskOperationFailures, - List failedNodeExceptions) { + protected StartRollupJobAction.Response newResponse( + StartRollupJobAction.Request request, + List tasks, + List taskOperationFailures, + List failedNodeExceptions + ) { if (taskOperationFailures.isEmpty() == false) { - throw org.elasticsearch.ExceptionsHelper - .convertToElastic(taskOperationFailures.get(0).getCause()); + throw org.elasticsearch.ExceptionsHelper.convertToElastic(taskOperationFailures.get(0).getCause()); } else if (failedNodeExceptions.isEmpty() == false) { - throw org.elasticsearch.ExceptionsHelper - .convertToElastic(failedNodeExceptions.get(0)); + throw org.elasticsearch.ExceptionsHelper.convertToElastic(failedNodeExceptions.get(0)); } // Either the job doesn't exist (the user didn't create it yet) or was deleted after the StartAPI executed. diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportStopRollupAction.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportStopRollupAction.java index 093cf12d314d1..b512046420c93 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportStopRollupAction.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportStopRollupAction.java @@ -28,22 +28,37 @@ import java.util.function.BooleanSupplier; import java.util.function.Consumer; -public class TransportStopRollupAction extends TransportTasksAction { +public class TransportStopRollupAction extends TransportTasksAction< + RollupJobTask, + StopRollupJobAction.Request, + StopRollupJobAction.Response, + StopRollupJobAction.Response> { private final ThreadPool threadPool; @Inject - public TransportStopRollupAction(TransportService transportService, ActionFilters actionFilters, - ClusterService clusterService, ThreadPool threadPool) { - super(StopRollupJobAction.NAME, clusterService, transportService, actionFilters, StopRollupJobAction.Request::new, - StopRollupJobAction.Response::new, StopRollupJobAction.Response::new, ThreadPool.Names.SAME); + public TransportStopRollupAction( + TransportService transportService, + ActionFilters actionFilters, + ClusterService clusterService, + ThreadPool threadPool + ) { + super( + StopRollupJobAction.NAME, + clusterService, + transportService, + actionFilters, + StopRollupJobAction.Request::new, + StopRollupJobAction.Response::new, + StopRollupJobAction.Response::new, + ThreadPool.Names.SAME + ); this.threadPool = threadPool; } @Override protected void processTasks(StopRollupJobAction.Request request, Consumer operation) { - TransportTaskHelper.doProcessTasks(request.getId(), operation, taskManager); + TransportTaskHelper.doProcessTasks(request.getId(), operation, taskManager); } @Override @@ -52,45 +67,71 @@ protected void doExecute(Task task, StopRollupJobAction.Request request, ActionL } @Override - protected void taskOperation(StopRollupJobAction.Request request, RollupJobTask jobTask, - 
ActionListener listener) { + protected void taskOperation( + StopRollupJobAction.Request request, + RollupJobTask jobTask, + ActionListener listener + ) { if (jobTask.getConfig().getId().equals(request.getId())) { jobTask.stop(maybeWrapWithBlocking(request, jobTask, listener, threadPool)); } else { - listener.onFailure(new RuntimeException("ID of rollup task [" + jobTask.getConfig().getId() - + "] does not match request's ID [" + request.getId() + "]")); + listener.onFailure( + new RuntimeException( + "ID of rollup task [" + jobTask.getConfig().getId() + "] does not match request's ID [" + request.getId() + "]" + ) + ); } } - private static ActionListener maybeWrapWithBlocking(StopRollupJobAction.Request request, - RollupJobTask jobTask, - ActionListener listener, - ThreadPool threadPool) { + private static ActionListener maybeWrapWithBlocking( + StopRollupJobAction.Request request, + RollupJobTask jobTask, + ActionListener listener, + ThreadPool threadPool + ) { if (request.waitForCompletion()) { return ActionListener.wrap(response -> { if (response.isStopped()) { // The Task acknowledged that it is stopped/stopping... wait until the status actually - // changes over before returning. Switch over to Generic threadpool so + // changes over before returning. Switch over to Generic threadpool so // we don't block the network thread threadPool.generic().execute(() -> { try { - boolean stopped = awaitBusy(() -> ((RollupJobStatus) jobTask.getStatus()) - .getIndexerState().equals(IndexerState.STOPPED), request.timeout()); + boolean stopped = awaitBusy( + () -> ((RollupJobStatus) jobTask.getStatus()).getIndexerState().equals(IndexerState.STOPPED), + request.timeout() + ); if (stopped) { // We have successfully confirmed a stop, send back the response listener.onResponse(response); } else { - listener.onFailure(new ElasticsearchTimeoutException("Timed out after [" + request.timeout().getStringRep() - + "] while waiting for rollup job [" + request.getId() + "] to stop. State was [" - + ((RollupJobStatus) jobTask.getStatus()).getIndexerState() + "]")); + listener.onFailure( + new ElasticsearchTimeoutException( + "Timed out after [" + + request.timeout().getStringRep() + + "] while waiting for rollup job [" + + request.getId() + + "] to stop. State was [" + + ((RollupJobStatus) jobTask.getStatus()).getIndexerState() + + "]" + ) + ); } } catch (InterruptedException e) { listener.onFailure(e); } catch (Exception e) { - listener.onFailure(new ElasticsearchTimeoutException("Encountered unexpected error while waiting for " + - "rollup job [" + request.getId() + "] to stop. State was [" - + ((RollupJobStatus) jobTask.getStatus()).getIndexerState() + "].", e)); + listener.onFailure( + new ElasticsearchTimeoutException( + "Encountered unexpected error while waiting for " + + "rollup job [" + + request.getId() + + "] to stop. 
State was [" + + ((RollupJobStatus) jobTask.getStatus()).getIndexerState() + + "].", + e + ) + ); } }); @@ -126,16 +167,17 @@ private static boolean awaitBusy(BooleanSupplier breakSupplier, TimeValue maxWai } @Override - protected StopRollupJobAction.Response newResponse(StopRollupJobAction.Request request, List tasks, - List taskOperationFailures, - List failedNodeExceptions) { + protected StopRollupJobAction.Response newResponse( + StopRollupJobAction.Request request, + List tasks, + List taskOperationFailures, + List failedNodeExceptions + ) { if (taskOperationFailures.isEmpty() == false) { - throw org.elasticsearch.ExceptionsHelper - .convertToElastic(taskOperationFailures.get(0).getCause()); + throw org.elasticsearch.ExceptionsHelper.convertToElastic(taskOperationFailures.get(0).getCause()); } else if (failedNodeExceptions.isEmpty() == false) { - throw org.elasticsearch.ExceptionsHelper - .convertToElastic(failedNodeExceptions.get(0)); + throw org.elasticsearch.ExceptionsHelper.convertToElastic(failedNodeExceptions.get(0)); } // Either the job doesn't exist (the user didn't create it yet) or was deleted after the Stop API executed. diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportTaskHelper.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportTaskHelper.java index 5d8e4824ad062..187bd46baf2b3 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportTaskHelper.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportTaskHelper.java @@ -21,10 +21,11 @@ public class TransportTaskHelper { static void doProcessTasks(String id, Consumer operation, TaskManager taskManager) { RollupJobTask matchingTask = null; for (Task task : taskManager.getTasks().values()) { - if (task instanceof RollupJobTask && ((RollupJobTask)task).getConfig().getId().equals(id)) { + if (task instanceof RollupJobTask && ((RollupJobTask) task).getConfig().getId().equals(id)) { if (matchingTask != null) { - throw new IllegalArgumentException("Found more than one matching task for rollup job [" + id + "] when " + - "there should only be one."); + throw new IllegalArgumentException( + "Found more than one matching task for rollup job [" + id + "] when " + "there should only be one." 
+ ); } matchingTask = (RollupJobTask) task; } diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/IndexerUtils.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/IndexerUtils.java index 2aa0dbe1e75e1..98bff78687ffd 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/IndexerUtils.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/IndexerUtils.java @@ -47,11 +47,16 @@ class IndexerUtils { * @param jobId The ID for the job * @return A stream of rolled documents derived from the response */ - static Stream processBuckets(CompositeAggregation agg, String rollupIndex, RollupIndexerJobStats stats, - GroupConfig groupConfig, String jobId) { + static Stream processBuckets( + CompositeAggregation agg, + String rollupIndex, + RollupIndexerJobStats stats, + GroupConfig groupConfig, + String jobId + ) { logger.debug("Buckets: [" + agg.getBuckets().size() + "][" + jobId + "]"); - return agg.getBuckets().stream().map(b ->{ + return agg.getBuckets().stream().map(b -> { stats.incrementNumDocuments(b.getDocCount()); // Put the composite keys into a treemap so that the key iteration order is consistent @@ -59,14 +64,14 @@ static Stream processBuckets(CompositeAggregation agg, String roll TreeMap keys = new TreeMap<>(b.getKey()); List metrics = b.getAggregations().asList(); - RollupIDGenerator idGenerator = new RollupIDGenerator(jobId); + RollupIDGenerator idGenerator = new RollupIDGenerator(jobId); Map doc = new HashMap<>(keys.size() + metrics.size()); processKeys(keys, doc, b.getDocCount(), groupConfig, idGenerator); idGenerator.add(jobId); processMetrics(metrics, doc); - doc.put(RollupField.ROLLUP_META + "." + RollupField.VERSION_FIELD, Rollup.CURRENT_ROLLUP_VERSION ); + doc.put(RollupField.ROLLUP_META + "." + RollupField.VERSION_FIELD, Rollup.CURRENT_ROLLUP_VERSION); doc.put(RollupField.ROLLUP_META + "." + RollupField.ID.getPreferredName(), jobId); IndexRequest request = new IndexRequest(rollupIndex).id(idGenerator.getID()); @@ -75,18 +80,23 @@ static Stream processBuckets(CompositeAggregation agg, String roll }); } - private static void processKeys(Map keys, Map doc, - long count, GroupConfig groupConfig, RollupIDGenerator idGenerator) { + private static void processKeys( + Map keys, + Map doc, + long count, + GroupConfig groupConfig, + RollupIDGenerator idGenerator + ) { keys.forEach((k, v) -> { - // Also add a doc count for each key. This will duplicate data, but makes search easier later + // Also add a doc count for each key. This will duplicate data, but makes search easier later doc.put(k + "." + RollupField.COUNT_FIELD, count); if (k.endsWith("." + DateHistogramAggregationBuilder.NAME)) { assert v != null; doc.put(k + "." + RollupField.TIMESTAMP, v); - doc.put(k + "." + RollupField.INTERVAL, groupConfig.getDateHistogram().getInterval()); - doc.put(k + "." + DateHistogramGroupConfig.TIME_ZONE, groupConfig.getDateHistogram().getTimeZone()); - idGenerator.add((Long)v); + doc.put(k + "." + RollupField.INTERVAL, groupConfig.getDateHistogram().getInterval()); + doc.put(k + "." + DateHistogramGroupConfig.TIME_ZONE, groupConfig.getDateHistogram().getTimeZone()); + idGenerator.add((Long) v); } else if (k.endsWith("." + HistogramAggregationBuilder.NAME)) { doc.put(k + "." + RollupField.VALUE, v); doc.put(k + "." 
+ RollupField.INTERVAL, groupConfig.getHistogram().getInterval()); @@ -100,14 +110,13 @@ private static void processKeys(Map keys, Map do if (v == null) { idGenerator.addNull(); } else if (v instanceof String) { - idGenerator.add((String)v); + idGenerator.add((String) v); } else if (v instanceof Long) { - idGenerator.add((Long)v); + idGenerator.add((Long) v); } else if (v instanceof Double) { - idGenerator.add((Double)v); + idGenerator.add((Double) v); } else { - throw new RuntimeException("Encountered value of type [" - + v.getClass() + "], which was unable to be processed."); + throw new RuntimeException("Encountered value of type [" + v.getClass() + "], which was unable to be processed."); } } else { throw new ElasticsearchException("Could not identify key in agg [" + k + "]"); diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/RollupIDGenerator.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/RollupIDGenerator.java index 4c0595dec7068..d0c0fa4748377 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/RollupIDGenerator.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/RollupIDGenerator.java @@ -97,8 +97,7 @@ private void setFlag() { public String getID() { setFlag(); - MurmurHash3.Hash128 hasher - = MurmurHash3.hash128(id.bytes(), 0, id.length(), SEED, new MurmurHash3.Hash128()); + MurmurHash3.Hash128 hasher = MurmurHash3.hash128(id.bytes(), 0, id.length(), SEED, new MurmurHash3.Hash128()); byte[] hashedBytes = new byte[16]; System.arraycopy(Numbers.longToBytes(hasher.h1), 0, hashedBytes, 0, 8); System.arraycopy(Numbers.longToBytes(hasher.h2), 0, hashedBytes, 8, 8); diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/RollupIndexer.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/RollupIndexer.java index 8c2db00ddc787..cfe57193985cc 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/RollupIndexer.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/RollupIndexer.java @@ -72,8 +72,7 @@ public abstract class RollupIndexer extends AsyncTwoPhaseIndexer initialState, - Map initialPosition) { + RollupIndexer(ThreadPool threadPool, RollupJob job, AtomicReference initialState, Map initialPosition) { this(threadPool, job, initialState, initialPosition, new RollupIndexerJobStats()); } @@ -85,8 +84,13 @@ public abstract class RollupIndexer extends AsyncTwoPhaseIndexer initialState, - Map initialPosition, RollupIndexerJobStats jobStats) { + RollupIndexer( + ThreadPool threadPool, + RollupJob job, + AtomicReference initialState, + Map initialPosition, + RollupIndexerJobStats jobStats + ) { super(threadPool, initialState, initialPosition, jobStats); this.job = job; this.compositeBuilder = createCompositeBuilder(job.getConfig()); @@ -103,8 +107,7 @@ protected void onStart(long now, ActionListener listener) { // this is needed to exclude buckets that can still receive new documents DateHistogramGroupConfig dateHisto = job.getConfig().getGroupConfig().getDateHistogram(); // if the job has a delay we filter all documents that appear before it - long delay = dateHisto.getDelay() != null ? - TimeValue.parseTimeValue(dateHisto.getDelay().toString(), "").millis() : 0; + long delay = dateHisto.getDelay() != null ? 
TimeValue.parseTimeValue(dateHisto.getDelay().toString(), "").millis() : 0; maxBoundary = dateHisto.createRounding().round(now - delay); listener.onResponse(true); } catch (Exception e) { @@ -114,15 +117,12 @@ protected void onStart(long now, ActionListener listener) { protected SearchRequest buildSearchRequest() { final Map position = getPosition(); - SearchSourceBuilder searchSource = new SearchSourceBuilder() - .size(0) - .trackTotalHits(false) - // make sure we always compute complete buckets that appears before the configured delay - .query(createBoundaryQuery(position)) - .aggregation(compositeBuilder.aggregateAfter(position)); - return new SearchRequest(job.getConfig().getIndexPattern()) - .allowPartialSearchResults(false) - .source(searchSource); + SearchSourceBuilder searchSource = new SearchSourceBuilder().size(0) + .trackTotalHits(false) + // make sure we always compute complete buckets that appears before the configured delay + .query(createBoundaryQuery(position)) + .aggregation(compositeBuilder.aggregateAfter(position)); + return new SearchRequest(job.getConfig().getIndexPattern()).allowPartialSearchResults(false).source(searchSource); } @Override @@ -135,9 +135,16 @@ protected IterationResult> doProcess(SearchResponse searchRe } return new IterationResult<>( - IndexerUtils.processBuckets(response, job.getConfig().getRollupIndex(), getStats(), - job.getConfig().getGroupConfig(), job.getConfig().getId()), - response.afterKey(), response.getBuckets().isEmpty()); + IndexerUtils.processBuckets( + response, + job.getConfig().getRollupIndex(), + getStats(), + job.getConfig().getGroupConfig(), + job.getConfig().getId() + ), + response.afterKey(), + response.getBuckets().isEmpty() + ); } /** @@ -174,17 +181,14 @@ private QueryBuilder createBoundaryQuery(Map position) { assert maxBoundary < Long.MAX_VALUE; DateHistogramGroupConfig dateHisto = job.getConfig().getGroupConfig().getDateHistogram(); String fieldName = dateHisto.getField(); - String rollupFieldName = fieldName + "." + DateHistogramAggregationBuilder.NAME; + String rollupFieldName = fieldName + "." + DateHistogramAggregationBuilder.NAME; long lowerBound = 0L; if (position != null) { Number value = (Number) position.get(rollupFieldName); lowerBound = value.longValue(); } assert lowerBound <= maxBoundary; - final RangeQueryBuilder query = new RangeQueryBuilder(fieldName) - .gte(lowerBound) - .lt(maxBoundary) - .format("epoch_millis"); + final RangeQueryBuilder query = new RangeQueryBuilder(fieldName).gte(lowerBound).lt(maxBoundary).format("epoch_millis"); return query; } @@ -284,19 +288,21 @@ static List createAggregationBuilders(final List countBuilder - = new ValueCountAggregationBuilder( - formatFieldName(field, AvgAggregationBuilder.NAME, RollupField.COUNT_FIELD)); + ValuesSourceAggregationBuilder.LeafOnly countBuilder = + new ValueCountAggregationBuilder( + formatFieldName(field, AvgAggregationBuilder.NAME, RollupField.COUNT_FIELD) + ); countBuilder.field(field); builders.add(countBuilder); } else if (metric.equals(MetricConfig.SUM.getPreferredName())) { newBuilder = new SumAggregationBuilder(formatFieldName(field, SumAggregationBuilder.NAME, RollupField.VALUE)); } else if (metric.equals(MetricConfig.VALUE_COUNT.getPreferredName())) { // TODO allow non-numeric value_counts. 
- // I removed the hard coding of NUMERIC as part of cleaning up targetValueType, but I don't think that resolves + // I removed the hard coding of NUMERIC as part of cleaning up targetValueType, but I don't think that resolves // the above to do note -- Tozzi 2019-12-06 newBuilder = new ValueCountAggregationBuilder( - formatFieldName(field, ValueCountAggregationBuilder.NAME, RollupField.VALUE)); + formatFieldName(field, ValueCountAggregationBuilder.NAME, RollupField.VALUE) + ); } else { throw new IllegalArgumentException("Unsupported metric type [" + metric + "]"); } @@ -309,4 +315,3 @@ static List createAggregationBuilders(final List persistentTask, - Map headers) { - return new RollupJobTask(id, type, action, parentTaskId, persistentTask.getParams(), - (RollupJobStatus) persistentTask.getState(), client, schedulerEngine, threadPool, headers); + protected AllocatedPersistentTask createTask( + long id, + String type, + String action, + TaskId parentTaskId, + PersistentTasksCustomMetadata.PersistentTask persistentTask, + Map headers + ) { + return new RollupJobTask( + id, + type, + action, + parentTaskId, + persistentTask.getParams(), + (RollupJobStatus) persistentTask.getState(), + client, + schedulerEngine, + threadPool, + headers + ); } } @@ -99,8 +116,7 @@ protected class ClientRollupPageManager extends RollupIndexer { private final Client client; private final RollupJob job; - ClientRollupPageManager(RollupJob job, IndexerState initialState, Map initialPosition, - Client client) { + ClientRollupPageManager(RollupJob job, IndexerState initialState, Map initialPosition, Client client) { super(threadPool, job, new AtomicReference<>(initialState), initialPosition); this.client = client; this.job = job; @@ -108,14 +124,26 @@ protected class ClientRollupPageManager extends RollupIndexer { @Override protected void doNextSearch(long waitTimeInNanos, ActionListener nextPhase) { - ClientHelper.executeWithHeadersAsync(job.getHeaders(), ClientHelper.ROLLUP_ORIGIN, client, SearchAction.INSTANCE, - buildSearchRequest(), nextPhase); + ClientHelper.executeWithHeadersAsync( + job.getHeaders(), + ClientHelper.ROLLUP_ORIGIN, + client, + SearchAction.INSTANCE, + buildSearchRequest(), + nextPhase + ); } @Override protected void doNextBulk(BulkRequest request, ActionListener nextPhase) { - ClientHelper.executeWithHeadersAsync(job.getHeaders(), ClientHelper.ROLLUP_ORIGIN, client, BulkAction.INSTANCE, request, - nextPhase); + ClientHelper.executeWithHeadersAsync( + job.getHeaders(), + ClientHelper.ROLLUP_ORIGIN, + client, + BulkAction.INSTANCE, + request, + nextPhase + ); } @Override @@ -156,8 +184,18 @@ protected void onAbort() { private final Map initialPosition; private RollupIndexer indexer; - RollupJobTask(long id, String type, String action, TaskId parentTask, RollupJob job, RollupJobStatus state, - Client client, SchedulerEngine schedulerEngine, ThreadPool threadPool, Map headers) { + RollupJobTask( + long id, + String type, + String action, + TaskId parentTask, + RollupJob job, + RollupJobStatus state, + Client client, + SchedulerEngine schedulerEngine, + ThreadPool threadPool, + Map headers + ) { super(id, type, action, RollupField.NAME + "_" + job.getConfig().getId(), parentTask, headers); this.job = job; this.schedulerEngine = schedulerEngine; @@ -174,15 +212,27 @@ protected void onAbort() { } @Override - protected void init(PersistentTasksService persistentTasksService, TaskManager taskManager, - String persistentTaskId, long allocationId) { + protected void init( + PersistentTasksService 
persistentTasksService, + TaskManager taskManager, + String persistentTaskId, + long allocationId + ) { super.init(persistentTasksService, taskManager, persistentTaskId, allocationId); // If initial position is not null, we are resuming rather than starting fresh. IndexerState indexerState = IndexerState.STOPPED; if (initialIndexerState != null) { - logger.debug("We have existing state, setting state to [" + initialIndexerState + "] " + - "and current position to [" + initialPosition + "] for job [" + job.getConfig().getId() + "]"); + logger.debug( + "We have existing state, setting state to [" + + initialIndexerState + + "] " + + "and current position to [" + + initialPosition + + "] for job [" + + job.getConfig().getId() + + "]" + ); if (initialIndexerState.equals(IndexerState.INDEXING)) { /* * If we were indexing, we have to reset back to STARTED otherwise the indexer will be "stuck" thinking @@ -192,18 +242,22 @@ protected void init(PersistentTasksService persistentTasksService, TaskManager t } else if (initialIndexerState.equals(IndexerState.ABORTING) || initialIndexerState.equals(IndexerState.STOPPING)) { // It shouldn't be possible to persist ABORTING, but if for some reason it does, - // play it safe and restore the job as STOPPED. An admin will have to clean it up, - // but it won't be running, and won't delete itself either. Safest option. + // play it safe and restore the job as STOPPED. An admin will have to clean it up, + // but it won't be running, and won't delete itself either. Safest option. // If we were STOPPING, that means it persisted but was killed before finally stopped... so ok // to restore as STOPPED indexerState = IndexerState.STOPPED; - } else { + } else { indexerState = initialIndexerState; } } - this.indexer = new ClientRollupPageManager(job, indexerState, initialPosition, - new ParentTaskAssigningClient(client, getParentTaskId())); + this.indexer = new ClientRollupPageManager( + job, + indexerState, + initialPosition, + new ParentTaskAssigningClient(client, getParentTaskId()) + ); } @Override @@ -248,38 +302,55 @@ public synchronized void start(ActionListener lis return; } else if (prevState != IndexerState.STOPPED) { // if we're not already started/indexing, we must be STOPPED to get started - listener.onFailure(new ElasticsearchException("Cannot start task for Rollup Job [" + job.getConfig().getId() + "] because" - + " state was [" + prevState + "]")); + listener.onFailure( + new ElasticsearchException( + "Cannot start task for Rollup Job [" + job.getConfig().getId() + "] because" + " state was [" + prevState + "]" + ) + ); return; } final IndexerState newState = indexer.start(); if (newState != IndexerState.STARTED) { - listener.onFailure(new ElasticsearchException("Cannot start task for Rollup Job [" + job.getConfig().getId() + "] because" - + " new state was [" + newState + "]")); + listener.onFailure( + new ElasticsearchException( + "Cannot start task for Rollup Job [" + job.getConfig().getId() + "] because" + " new state was [" + newState + "]" + ) + ); return; } final RollupJobStatus state = new RollupJobStatus(IndexerState.STARTED, indexer.getPosition()); - logger.debug("Updating state for rollup job [" + job.getConfig().getId() + "] to [" + state.getIndexerState() + "][" + - state.getPosition() + "]"); - updatePersistentTaskState(state, - ActionListener.wrap( - (task) -> { - logger.debug("Successfully updated state for rollup job [" + job.getConfig().getId() + "] to [" - + state.getIndexerState() + "][" + state.getPosition() + "]"); - 
listener.onResponse(new StartRollupJobAction.Response(true)); - }, - (exc) -> { - // We were unable to update the persistent status, so we need to shutdown the indexer too. - indexer.stop(); - listener.onFailure( - new ElasticsearchException("Error while updating state for rollup job [" + job.getConfig().getId() - + "] to [" + state.getIndexerState() + "].", exc) - ); - } - ) + logger.debug( + "Updating state for rollup job [" + + job.getConfig().getId() + + "] to [" + + state.getIndexerState() + + "][" + + state.getPosition() + + "]" ); + updatePersistentTaskState(state, ActionListener.wrap((task) -> { + logger.debug( + "Successfully updated state for rollup job [" + + job.getConfig().getId() + + "] to [" + + state.getIndexerState() + + "][" + + state.getPosition() + + "]" + ); + listener.onResponse(new StartRollupJobAction.Response(true)); + }, (exc) -> { + // We were unable to update the persistent status, so we need to shutdown the indexer too. + indexer.stop(); + listener.onFailure( + new ElasticsearchException( + "Error while updating state for rollup job [" + job.getConfig().getId() + "] to [" + state.getIndexerState() + "].", + exc + ) + ); + })); } /** @@ -302,29 +373,37 @@ public synchronized void stop(ActionListener liste break; case STOPPING: - // update the persistent state to STOPPED. There are two scenarios and both are safe: + // update the persistent state to STOPPED. There are two scenarios and both are safe: // 1. we persist STOPPED now, indexer continues a bit then sees the flag and checkpoints another - // STOPPED with the more recent position. - // 2. we persist STOPPED now, indexer continues a bit but then dies. When/if we resume we'll pick up - // at last checkpoint, overwrite some docs and eventually checkpoint. + // STOPPED with the more recent position. + // 2. we persist STOPPED now, indexer continues a bit but then dies. When/if we resume we'll pick up + // at last checkpoint, overwrite some docs and eventually checkpoint. 
RollupJobStatus state = new RollupJobStatus(IndexerState.STOPPED, indexer.getPosition()); - updatePersistentTaskState(state, - ActionListener.wrap( - (task) -> { - logger.debug("Successfully updated state for rollup job [" + job.getConfig().getId() - + "] to [" + state.getIndexerState() + "]"); - listener.onResponse(new StopRollupJobAction.Response(true)); - }, - (exc) -> { - listener.onFailure(new ElasticsearchException("Error while updating state for rollup job [" - + job.getConfig().getId() + "] to [" + state.getIndexerState() + "].", exc)); - }) - ); + updatePersistentTaskState(state, ActionListener.wrap((task) -> { + logger.debug( + "Successfully updated state for rollup job [" + job.getConfig().getId() + "] to [" + state.getIndexerState() + "]" + ); + listener.onResponse(new StopRollupJobAction.Response(true)); + }, (exc) -> { + listener.onFailure( + new ElasticsearchException( + "Error while updating state for rollup job [" + + job.getConfig().getId() + + "] to [" + + state.getIndexerState() + + "].", + exc + ) + ); + })); break; default: - listener.onFailure(new ElasticsearchException("Cannot stop task for Rollup Job [" + job.getConfig().getId() + "] because" - + " state was [" + newState + "]")); + listener.onFailure( + new ElasticsearchException( + "Cannot stop task for Rollup Job [" + job.getConfig().getId() + "] because" + " state was [" + newState + "]" + ) + ); break; } } diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestDeleteRollupJobAction.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestDeleteRollupJobAction.java index 6ddb0bc6a23e8..79c872e6b49e3 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestDeleteRollupJobAction.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestDeleteRollupJobAction.java @@ -26,10 +26,7 @@ public class RestDeleteRollupJobAction extends BaseRestHandler { @Override public List routes() { - return List.of( - Route.builder(DELETE, "/_rollup/job/{id}") - .replaces(DELETE, "/_xpack/rollup/job/{id}/", RestApiVersion.V_7).build() - ); + return List.of(Route.builder(DELETE, "/_rollup/job/{id}").replaces(DELETE, "/_xpack/rollup/job/{id}/", RestApiVersion.V_7).build()); } @Override @@ -37,16 +34,19 @@ protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient String id = restRequest.param(ID.getPreferredName()); DeleteRollupJobAction.Request request = new DeleteRollupJobAction.Request(id); - return channel -> client.execute(DeleteRollupJobAction.INSTANCE, request, + return channel -> client.execute( + DeleteRollupJobAction.INSTANCE, + request, new RestToXContentListener(channel) { - @Override - protected RestStatus getStatus(DeleteRollupJobAction.Response response) { - if (response.getNodeFailures().size() > 0 || response.getTaskFailures().size() > 0) { - return RestStatus.INTERNAL_SERVER_ERROR; + @Override + protected RestStatus getStatus(DeleteRollupJobAction.Response response) { + if (response.getNodeFailures().size() > 0 || response.getTaskFailures().size() > 0) { + return RestStatus.INTERNAL_SERVER_ERROR; + } + return RestStatus.OK; } - return RestStatus.OK; } - }); + ); } @Override diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestGetRollupCapsAction.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestGetRollupCapsAction.java index 6a75c065109a8..9d9d934b081ea 100644 --- 
a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestGetRollupCapsAction.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestGetRollupCapsAction.java @@ -25,9 +25,7 @@ public class RestGetRollupCapsAction extends BaseRestHandler { @Override public List routes() { - return List.of( - Route.builder(GET, "/_rollup/data/{id}") - .replaces(GET, "/_xpack/rollup/data/{id}/", RestApiVersion.V_7).build()); + return List.of(Route.builder(GET, "/_rollup/data/{id}").replaces(GET, "/_xpack/rollup/data/{id}/", RestApiVersion.V_7).build()); } @Override diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestGetRollupIndexCapsAction.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestGetRollupIndexCapsAction.java index df439a8af71d2..29202ad5a4e6c 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestGetRollupIndexCapsAction.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestGetRollupIndexCapsAction.java @@ -28,8 +28,7 @@ public class RestGetRollupIndexCapsAction extends BaseRestHandler { @Override public List routes() { return List.of( - Route.builder(GET, "/{index}/_rollup/data") - .replaces(GET, "/{index}/_xpack/rollup/data", RestApiVersion.V_7).build() + Route.builder(GET, "/{index}/_rollup/data").replaces(GET, "/{index}/_xpack/rollup/data", RestApiVersion.V_7).build() ); } @@ -37,8 +36,7 @@ public List routes() { protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) { String index = restRequest.param(INDEX.getPreferredName()); IndicesOptions options = IndicesOptions.fromRequest(restRequest, IndicesOptions.STRICT_EXPAND_OPEN_FORBID_CLOSED); - GetRollupIndexCapsAction.Request request = - new GetRollupIndexCapsAction.Request(Strings.splitStringByCommaToArray(index), options); + GetRollupIndexCapsAction.Request request = new GetRollupIndexCapsAction.Request(Strings.splitStringByCommaToArray(index), options); return channel -> client.execute(GetRollupIndexCapsAction.INSTANCE, request, new RestToXContentListener<>(channel)); } diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestGetRollupJobsAction.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestGetRollupJobsAction.java index 6d6d54396e9de..cc2b44032d3fc 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestGetRollupJobsAction.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestGetRollupJobsAction.java @@ -25,10 +25,7 @@ public class RestGetRollupJobsAction extends BaseRestHandler { @Override public List routes() { - return List.of( - Route.builder(GET, "/_rollup/job/{id}") - .replaces(GET, "/_xpack/rollup/job/{id}/", RestApiVersion.V_7).build() - ); + return List.of(Route.builder(GET, "/_rollup/job/{id}").replaces(GET, "/_xpack/rollup/job/{id}/", RestApiVersion.V_7).build()); } @Override diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestPutRollupJobAction.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestPutRollupJobAction.java index 3e48dba6fa6cb..7375b9edb8ef1 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestPutRollupJobAction.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestPutRollupJobAction.java @@ -23,10 +23,7 @@ public class RestPutRollupJobAction 
extends BaseRestHandler { @Override public List routes() { - return List.of( - Route.builder(PUT, "/_rollup/job/{id}") - .replaces(PUT, "/_xpack/rollup/job/{id}", RestApiVersion.V_7).build() - ); + return List.of(Route.builder(PUT, "/_rollup/job/{id}").replaces(PUT, "/_xpack/rollup/job/{id}", RestApiVersion.V_7).build()); } @Override diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestRollupSearchAction.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestRollupSearchAction.java index 837d9f92d58b3..1aecaf23b4574 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestRollupSearchAction.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestRollupSearchAction.java @@ -31,15 +31,22 @@ public List routes() { new Route(GET, "_rollup_search"), new Route(POST, "_rollup_search"), new Route(GET, "{index}/_rollup_search"), - new Route(POST, "{index}/_rollup_search")); + new Route(POST, "{index}/_rollup_search") + ); } @Override protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException { SearchRequest searchRequest = new SearchRequest(); - restRequest.withContentOrSourceParamParserOrNull(parser -> - RestSearchAction.parseSearchRequest(searchRequest, restRequest, parser, - client.getNamedWriteableRegistry(), size -> searchRequest.source().size(size))); + restRequest.withContentOrSourceParamParserOrNull( + parser -> RestSearchAction.parseSearchRequest( + searchRequest, + restRequest, + parser, + client.getNamedWriteableRegistry(), + size -> searchRequest.source().size(size) + ) + ); RestSearchAction.checkRestTotalHits(restRequest, searchRequest); return channel -> client.execute(RollupSearchAction.INSTANCE, searchRequest, new RestToXContentListener<>(channel)); } diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestStartRollupJobAction.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestStartRollupJobAction.java index f65b869d8202e..dcc6f8512ab06 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestStartRollupJobAction.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestStartRollupJobAction.java @@ -24,8 +24,7 @@ public class RestStartRollupJobAction extends BaseRestHandler { @Override public List routes() { return List.of( - Route.builder(POST, "/_rollup/job/{id}/_start") - .replaces(POST, "/_xpack/rollup/job/{id}/_start", RestApiVersion.V_7).build() + Route.builder(POST, "/_rollup/job/{id}/_start").replaces(POST, "/_xpack/rollup/job/{id}/_start", RestApiVersion.V_7).build() ); } diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestStopRollupJobAction.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestStopRollupJobAction.java index 31b3bde24457d..b7cbf7c15e5f1 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestStopRollupJobAction.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestStopRollupJobAction.java @@ -25,8 +25,7 @@ public class RestStopRollupJobAction extends BaseRestHandler { @Override public List routes() { return List.of( - Route.builder(POST, "/_rollup/job/{id}/_stop") - .replaces(POST, "/_xpack/rollup/job/{id}/_stop", RestApiVersion.V_7).build() + Route.builder(POST, "/_rollup/job/{id}/_stop").replaces(POST, "/_xpack/rollup/job/{id}/_stop", 
RestApiVersion.V_7).build() ); } diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/v2/CompressingOfflineSorter.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/v2/CompressingOfflineSorter.java index 72adb9ccee508..e95b989158d3f 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/v2/CompressingOfflineSorter.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/v2/CompressingOfflineSorter.java @@ -31,11 +31,8 @@ * An {@link OfflineSorter} that compresses the values using a {@link Deflater}. */ class CompressingOfflineSorter extends OfflineSorter { - CompressingOfflineSorter(Directory dir, - String tempFileNamePrefix, - Comparator comparator, - int ramBufferSizeMB) { - super(dir, tempFileNamePrefix, comparator, OfflineSorter.BufferSize.megabytes(ramBufferSizeMB/2), 2, -1, null, 1); + CompressingOfflineSorter(Directory dir, String tempFileNamePrefix, Comparator comparator, int ramBufferSizeMB) { + super(dir, tempFileNamePrefix, comparator, OfflineSorter.BufferSize.megabytes(ramBufferSizeMB / 2), 2, -1, null, 1); } static class Writer extends ByteSequencesWriter { diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/v2/FieldValueFetcher.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/v2/FieldValueFetcher.java index fa19f05b7c0f7..ff3ff9c24ba0c 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/v2/FieldValueFetcher.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/v2/FieldValueFetcher.java @@ -36,9 +36,7 @@ class FieldValueFetcher { final IndexFieldData fieldData; final Function valueFunc; - protected FieldValueFetcher(String name, - MappedFieldType fieldType, IndexFieldData fieldData, - Function valueFunc) { + protected FieldValueFetcher(String name, MappedFieldType fieldType, IndexFieldData fieldData, Function valueFunc) { this.name = name; this.fieldType = fieldType; this.format = fieldType.docValueFormat(null, null); @@ -109,8 +107,9 @@ static List buildHistograms(SearchExecutionContext context, S static Function getValidator(String field) { return value -> { if (VALID_TYPES.contains(value.getClass()) == false) { - throw new IllegalArgumentException("Expected [" + VALID_TYPES + "] for field [" + field + "], " + - "got [" + value.getClass() + "]"); + throw new IllegalArgumentException( + "Expected [" + VALID_TYPES + "] for field [" + field + "], " + "got [" + value.getClass() + "]" + ); } return value; }; diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/v2/RestRollupAction.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/v2/RestRollupAction.java index 7972dd2c0b046..c87cafb9ef9cb 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/v2/RestRollupAction.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/v2/RestRollupAction.java @@ -11,8 +11,8 @@ import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.RestToXContentListener; -import org.elasticsearch.xpack.core.rollup.action.RollupAction; import org.elasticsearch.xpack.core.rollup.RollupActionConfig; +import org.elasticsearch.xpack.core.rollup.action.RollupAction; import java.io.IOException; import java.util.List; diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/v2/RollupShardIndexer.java 
b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/v2/RollupShardIndexer.java index d3a0e6787eaf8..6cd918afee846 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/v2/RollupShardIndexer.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/v2/RollupShardIndexer.java @@ -107,12 +107,14 @@ class RollupShardIndexer { final Set tmpFiles = new HashSet<>(); final Set tmpFilesDeleted = new HashSet<>(); - RollupShardIndexer(Client client, - IndexService indexService, - ShardId shardId, - RollupActionConfig config, - String tmpIndex, - int ramBufferSizeMB) { + RollupShardIndexer( + Client client, + IndexService indexService, + ShardId shardId, + RollupActionConfig config, + String tmpIndex, + int ramBufferSizeMB + ) { this.client = client; this.indexShard = indexService.getShard(shardId.id()); this.config = config; @@ -162,8 +164,9 @@ public void deleteFile(String name) throws IOException { if (config.getGroupConfig().getHistogram() != null) { HistogramGroupConfig histoConfig = config.getGroupConfig().getHistogram(); - this.groupFieldFetchers.addAll(FieldValueFetcher.buildHistograms(searchExecutionContext, - histoConfig.getFields(), histoConfig.getInterval())); + this.groupFieldFetchers.addAll( + FieldValueFetcher.buildHistograms(searchExecutionContext, histoConfig.getFields(), histoConfig.getInterval()) + ); } if (config.getMetricsConfig().size() > 0) { @@ -187,11 +190,10 @@ private void verifyTimestampField(MappedFieldType fieldType) { throw new IllegalArgumentException("fieldType is null"); } if (fieldType instanceof DateFieldMapper.DateFieldType == false) { - throw new IllegalArgumentException("Wrong type for the timestamp field, " + - "expected [date], got [" + fieldType.name() + "]"); + throw new IllegalArgumentException("Wrong type for the timestamp field, " + "expected [date], got [" + fieldType.name() + "]"); } if (fieldType.isSearchable() == false) { - throw new IllegalArgumentException("The timestamp field [" + fieldType.name() + "] is not searchable"); + throw new IllegalArgumentException("The timestamp field [" + fieldType.name() + "] is not searchable"); } } @@ -203,7 +205,7 @@ public long execute() throws IOException { } while (bucket != null); } // TODO: check that numIndexed == numSent, otherwise throw an exception - logger.info("Successfully sent [" + numIndexed.get() + "], indexed [" + numIndexed.get() + "]"); + logger.info("Successfully sent [" + numIndexed.get() + "], indexed [" + numIndexed.get() + "]"); return numIndexed.get(); } @@ -220,8 +222,13 @@ public void afterBulk(long executionId, BulkRequest request, BulkResponse respon if (response.hasFailures()) { Map failures = Arrays.stream(response.getItems()) .filter(BulkItemResponse::isFailed) - .collect(Collectors.toMap(BulkItemResponse::getId, BulkItemResponse::getFailureMessage, - (msg1, msg2) -> Objects.equals(msg1, msg2) ? msg1 : msg1 + "," + msg2)); + .collect( + Collectors.toMap( + BulkItemResponse::getId, + BulkItemResponse::getFailureMessage, + (msg1, msg2) -> Objects.equals(msg1, msg2) ? 
msg1 : msg1 + "," + msg2 + ) + ); logger.error("failures: [{}]", failures); } } @@ -257,9 +264,7 @@ private Rounding createRounding(RollupActionDateHistogramGroupConfig config) { return tzRoundingBuilder.timeZone(zoneId).build(); } - private void indexBucket(BucketKey key, - List fieldsMetrics, - int docCount) { + private void indexBucket(BucketKey key, List fieldsMetrics, int docCount) { IndexRequestBuilder request = client.prepareIndex(tmpIndex); Map doc = new HashMap<>(2 + key.groupFields.size() + fieldsMetrics.size()); doc.put(DocCountFieldMapper.NAME, docCount); @@ -320,7 +325,7 @@ private Long computeBucket(long lastRounding) throws IOException { } } } - ++ docCount; + ++docCount; lastKey = key; } next = it.next(); @@ -339,8 +344,7 @@ private Long findNextRounding(long lastRounding) throws IOException { final NextRoundingVisitor visitor = new NextRoundingVisitor(rounding, lastRounding); try { pointValues.intersect(visitor); - } catch (CollectionTerminatedException exc) { - } + } catch (CollectionTerminatedException exc) {} if (visitor.nextRounding != null) { nextRounding = nextRounding == null ? visitor.nextRounding : Math.min(nextRounding, visitor.nextRounding); } @@ -376,16 +380,20 @@ private static Comparator keyComparator() { return (o1, o2) -> { int keySize1 = readInt(o1.bytes, o1.offset); int keySize2 = readInt(o2.bytes, o2.offset); - return FutureArrays.compareUnsigned(o1.bytes, o1.offset + Integer.BYTES, keySize1 + o1.offset + Integer.BYTES, - o2.bytes, o2.offset + Integer.BYTES, keySize2 + o2.offset + Integer.BYTES); + return FutureArrays.compareUnsigned( + o1.bytes, + o1.offset + Integer.BYTES, + keySize1 + o1.offset + Integer.BYTES, + o2.bytes, + o2.offset + Integer.BYTES, + keySize2 + o2.offset + Integer.BYTES + ); }; } private static int readInt(byte[] bytes, int offset) { - return ((bytes[offset] & 0xFF) << 24) - | ((bytes[offset + 1] & 0xFF) << 16) - | ((bytes[offset + 2] & 0xFF) << 8) - | (bytes[offset + 3] & 0xFF); + return ((bytes[offset] & 0xFF) << 24) | ((bytes[offset + 1] & 0xFF) << 16) | ((bytes[offset + 2] & 0xFF) << 8) | (bytes[offset + 3] + & 0xFF); } private static class BucketKey { @@ -402,8 +410,7 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; BucketKey other = (BucketKey) o; - return timestamp == other.timestamp && - Objects.equals(groupFields, other.groupFields); + return timestamp == other.timestamp && Objects.equals(groupFields, other.groupFields); } @Override @@ -413,10 +420,7 @@ public int hashCode() { @Override public String toString() { - return "BucketKey{" + - "timestamp=" + timestamp + - ", groupFields=" + groupFields + - '}'; + return "BucketKey{" + "timestamp=" + timestamp + ", groupFields=" + groupFields + '}'; } } @@ -424,8 +428,7 @@ private class BucketCollector implements Collector { private final long timestamp; private final XExternalRefSorter externalSorter; - private BucketCollector(long timestamp, - XExternalRefSorter externalSorter) { + private BucketCollector(long timestamp, XExternalRefSorter externalSorter) { this.externalSorter = externalSorter; this.timestamp = timestamp; } @@ -436,8 +439,7 @@ public LeafCollector getLeafCollector(LeafReaderContext context) { final List metricsFieldLeaves = leafFetchers(context, metricsFieldFetchers); return new LeafCollector() { @Override - public void setScorer(Scorable scorer) { - } + public void setScorer(Scorable scorer) {} @Override public void collect(int docID) throws IOException { diff --git 
a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/v2/TransportRollupAction.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/v2/TransportRollupAction.java index 3a5296cf1f768..eceb906ccb4f0 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/v2/TransportRollupAction.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/v2/TransportRollupAction.java @@ -73,23 +73,37 @@ public class TransportRollupAction extends AcknowledgedTransportMasterNodeAction private final MetadataCreateIndexService metadataCreateIndexService; @Inject - public TransportRollupAction(Client client, - ClusterService clusterService, - TransportService transportService, - ThreadPool threadPool, - MetadataCreateIndexService metadataCreateIndexService, - ActionFilters actionFilters, - IndexNameExpressionResolver indexNameExpressionResolver) { - super(RollupAction.NAME, transportService, clusterService, threadPool, actionFilters, RollupAction.Request::new, - indexNameExpressionResolver, ThreadPool.Names.SAME); + public TransportRollupAction( + Client client, + ClusterService clusterService, + TransportService transportService, + ThreadPool threadPool, + MetadataCreateIndexService metadataCreateIndexService, + ActionFilters actionFilters, + IndexNameExpressionResolver indexNameExpressionResolver + ) { + super( + RollupAction.NAME, + transportService, + clusterService, + threadPool, + actionFilters, + RollupAction.Request::new, + indexNameExpressionResolver, + ThreadPool.Names.SAME + ); this.client = new OriginSettingClient(client, ClientHelper.ROLLUP_ORIGIN); this.clusterService = clusterService; this.metadataCreateIndexService = metadataCreateIndexService; } @Override - protected void masterOperation(Task task, RollupAction.Request request, ClusterState state, - ActionListener listener) throws IOException { + protected void masterOperation( + Task task, + RollupAction.Request request, + ClusterState state, + ActionListener listener + ) throws IOException { String originalIndexName = request.getSourceIndex(); final String rollupIndexName; @@ -109,30 +123,34 @@ protected void masterOperation(Task task, RollupAction.Request request, ClusterS return; } - FieldCapabilitiesRequest fieldCapsRequest = new FieldCapabilitiesRequest() - .indices(originalIndexName) + FieldCapabilitiesRequest fieldCapsRequest = new FieldCapabilitiesRequest().indices(originalIndexName) .fields(request.getRollupConfig().getAllFields().toArray(new String[0])); fieldCapsRequest.setParentTask(clusterService.localNode().getId(), task.getId()); // Add the source index name and UUID to the rollup index metadata. If the original index is a rollup index itself, // we will add the name and UUID of the raw index that we initially rolled up. IndexMetadata originalIndexMetadata = state.getMetadata().index(originalIndexName); - String sourceIndexName = IndexMetadata.INDEX_ROLLUP_SOURCE_NAME.exists(originalIndexMetadata.getSettings()) ? - IndexMetadata.INDEX_ROLLUP_SOURCE_NAME.get(originalIndexMetadata.getSettings()) : originalIndexName; - String sourceIndexUuid = IndexMetadata.INDEX_ROLLUP_SOURCE_UUID.exists(originalIndexMetadata.getSettings()) ? 
- IndexMetadata.INDEX_ROLLUP_SOURCE_UUID.get(originalIndexMetadata.getSettings()) : originalIndexMetadata.getIndexUUID(); - - CreateIndexClusterStateUpdateRequest createIndexClusterStateUpdateRequest = - new CreateIndexClusterStateUpdateRequest("rollup", tmpIndexName, tmpIndexName) - .settings(Settings.builder().put(IndexMetadata.SETTING_INDEX_HIDDEN, true).build()) - .mappings(XContentHelper.convertToJson(BytesReference.bytes(mapping), false, XContentType.JSON)); + String sourceIndexName = IndexMetadata.INDEX_ROLLUP_SOURCE_NAME.exists(originalIndexMetadata.getSettings()) + ? IndexMetadata.INDEX_ROLLUP_SOURCE_NAME.get(originalIndexMetadata.getSettings()) + : originalIndexName; + String sourceIndexUuid = IndexMetadata.INDEX_ROLLUP_SOURCE_UUID.exists(originalIndexMetadata.getSettings()) + ? IndexMetadata.INDEX_ROLLUP_SOURCE_UUID.get(originalIndexMetadata.getSettings()) + : originalIndexMetadata.getIndexUUID(); + + CreateIndexClusterStateUpdateRequest createIndexClusterStateUpdateRequest = new CreateIndexClusterStateUpdateRequest( + "rollup", + tmpIndexName, + tmpIndexName + ).settings(Settings.builder().put(IndexMetadata.SETTING_INDEX_HIDDEN, true).build()) + .mappings(XContentHelper.convertToJson(BytesReference.bytes(mapping), false, XContentType.JSON)); RollupIndexerAction.Request rollupIndexerRequest = new RollupIndexerAction.Request(request); ResizeRequest resizeRequest = new ResizeRequest(request.getRollupIndex(), tmpIndexName); resizeRequest.setResizeType(ResizeType.CLONE); - resizeRequest.getTargetIndexRequest() - .settings(Settings.builder().put(IndexMetadata.SETTING_INDEX_HIDDEN, false).build()); + resizeRequest.getTargetIndexRequest().settings(Settings.builder().put(IndexMetadata.SETTING_INDEX_HIDDEN, false).build()); UpdateSettingsRequest updateSettingsReq = new UpdateSettingsRequest( - Settings.builder().put(IndexMetadata.SETTING_BLOCKS_WRITE, true).build(), tmpIndexName); + Settings.builder().put(IndexMetadata.SETTING_BLOCKS_WRITE, true).build(), + tmpIndexName + ); // 1. validate Rollup Config against Field Caps // 2. 
create hidden temporary index @@ -147,8 +165,9 @@ protected void masterOperation(Task task, RollupAction.Request request, ClusterS client.fieldCaps(fieldCapsRequest, ActionListener.wrap(fieldCapsResponse -> { RollupActionRequestValidationException validationException = new RollupActionRequestValidationException(); if (fieldCapsResponse.get().size() == 0) { - validationException.addValidationError("Could not find any fields in the index [" - + originalIndexName + "] that were configured in job"); + validationException.addValidationError( + "Could not find any fields in the index [" + originalIndexName + "] that were configured in job" + ); listener.onFailure(validationException); return; } @@ -162,13 +181,20 @@ protected void masterOperation(Task task, RollupAction.Request request, ClusterS clusterService.submitStateUpdateTask("rollup create index", new ClusterStateUpdateTask() { @Override public ClusterState execute(ClusterState currentState) throws Exception { - return metadataCreateIndexService - .applyCreateIndexRequest(currentState, createIndexClusterStateUpdateRequest, true, - (builder, indexMetadata) -> builder.put(IndexMetadata.builder(indexMetadata).settings(Settings.builder() - .put(indexMetadata.getSettings()) - .put(IndexMetadata.INDEX_ROLLUP_SOURCE_NAME.getKey(), sourceIndexName) - .put(IndexMetadata.INDEX_ROLLUP_SOURCE_UUID.getKey(), sourceIndexUuid) - ))); + return metadataCreateIndexService.applyCreateIndexRequest( + currentState, + createIndexClusterStateUpdateRequest, + true, + (builder, indexMetadata) -> builder.put( + IndexMetadata.builder(indexMetadata) + .settings( + Settings.builder() + .put(indexMetadata.getSettings()) + .put(IndexMetadata.INDEX_ROLLUP_SOURCE_NAME.getKey(), sourceIndexName) + .put(IndexMetadata.INDEX_ROLLUP_SOURCE_UUID.getKey(), sourceIndexUuid) + ) + ) + ); } public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) { @@ -185,18 +211,30 @@ public void clusterStateProcessed(String source, ClusterState oldState, ClusterS // 6. 
publishMetadata(originalIndexName, tmpIndexName, rollupIndexName, listener); } else { - deleteTmpIndex(originalIndexName, tmpIndexName, listener, - new ElasticsearchException("Unable to resize temp rollup index [" + tmpIndexName + "]")); + deleteTmpIndex( + originalIndexName, + tmpIndexName, + listener, + new ElasticsearchException("Unable to resize temp rollup index [" + tmpIndexName + "]") + ); } }, e -> deleteTmpIndex(originalIndexName, tmpIndexName, listener, e))); } else { - deleteTmpIndex(originalIndexName, tmpIndexName, listener, - new ElasticsearchException("Unable to update settings of temp rollup index [" +tmpIndexName+ "]")); + deleteTmpIndex( + originalIndexName, + tmpIndexName, + listener, + new ElasticsearchException("Unable to update settings of temp rollup index [" + tmpIndexName + "]") + ); } }, e -> deleteTmpIndex(originalIndexName, tmpIndexName, listener, e))); } else { - deleteTmpIndex(originalIndexName, tmpIndexName, listener, - new ElasticsearchException("Unable to index into temp rollup index [" + tmpIndexName + "]")); + deleteTmpIndex( + originalIndexName, + tmpIndexName, + listener, + new ElasticsearchException("Unable to index into temp rollup index [" + tmpIndexName + "]") + ); } }, e -> deleteTmpIndex(originalIndexName, tmpIndexName, listener, e))); } @@ -226,14 +264,14 @@ private XContentBuilder getMapping(RollupActionConfig config) throws IOException */ private static XContentBuilder getDynamicTemplates(XContentBuilder builder) throws IOException { return builder.startArray("dynamic_templates") - .startObject() - .startObject("strings") - .field("match_mapping_type", "string") - .startObject("mapping") - .field("type", "keyword") - .endObject() - .endObject() - .endObject() + .startObject() + .startObject("strings") + .field("match_mapping_type", "string") + .startObject("mapping") + .field("type", "keyword") + .endObject() + .endObject() + .endObject() .endArray(); } @@ -248,24 +286,27 @@ private static XContentBuilder getProperties(XContentBuilder builder, RollupActi String dateField = dateHistogramConfig.getField(); String dateIntervalType = dateHistogramConfig.getIntervalTypeName(); String dateInterval = dateHistogramConfig.getInterval().toString(); - String tz = dateHistogramConfig.getTimeZone() != null ? dateHistogramConfig.getTimeZone() : - RollupActionDateHistogramGroupConfig.DEFAULT_TIMEZONE; - - builder.startObject(dateField).field("type", DateFieldMapper.CONTENT_TYPE) - .startObject("meta") - .field(dateIntervalType, dateInterval) - .field(RollupActionDateHistogramGroupConfig.CalendarInterval.TIME_ZONE, tz) - .endObject() + String tz = dateHistogramConfig.getTimeZone() != null + ? 
dateHistogramConfig.getTimeZone() + : RollupActionDateHistogramGroupConfig.DEFAULT_TIMEZONE; + + builder.startObject(dateField) + .field("type", DateFieldMapper.CONTENT_TYPE) + .startObject("meta") + .field(dateIntervalType, dateInterval) + .field(RollupActionDateHistogramGroupConfig.CalendarInterval.TIME_ZONE, tz) + .endObject() .endObject(); HistogramGroupConfig histogramGroupConfig = groupConfig.getHistogram(); if (histogramGroupConfig != null) { for (String field : histogramGroupConfig.getFields()) { - builder.startObject(field).field("type", NumberFieldMapper.NumberType.DOUBLE.typeName()) + builder.startObject(field) + .field("type", NumberFieldMapper.NumberType.DOUBLE.typeName()) .startObject("meta") - .field(HistogramGroupConfig.INTERVAL, String.valueOf(histogramGroupConfig.getInterval())) + .field(HistogramGroupConfig.INTERVAL, String.valueOf(histogramGroupConfig.getInterval())) .endObject() - .endObject(); + .endObject(); } } @@ -283,8 +324,12 @@ private static XContentBuilder getProperties(XContentBuilder builder, RollupActi return builder.endObject(); } - private void publishMetadata(String originalIndexName, String tmpIndexName, String rollupIndexName, - ActionListener listener) { + private void publishMetadata( + String originalIndexName, + String tmpIndexName, + String rollupIndexName, + ActionListener listener + ) { // Update rollup metadata to include this index clusterService.submitStateUpdateTask("update-rollup-metadata", new ClusterStateUpdateTask() { @Override @@ -308,8 +353,13 @@ public ClusterState execute(ClusterState currentState) { // considered a write index backingIndices.add(rollupIndex); backingIndices.addAll(originalDataStream.getIndices()); - DataStream dataStream = new DataStream(originalDataStream.getName(), originalDataStream.getTimeStampField(), - backingIndices, originalDataStream.getGeneration(), originalDataStream.getMetadata()); + DataStream dataStream = new DataStream( + originalDataStream.getName(), + originalDataStream.getTimeStampField(), + backingIndices, + originalDataStream.getGeneration(), + originalDataStream.getMetadata() + ); metadataBuilder.put(dataStream); } return ClusterState.builder(currentState).metadata(metadataBuilder.build()).build(); @@ -317,8 +367,12 @@ public ClusterState execute(ClusterState currentState) { @Override public void onFailure(String source, Exception e) { - deleteTmpIndex(originalIndexName, tmpIndexName, - listener, new ElasticsearchException("failed to publish new cluster state with rollup metadata", e)); + deleteTmpIndex( + originalIndexName, + tmpIndexName, + listener, + new ElasticsearchException("failed to publish new cluster state with rollup metadata", e) + ); } }); } diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/v2/TransportRollupIndexerAction.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/v2/TransportRollupIndexerAction.java index 0e65c3e203bdb..b550365378f16 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/v2/TransportRollupIndexerAction.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/v2/TransportRollupIndexerAction.java @@ -43,9 +43,11 @@ * * TODO: Enforce that we don't retry on another replica if we throw an error after sending some buckets. 
*/ -public class TransportRollupIndexerAction - extends TransportBroadcastAction { +public class TransportRollupIndexerAction extends TransportBroadcastAction< + RollupIndexerAction.Request, + RollupIndexerAction.Response, + RollupIndexerAction.ShardRequest, + RollupIndexerAction.ShardResponse> { private static final int SORTER_RAM_SIZE_MB = 100; @@ -54,24 +56,35 @@ public class TransportRollupIndexerAction private final IndicesService indicesService; @Inject - public TransportRollupIndexerAction(Client client, - ClusterService clusterService, - TransportService transportService, - IndicesService indicesService, - ActionFilters actionFilters, - IndexNameExpressionResolver indexNameExpressionResolver) { - super(RollupIndexerAction.NAME, clusterService, transportService, actionFilters, - indexNameExpressionResolver, RollupIndexerAction.Request::new, RollupIndexerAction.ShardRequest::new, - TASK_THREAD_POOL_NAME); + public TransportRollupIndexerAction( + Client client, + ClusterService clusterService, + TransportService transportService, + IndicesService indicesService, + ActionFilters actionFilters, + IndexNameExpressionResolver indexNameExpressionResolver + ) { + super( + RollupIndexerAction.NAME, + clusterService, + transportService, + actionFilters, + indexNameExpressionResolver, + RollupIndexerAction.Request::new, + RollupIndexerAction.ShardRequest::new, + TASK_THREAD_POOL_NAME + ); this.client = new OriginSettingClient(client, ClientHelper.ROLLUP_ORIGIN); this.clusterService = clusterService; this.indicesService = indicesService; } @Override - protected GroupShardsIterator shards(ClusterState clusterState, - RollupIndexerAction.Request request, - String[] concreteIndices) { + protected GroupShardsIterator shards( + ClusterState clusterState, + RollupIndexerAction.Request request, + String[] concreteIndices + ) { if (concreteIndices.length > 1) { throw new IllegalArgumentException("multiple indices: " + Arrays.toString(concreteIndices)); } @@ -87,8 +100,7 @@ protected ClusterBlockException checkGlobalBlock(ClusterState state, RollupIndex } @Override - protected ClusterBlockException checkRequestBlock(ClusterState state, RollupIndexerAction.Request request, - String[] concreteIndices) { + protected ClusterBlockException checkRequestBlock(ClusterState state, RollupIndexerAction.Request request, String[] concreteIndices) { return state.blocks().indicesBlockedException(ClusterBlockLevel.METADATA_WRITE, concreteIndices); } @@ -98,17 +110,22 @@ protected void doExecute(Task task, RollupIndexerAction.Request request, ActionL } @Override - protected RollupIndexerAction.ShardRequest newShardRequest(int numShards, ShardRouting shard, - RollupIndexerAction.Request request) { + protected RollupIndexerAction.ShardRequest newShardRequest(int numShards, ShardRouting shard, RollupIndexerAction.Request request) { return new RollupIndexerAction.ShardRequest(shard.shardId(), request); } @Override protected RollupIndexerAction.ShardResponse shardOperation(RollupIndexerAction.ShardRequest request, Task task) throws IOException { IndexService indexService = indicesService.indexService(request.shardId().getIndex()); - String tmpIndexName = ".rolluptmp-" + request.getRollupIndex(); - RollupShardIndexer indexer = new RollupShardIndexer(client, indexService, request.shardId(), - request.getRollupConfig(), tmpIndexName, SORTER_RAM_SIZE_MB); + String tmpIndexName = ".rolluptmp-" + request.getRollupIndex(); + RollupShardIndexer indexer = new RollupShardIndexer( + client, + indexService, + request.shardId(), + 
request.getRollupConfig(), + tmpIndexName, + SORTER_RAM_SIZE_MB + ); indexer.execute(); return new RollupIndexerAction.ShardResponse(request.shardId()); } @@ -119,9 +136,11 @@ protected RollupIndexerAction.ShardResponse readShardResponse(StreamInput in) th } @Override - protected RollupIndexerAction.Response newResponse(RollupIndexerAction.Request request, - AtomicReferenceArray shardsResponses, - ClusterState clusterState) { + protected RollupIndexerAction.Response newResponse( + RollupIndexerAction.Request request, + AtomicReferenceArray shardsResponses, + ClusterState clusterState + ) { for (int i = 0; i < shardsResponses.length(); i++) { Object shardResponse = shardsResponses.get(i); if (shardResponse == null) { diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/v2/XExternalRefSorter.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/v2/XExternalRefSorter.java index f10913daadb20..781e3a11afb54 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/v2/XExternalRefSorter.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/v2/XExternalRefSorter.java @@ -81,8 +81,10 @@ public BytesRefIterator iterator() throws IOException { input = null; } - OfflineSorter.ByteSequencesReader reader = - sorter.getReader(sorter.getDirectory().openChecksumInput(sortedFileName, IOContext.READONCE), sortedFileName); + OfflineSorter.ByteSequencesReader reader = sorter.getReader( + sorter.getDirectory().openChecksumInput(sortedFileName, IOContext.READONCE), + sortedFileName + ); return new ByteSequenceIterator(reader); } @@ -103,9 +105,7 @@ public void close() throws IOException { closeWriter(); } finally { if (input == null) { - deleteFilesIgnoringExceptions(sorter.getDirectory(), - input == null ? null : input.getName(), - sortedFileName); + deleteFilesIgnoringExceptions(sorter.getDirectory(), input == null ? null : input.getName(), sortedFileName); } } } @@ -146,7 +146,7 @@ public Comparator getComparator() { } private static void deleteFilesIgnoringExceptions(Directory dir, String... 
files) { - for(String name : files) { + for (String name : files) { if (name != null) { try { dir.deleteFile(name); diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/LocalStateRollup.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/LocalStateRollup.java index 8115532353a3d..406e6544dca74 100644 --- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/LocalStateRollup.java +++ b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/LocalStateRollup.java @@ -18,4 +18,3 @@ public LocalStateRollup(final Settings settings, final Path configPath) throws E plugins.add(new Rollup(settings)); } } - diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/RollupInfoTransportActionTests.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/RollupInfoTransportActionTests.java index d8bd7405f2c94..8553c104cb456 100644 --- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/RollupInfoTransportActionTests.java +++ b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/RollupInfoTransportActionTests.java @@ -24,20 +24,17 @@ public class RollupInfoTransportActionTests extends ESTestCase { public void testAvailable() { - RollupInfoTransportAction featureSet = new RollupInfoTransportAction( - mock(TransportService.class), mock(ActionFilters.class)); + RollupInfoTransportAction featureSet = new RollupInfoTransportAction(mock(TransportService.class), mock(ActionFilters.class)); assertThat(featureSet.available(), is(true)); } public void testEnabledDefault() { - RollupInfoTransportAction featureSet = new RollupInfoTransportAction( - mock(TransportService.class), mock(ActionFilters.class)); + RollupInfoTransportAction featureSet = new RollupInfoTransportAction(mock(TransportService.class), mock(ActionFilters.class)); assertThat(featureSet.enabled(), is(true)); } public void testUsage() throws ExecutionException, InterruptedException, IOException { - var usageAction = new RollupUsageTransportAction(mock(TransportService.class), null, null, - mock(ActionFilters.class), null); + var usageAction = new RollupUsageTransportAction(mock(TransportService.class), null, null, mock(ActionFilters.class), null); PlainActionFuture future = new PlainActionFuture<>(); usageAction.masterOperation(null, null, null, future); XPackFeatureSet.Usage rollupUsage = future.get().getUsage(); diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/RollupJobIdentifierUtilTests.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/RollupJobIdentifierUtilTests.java index c37bc472d28ce..aa49b9d9bbbf4 100644 --- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/RollupJobIdentifierUtilTests.java +++ b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/RollupJobIdentifierUtilTests.java @@ -46,12 +46,12 @@ public class RollupJobIdentifierUtilTests extends ESTestCase { public void testOneMatch() { final GroupConfig group = new GroupConfig(new DateHistogramGroupConfig.CalendarInterval("foo", new DateHistogramInterval("1h"))); - final RollupJobConfig job = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, group, emptyList(), null); + final RollupJobConfig job = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, group, emptyList(), null); RollupJobCaps cap = new RollupJobCaps(job); Set caps = singletonSet(cap); DateHistogramAggregationBuilder builder = new 
DateHistogramAggregationBuilder("foo").field("foo") - .calendarInterval(job.getGroupConfig().getDateHistogram().getInterval()); + .calendarInterval(job.getGroupConfig().getDateHistogram().getInterval()); Set bestCaps = RollupJobIdentifierUtils.findBestJobs(builder, caps); assertThat(bestCaps.size(), equalTo(1)); @@ -59,12 +59,12 @@ public void testOneMatch() { public void testBiggerButCompatibleInterval() { final GroupConfig group = new GroupConfig(new DateHistogramGroupConfig.CalendarInterval("foo", new DateHistogramInterval("1h"))); - final RollupJobConfig job = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, group, emptyList(), null); + final RollupJobConfig job = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, group, emptyList(), null); RollupJobCaps cap = new RollupJobCaps(job); Set caps = singletonSet(cap); DateHistogramAggregationBuilder builder = new DateHistogramAggregationBuilder("foo").field("foo") - .calendarInterval(new DateHistogramInterval("1d")); + .calendarInterval(new DateHistogramInterval("1d")); Set bestCaps = RollupJobIdentifierUtils.findBestJobs(builder, caps); assertThat(bestCaps.size(), equalTo(1)); @@ -72,7 +72,7 @@ public void testBiggerButCompatibleInterval() { public void testBiggerButCompatibleFixedInterval() { final GroupConfig group = new GroupConfig(new DateHistogramGroupConfig.FixedInterval("foo", new DateHistogramInterval("100s"))); - final RollupJobConfig job = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, group, emptyList(), null); + final RollupJobConfig job = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, group, emptyList(), null); RollupJobCaps cap = new RollupJobCaps(job); Set caps = singletonSet(cap); @@ -85,7 +85,7 @@ public void testBiggerButCompatibleFixedInterval() { public void testBiggerButCompatibleFixedIntervalInCalFormat() { final GroupConfig group = new GroupConfig(new DateHistogramGroupConfig.FixedInterval("foo", new DateHistogramInterval("1h"))); - final RollupJobConfig job = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, group, emptyList(), null); + final RollupJobConfig job = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, group, emptyList(), null); RollupJobCaps cap = new RollupJobCaps(job); Set caps = singletonSet(cap); @@ -98,7 +98,7 @@ public void testBiggerButCompatibleFixedIntervalInCalFormat() { public void testBiggerButCompatibleFixedMillisInterval() { final GroupConfig group = new GroupConfig(new DateHistogramGroupConfig.FixedInterval("foo", new DateHistogramInterval("100ms"))); - final RollupJobConfig job = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, group, emptyList(), null); + final RollupJobConfig job = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, group, emptyList(), null); RollupJobCaps cap = new RollupJobCaps(job); Set caps = singletonSet(cap); @@ -111,21 +111,26 @@ public void testBiggerButCompatibleFixedMillisInterval() { public void testIncompatibleInterval() { final GroupConfig group = new GroupConfig(new DateHistogramGroupConfig.CalendarInterval("foo", new DateHistogramInterval("1d"))); - final RollupJobConfig job = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, group, emptyList(), null); + final RollupJobConfig job = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, group, emptyList(), null); RollupJobCaps cap = new RollupJobCaps(job); Set caps = singletonSet(cap); DateHistogramAggregationBuilder builder = new 
DateHistogramAggregationBuilder("foo").field("foo") - .calendarInterval(new DateHistogramInterval("1h")); + .calendarInterval(new DateHistogramInterval("1h")); RuntimeException e = expectThrows(RuntimeException.class, () -> RollupJobIdentifierUtils.findBestJobs(builder, caps)); - assertThat(e.getMessage(), equalTo("There is not a rollup job that has a [date_histogram] agg on field " + - "[foo] which also satisfies all requirements of query.")); + assertThat( + e.getMessage(), + equalTo( + "There is not a rollup job that has a [date_histogram] agg on field " + + "[foo] which also satisfies all requirements of query." + ) + ); } public void testIncompatibleFixedCalendarInterval() { final GroupConfig group = new GroupConfig(new DateHistogramGroupConfig.FixedInterval("foo", new DateHistogramInterval("5d"))); - final RollupJobConfig job = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, group, emptyList(), null); + final RollupJobConfig job = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, group, emptyList(), null); RollupJobCaps cap = new RollupJobCaps(job); Set caps = singletonSet(cap); @@ -133,30 +138,41 @@ public void testIncompatibleFixedCalendarInterval() { .calendarInterval(new DateHistogramInterval("day")); RuntimeException e = expectThrows(RuntimeException.class, () -> RollupJobIdentifierUtils.findBestJobs(builder, caps)); - assertThat(e.getMessage(), equalTo("There is not a rollup job that has a [date_histogram] agg on field " + - "[foo] which also satisfies all requirements of query.")); + assertThat( + e.getMessage(), + equalTo( + "There is not a rollup job that has a [date_histogram] agg on field " + + "[foo] which also satisfies all requirements of query." + ) + ); } public void testBadTimeZone() { - final GroupConfig group = new GroupConfig(new DateHistogramGroupConfig.CalendarInterval("foo", new DateHistogramInterval("1h"), - null, "CET")); - final RollupJobConfig job = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, group, emptyList(), null); + final GroupConfig group = new GroupConfig( + new DateHistogramGroupConfig.CalendarInterval("foo", new DateHistogramInterval("1h"), null, "CET") + ); + final RollupJobConfig job = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, group, emptyList(), null); RollupJobCaps cap = new RollupJobCaps(job); Set caps = singletonSet(cap); DateHistogramAggregationBuilder builder = new DateHistogramAggregationBuilder("foo").field("foo") - .calendarInterval(new DateHistogramInterval("1h")) - .timeZone(ZoneOffset.UTC); + .calendarInterval(new DateHistogramInterval("1h")) + .timeZone(ZoneOffset.UTC); RuntimeException e = expectThrows(RuntimeException.class, () -> RollupJobIdentifierUtils.findBestJobs(builder, caps)); - assertThat(e.getMessage(), equalTo("There is not a rollup job that has a [date_histogram] agg on field " + - "[foo] which also satisfies all requirements of query.")); + assertThat( + e.getMessage(), + equalTo( + "There is not a rollup job that has a [date_histogram] agg on field " + + "[foo] which also satisfies all requirements of query." 
+ ) + ); } public void testMetricOnlyAgg() { final GroupConfig group = new GroupConfig(new DateHistogramGroupConfig.CalendarInterval("foo", new DateHistogramInterval("1h"))); final List metrics = singletonList(new MetricConfig("bar", singletonList("max"))); - final RollupJobConfig job = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, group, metrics, null); + final RollupJobConfig job = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, group, metrics, null); RollupJobCaps cap = new RollupJobCaps(job); Set caps = singletonSet(cap); @@ -168,34 +184,46 @@ public void testMetricOnlyAgg() { public void testOneOfTwoMatchingCaps() { final GroupConfig group = new GroupConfig(new DateHistogramGroupConfig.CalendarInterval("foo", new DateHistogramInterval("1h"))); - final RollupJobConfig job = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, group, emptyList(), null); + final RollupJobConfig job = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, group, emptyList(), null); RollupJobCaps cap = new RollupJobCaps(job); Set caps = singletonSet(cap); DateHistogramAggregationBuilder builder = new DateHistogramAggregationBuilder("foo").field("foo") - .calendarInterval(new DateHistogramInterval("1h")) - .subAggregation(new MaxAggregationBuilder("the_max").field("bar")); + .calendarInterval(new DateHistogramInterval("1h")) + .subAggregation(new MaxAggregationBuilder("the_max").field("bar")); RuntimeException e = expectThrows(RuntimeException.class, () -> RollupJobIdentifierUtils.findBestJobs(builder, caps)); - assertThat(e.getMessage(), equalTo("There is not a rollup job that has a [max] agg with name [the_max] which also satisfies " + - "all requirements of query.")); + assertThat( + e.getMessage(), + equalTo( + "There is not a rollup job that has a [max] agg with name [the_max] which also satisfies " + "all requirements of query." 
+ ) + ); } public void testTwoJobsSameRollupIndex() { final GroupConfig group = new GroupConfig(new DateHistogramGroupConfig.CalendarInterval("foo", new DateHistogramInterval("1h"))); - final RollupJobConfig job = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, group, emptyList(), null); + final RollupJobConfig job = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, group, emptyList(), null); RollupJobCaps cap = new RollupJobCaps(job); Set caps = new HashSet<>(2); caps.add(cap); final GroupConfig group2 = new GroupConfig(new DateHistogramGroupConfig.CalendarInterval("foo", new DateHistogramInterval("1h"))); - final RollupJobConfig job2 = - new RollupJobConfig("foo2", "index", job.getRollupIndex(), "*/5 * * * * ?", 10, group2, emptyList(), null); + final RollupJobConfig job2 = new RollupJobConfig( + "foo2", + "index", + job.getRollupIndex(), + "*/5 * * * * ?", + 10, + group2, + emptyList(), + null + ); RollupJobCaps cap2 = new RollupJobCaps(job2); caps.add(cap2); DateHistogramAggregationBuilder builder = new DateHistogramAggregationBuilder("foo").field("foo") - .calendarInterval(new DateHistogramInterval("1h")); + .calendarInterval(new DateHistogramInterval("1h")); Set bestCaps = RollupJobIdentifierUtils.findBestJobs(builder, caps); @@ -206,38 +234,50 @@ public void testTwoJobsSameRollupIndex() { public void testTwoJobsButBothPartialMatches() { final GroupConfig group = new GroupConfig(new DateHistogramGroupConfig.CalendarInterval("foo", new DateHistogramInterval("1h"))); final List metrics = singletonList(new MetricConfig("bar", singletonList("max"))); - final RollupJobConfig job = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, group, metrics, null); + final RollupJobConfig job = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, group, metrics, null); RollupJobCaps cap = new RollupJobCaps(job); Set caps = new HashSet<>(2); caps.add(cap); // TODO Is it what we really want to test? - final RollupJobConfig job2 = new RollupJobConfig("foo2", "index", "rollup", "*/5 * * * * ?", 10, group, emptyList(), null); + final RollupJobConfig job2 = new RollupJobConfig("foo2", "index", "rollup", "*/5 * * * * ?", 10, group, emptyList(), null); RollupJobCaps cap2 = new RollupJobCaps(job2); caps.add(cap2); DateHistogramAggregationBuilder builder = new DateHistogramAggregationBuilder("foo").field("foo") - .calendarInterval(new DateHistogramInterval("1h")) - .subAggregation(new MaxAggregationBuilder("the_max").field("bar")) // <-- comes from job1 - .subAggregation(new MinAggregationBuilder("the_min").field("bar")); // <-- comes from job2 + .calendarInterval(new DateHistogramInterval("1h")) + .subAggregation(new MaxAggregationBuilder("the_max").field("bar")) // <-- comes from job1 + .subAggregation(new MinAggregationBuilder("the_min").field("bar")); // <-- comes from job2 RuntimeException e = expectThrows(RuntimeException.class, () -> RollupJobIdentifierUtils.findBestJobs(builder, caps)); - assertThat(e.getMessage(), equalTo("There is not a rollup job that has a [min] agg with name [the_min] which also " + - "satisfies all requirements of query.")); + assertThat( + e.getMessage(), + equalTo( + "There is not a rollup job that has a [min] agg with name [the_min] which also " + "satisfies all requirements of query." 
+ ) + ); } public void testComparableDifferentDateIntervals() { final GroupConfig group = new GroupConfig(new DateHistogramGroupConfig.CalendarInterval("foo", new DateHistogramInterval("1h"))); - final RollupJobConfig job = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, group, emptyList(), null); + final RollupJobConfig job = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, group, emptyList(), null); RollupJobCaps cap = new RollupJobCaps(job); final GroupConfig group2 = new GroupConfig(new DateHistogramGroupConfig.CalendarInterval("foo", new DateHistogramInterval("1d"))); - final RollupJobConfig job2 = - new RollupJobConfig("foo2", "index", job.getRollupIndex(), "*/5 * * * * ?", 10, group2, emptyList(), null); + final RollupJobConfig job2 = new RollupJobConfig( + "foo2", + "index", + job.getRollupIndex(), + "*/5 * * * * ?", + 10, + group2, + emptyList(), + null + ); RollupJobCaps cap2 = new RollupJobCaps(job2); DateHistogramAggregationBuilder builder = new DateHistogramAggregationBuilder("foo").field("foo") - .calendarInterval(new DateHistogramInterval("1d")); + .calendarInterval(new DateHistogramInterval("1d")); Set caps = new HashSet<>(2); caps.add(cap); @@ -250,16 +290,24 @@ public void testComparableDifferentDateIntervals() { public void testComparableDifferentDateIntervalsOnlyOneWorks() { final GroupConfig group = new GroupConfig(new DateHistogramGroupConfig.CalendarInterval("foo", new DateHistogramInterval("1h"))); - final RollupJobConfig job = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, group, emptyList(), null); + final RollupJobConfig job = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, group, emptyList(), null); RollupJobCaps cap = new RollupJobCaps(job); final GroupConfig group2 = new GroupConfig(new DateHistogramGroupConfig.CalendarInterval("foo", new DateHistogramInterval("1d"))); - final RollupJobConfig job2 = - new RollupJobConfig("foo2", "index", job.getRollupIndex(), "*/5 * * * * ?", 10, group2, emptyList(), null); + final RollupJobConfig job2 = new RollupJobConfig( + "foo2", + "index", + job.getRollupIndex(), + "*/5 * * * * ?", + 10, + group2, + emptyList(), + null + ); RollupJobCaps cap2 = new RollupJobCaps(job2); DateHistogramAggregationBuilder builder = new DateHistogramAggregationBuilder("foo").field("foo") - .calendarInterval(new DateHistogramInterval("1h")); + .calendarInterval(new DateHistogramInterval("1h")); Set caps = new HashSet<>(2); caps.add(cap); @@ -272,19 +320,30 @@ public void testComparableDifferentDateIntervalsOnlyOneWorks() { public void testComparableNoHistoVsHisto() { final GroupConfig group = new GroupConfig(new DateHistogramGroupConfig.CalendarInterval("foo", new DateHistogramInterval("1h"))); - final RollupJobConfig job = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, group, emptyList(), null); + final RollupJobConfig job = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, group, emptyList(), null); RollupJobCaps cap = new RollupJobCaps(job); final HistogramGroupConfig histoConfig = new HistogramGroupConfig(100L, "bar"); - final GroupConfig group2 = new GroupConfig(new DateHistogramGroupConfig.CalendarInterval("foo", new DateHistogramInterval("1h")), - histoConfig, null); - final RollupJobConfig job2 = - new RollupJobConfig("foo2", "index", job.getRollupIndex(), "*/5 * * * * ?", 10, group2, emptyList(), null); + final GroupConfig group2 = new GroupConfig( + new DateHistogramGroupConfig.CalendarInterval("foo", new 
DateHistogramInterval("1h")), + histoConfig, + null + ); + final RollupJobConfig job2 = new RollupJobConfig( + "foo2", + "index", + job.getRollupIndex(), + "*/5 * * * * ?", + 10, + group2, + emptyList(), + null + ); RollupJobCaps cap2 = new RollupJobCaps(job2); DateHistogramAggregationBuilder builder = new DateHistogramAggregationBuilder("foo").field("foo") - .calendarInterval(new DateHistogramInterval("1h")) - .subAggregation(new HistogramAggregationBuilder("histo").field("bar").interval(100)); + .calendarInterval(new DateHistogramInterval("1h")) + .subAggregation(new HistogramAggregationBuilder("histo").field("bar").interval(100)); Set caps = new HashSet<>(2); caps.add(cap); @@ -297,19 +356,30 @@ public void testComparableNoHistoVsHisto() { public void testComparableNoTermsVsTerms() { final GroupConfig group = new GroupConfig(new DateHistogramGroupConfig.CalendarInterval("foo", new DateHistogramInterval("1h"))); - final RollupJobConfig job = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, group, emptyList(), null); + final RollupJobConfig job = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, group, emptyList(), null); RollupJobCaps cap = new RollupJobCaps(job); final TermsGroupConfig termsConfig = new TermsGroupConfig("bar"); - final GroupConfig group2 = new GroupConfig(new DateHistogramGroupConfig.CalendarInterval("foo", new DateHistogramInterval("1h")), - null, termsConfig); - final RollupJobConfig job2 = - new RollupJobConfig("foo2", "index", job.getRollupIndex(), "*/5 * * * * ?", 10, group2, emptyList(), null); + final GroupConfig group2 = new GroupConfig( + new DateHistogramGroupConfig.CalendarInterval("foo", new DateHistogramInterval("1h")), + null, + termsConfig + ); + final RollupJobConfig job2 = new RollupJobConfig( + "foo2", + "index", + job.getRollupIndex(), + "*/5 * * * * ?", + 10, + group2, + emptyList(), + null + ); RollupJobCaps cap2 = new RollupJobCaps(job2); DateHistogramAggregationBuilder builder = new DateHistogramAggregationBuilder("foo").field("foo") - .calendarInterval(new DateHistogramInterval("1h")) - .subAggregation(new TermsAggregationBuilder("histo").userValueTypeHint(ValueType.STRING).field("bar")); + .calendarInterval(new DateHistogramInterval("1h")) + .subAggregation(new TermsAggregationBuilder("histo").userValueTypeHint(ValueType.STRING).field("bar")); Set caps = new HashSet<>(2); caps.add(cap); @@ -323,131 +393,165 @@ public void testComparableNoTermsVsTerms() { public void testHistoSameNameWrongTypeInCaps() { HistogramAggregationBuilder histo = new HistogramAggregationBuilder("test_histo"); histo.field("foo") - .interval(1L) - .subAggregation(new MaxAggregationBuilder("the_max").field("max_field")) - .subAggregation(new AvgAggregationBuilder("the_avg").field("avg_field")); + .interval(1L) + .subAggregation(new MaxAggregationBuilder("the_max").field("max_field")) + .subAggregation(new AvgAggregationBuilder("the_avg").field("avg_field")); final GroupConfig group = new GroupConfig( - // NOTE same name but wrong type - new DateHistogramGroupConfig.CalendarInterval("foo", new DateHistogramInterval("1d"), null, DateTimeZone.UTC.getID()), - new HistogramGroupConfig(1L, "baz"), // <-- NOTE right type but wrong name - null - ); - final List metrics = - Arrays.asList(new MetricConfig("max_field", singletonList("max")), new MetricConfig("avg_field", singletonList("avg"))); - - final RollupJobConfig job = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, group, metrics, null); + // NOTE same name but wrong type + 
new DateHistogramGroupConfig.CalendarInterval("foo", new DateHistogramInterval("1d"), null, DateTimeZone.UTC.getID()), + new HistogramGroupConfig(1L, "baz"), // <-- NOTE right type but wrong name + null + ); + final List metrics = Arrays.asList( + new MetricConfig("max_field", singletonList("max")), + new MetricConfig("avg_field", singletonList("avg")) + ); + + final RollupJobConfig job = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, group, metrics, null); Set caps = singletonSet(new RollupJobCaps(job)); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> RollupJobIdentifierUtils.findBestJobs(histo, caps)); - assertThat(e.getMessage(), equalTo("There is not a rollup job that has a [histogram] " + - "agg on field [foo] which also satisfies all requirements of query.")); + assertThat( + e.getMessage(), + equalTo( + "There is not a rollup job that has a [histogram] " + "agg on field [foo] which also satisfies all requirements of query." + ) + ); } public void testMissingDateHisto() { DateHistogramAggregationBuilder histo = new DateHistogramAggregationBuilder("test_histo"); histo.calendarInterval(new DateHistogramInterval("1d")) - .field("other_field") - .subAggregation(new MaxAggregationBuilder("the_max").field("max_field")) - .subAggregation(new AvgAggregationBuilder("the_avg").field("avg_field")); + .field("other_field") + .subAggregation(new MaxAggregationBuilder("the_max").field("max_field")) + .subAggregation(new AvgAggregationBuilder("the_avg").field("avg_field")); final GroupConfig group = new GroupConfig( - new DateHistogramGroupConfig.CalendarInterval("foo", new DateHistogramInterval("1d"), null, DateTimeZone.UTC.getID()) - ); - final List metrics = - Arrays.asList(new MetricConfig("max_field", singletonList("max")), new MetricConfig("avg_field", singletonList("avg"))); - - final RollupJobConfig job = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, group, metrics, null); + new DateHistogramGroupConfig.CalendarInterval("foo", new DateHistogramInterval("1d"), null, DateTimeZone.UTC.getID()) + ); + final List metrics = Arrays.asList( + new MetricConfig("max_field", singletonList("max")), + new MetricConfig("avg_field", singletonList("avg")) + ); + + final RollupJobConfig job = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, group, metrics, null); Set caps = singletonSet(new RollupJobCaps(job)); - Exception e = expectThrows(IllegalArgumentException.class, () -> RollupJobIdentifierUtils.findBestJobs(histo,caps)); - assertThat(e.getMessage(), equalTo("There is not a rollup job that has a [date_histogram] agg on field " + - "[other_field] which also satisfies all requirements of query.")); + Exception e = expectThrows(IllegalArgumentException.class, () -> RollupJobIdentifierUtils.findBestJobs(histo, caps)); + assertThat( + e.getMessage(), + equalTo( + "There is not a rollup job that has a [date_histogram] agg on field " + + "[other_field] which also satisfies all requirements of query." 
+ ) + ); } public void testNoMatchingInterval() { DateHistogramAggregationBuilder histo = new DateHistogramAggregationBuilder("test_histo"); histo.fixedInterval(new DateHistogramInterval("1ms")) - .field("foo") - .subAggregation(new MaxAggregationBuilder("the_max").field("max_field")) - .subAggregation(new AvgAggregationBuilder("the_avg").field("avg_field")); + .field("foo") + .subAggregation(new MaxAggregationBuilder("the_max").field("max_field")) + .subAggregation(new AvgAggregationBuilder("the_avg").field("avg_field")); final GroupConfig group = new GroupConfig( - // interval in job is much higher than agg interval above - new DateHistogramGroupConfig.FixedInterval("foo", new DateHistogramInterval("100d"), null, DateTimeZone.UTC.getID()) - ); - final RollupJobConfig job = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, group, emptyList(), null); + // interval in job is much higher than agg interval above + new DateHistogramGroupConfig.FixedInterval("foo", new DateHistogramInterval("100d"), null, DateTimeZone.UTC.getID()) + ); + final RollupJobConfig job = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, group, emptyList(), null); Set caps = singletonSet(new RollupJobCaps(job)); Exception e = expectThrows(RuntimeException.class, () -> RollupJobIdentifierUtils.findBestJobs(histo, caps)); - assertThat(e.getMessage(), equalTo("There is not a rollup job that has a [date_histogram] agg on field [foo] " + - "which also satisfies all requirements of query.")); + assertThat( + e.getMessage(), + equalTo( + "There is not a rollup job that has a [date_histogram] agg on field [foo] " + + "which also satisfies all requirements of query." + ) + ); } public void testDateHistoMissingFieldInCaps() { DateHistogramAggregationBuilder histo = new DateHistogramAggregationBuilder("test_histo"); histo.calendarInterval(new DateHistogramInterval("1d")) - .field("foo") - .subAggregation(new MaxAggregationBuilder("the_max").field("max_field")) - .subAggregation(new AvgAggregationBuilder("the_avg").field("avg_field")); + .field("foo") + .subAggregation(new MaxAggregationBuilder("the_max").field("max_field")) + .subAggregation(new AvgAggregationBuilder("the_avg").field("avg_field")); final GroupConfig group = new GroupConfig( - // NOTE different field from the one in the query - new DateHistogramGroupConfig.CalendarInterval("bar", new DateHistogramInterval("1d"), null, DateTimeZone.UTC.getID()) - ); - final List metrics = - Arrays.asList(new MetricConfig("max_field", singletonList("max")), new MetricConfig("avg_field", singletonList("avg"))); - - final RollupJobConfig job = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, group, metrics, null); + // NOTE different field from the one in the query + new DateHistogramGroupConfig.CalendarInterval("bar", new DateHistogramInterval("1d"), null, DateTimeZone.UTC.getID()) + ); + final List metrics = Arrays.asList( + new MetricConfig("max_field", singletonList("max")), + new MetricConfig("avg_field", singletonList("avg")) + ); + + final RollupJobConfig job = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, group, metrics, null); Set caps = singletonSet(new RollupJobCaps(job)); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> RollupJobIdentifierUtils.findBestJobs(histo, caps)); - assertThat(e.getMessage(), equalTo("There is not a rollup job that has a [date_histogram] agg on field [foo] which also " + - "satisfies all requirements of query.")); + assertThat( + e.getMessage(), 
+ equalTo( + "There is not a rollup job that has a [date_histogram] agg on field [foo] which also " + + "satisfies all requirements of query." + ) + ); } public void testHistoMissingFieldInCaps() { HistogramAggregationBuilder histo = new HistogramAggregationBuilder("test_histo"); histo.interval(1) - .field("foo") - .subAggregation(new MaxAggregationBuilder("the_max").field("max_field")) - .subAggregation(new AvgAggregationBuilder("the_avg").field("avg_field")); + .field("foo") + .subAggregation(new MaxAggregationBuilder("the_max").field("max_field")) + .subAggregation(new AvgAggregationBuilder("the_avg").field("avg_field")); final GroupConfig group = new GroupConfig( - new DateHistogramGroupConfig.CalendarInterval("bar", new DateHistogramInterval("1d"), null, DateTimeZone.UTC.getID()), - new HistogramGroupConfig(1L, "baz"), // <-- NOTE right type but wrong name - null - ); - final List metrics = - Arrays.asList(new MetricConfig("max_field", singletonList("max")), new MetricConfig("avg_field", singletonList("avg"))); - - final RollupJobConfig job = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, group, metrics, null); + new DateHistogramGroupConfig.CalendarInterval("bar", new DateHistogramInterval("1d"), null, DateTimeZone.UTC.getID()), + new HistogramGroupConfig(1L, "baz"), // <-- NOTE right type but wrong name + null + ); + final List metrics = Arrays.asList( + new MetricConfig("max_field", singletonList("max")), + new MetricConfig("avg_field", singletonList("avg")) + ); + + final RollupJobConfig job = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, group, metrics, null); Set caps = singletonSet(new RollupJobCaps(job)); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> RollupJobIdentifierUtils.findBestJobs(histo, caps)); - assertThat(e.getMessage(), equalTo("There is not a rollup job that has a [histogram] agg on field [foo] which also " + - "satisfies all requirements of query.")); + assertThat( + e.getMessage(), + equalTo( + "There is not a rollup job that has a [histogram] agg on field [foo] which also " + "satisfies all requirements of query." 
+ ) + ); } public void testNoMatchingHistoInterval() { HistogramAggregationBuilder histo = new HistogramAggregationBuilder("test_histo"); histo.interval(1) - .field("bar") - .subAggregation(new MaxAggregationBuilder("the_max").field("max_field")) - .subAggregation(new AvgAggregationBuilder("the_avg").field("avg_field")); + .field("bar") + .subAggregation(new MaxAggregationBuilder("the_max").field("max_field")) + .subAggregation(new AvgAggregationBuilder("the_avg").field("avg_field")); final GroupConfig group = new GroupConfig( - new DateHistogramGroupConfig.CalendarInterval("foo", new DateHistogramInterval("1d"), null, DateTimeZone.UTC.getID()), - new HistogramGroupConfig(1L, "baz"), // <-- NOTE right type but wrong name - null - ); - final RollupJobConfig job = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, group, emptyList(), null); + new DateHistogramGroupConfig.CalendarInterval("foo", new DateHistogramInterval("1d"), null, DateTimeZone.UTC.getID()), + new HistogramGroupConfig(1L, "baz"), // <-- NOTE right type but wrong name + null + ); + final RollupJobConfig job = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, group, emptyList(), null); Set caps = singletonSet(new RollupJobCaps(job)); - Exception e = expectThrows(RuntimeException.class, - () -> RollupJobIdentifierUtils.findBestJobs(histo, caps)); - assertThat(e.getMessage(), equalTo("There is not a rollup job that has a [histogram] agg on field " + - "[bar] which also satisfies all requirements of query.")); + Exception e = expectThrows(RuntimeException.class, () -> RollupJobIdentifierUtils.findBestJobs(histo, caps)); + assertThat( + e.getMessage(), + equalTo( + "There is not a rollup job that has a [histogram] agg on field " + "[bar] which also satisfies all requirements of query." + ) + ); } public void testHistoIntervalNotMultiple() { @@ -457,18 +561,22 @@ public void testHistoIntervalNotMultiple() { .subAggregation(new MaxAggregationBuilder("the_max").field("max_field")) .subAggregation(new AvgAggregationBuilder("the_avg").field("avg_field")); - final GroupConfig group = new GroupConfig(new DateHistogramGroupConfig.CalendarInterval("foo", - new DateHistogramInterval("1d"), null, "UTC"), + final GroupConfig group = new GroupConfig( + new DateHistogramGroupConfig.CalendarInterval("foo", new DateHistogramInterval("1d"), null, "UTC"), new HistogramGroupConfig(3L, "bar"), - null); - final RollupJobConfig job = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, group, emptyList(), null); + null + ); + final RollupJobConfig job = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, group, emptyList(), null); RollupJobCaps cap = new RollupJobCaps(job); Set caps = singletonSet(cap); - Exception e = expectThrows(RuntimeException.class, - () -> RollupJobIdentifierUtils.findBestJobs(histo, caps)); - assertThat(e.getMessage(), equalTo("There is not a rollup job that has a [histogram] agg on field " + - "[bar] which also satisfies all requirements of query.")); + Exception e = expectThrows(RuntimeException.class, () -> RollupJobIdentifierUtils.findBestJobs(histo, caps)); + assertThat( + e.getMessage(), + equalTo( + "There is not a rollup job that has a [histogram] agg on field " + "[bar] which also satisfies all requirements of query." 
+ ) + ); } public void testMissingMetric() { @@ -476,97 +584,111 @@ public void testMissingMetric() { final GroupConfig group = new GroupConfig(new DateHistogramGroupConfig.CalendarInterval("foo", new DateHistogramInterval("1h"))); final List metrics = singletonList(new MetricConfig("foo", Arrays.asList("avg", "max", "min", "sum"))); - final RollupJobConfig job = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, group, emptyList(), null); + final RollupJobConfig job = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, group, emptyList(), null); Set caps = singletonSet(new RollupJobCaps(job)); String aggType; Exception e; if (i == 0) { - e = expectThrows(IllegalArgumentException.class, - () -> RollupJobIdentifierUtils.findBestJobs(new MaxAggregationBuilder("test_metric").field("other_field"), caps)); + e = expectThrows( + IllegalArgumentException.class, + () -> RollupJobIdentifierUtils.findBestJobs(new MaxAggregationBuilder("test_metric").field("other_field"), caps) + ); aggType = "max"; } else if (i == 1) { - e = expectThrows(IllegalArgumentException.class, - () -> RollupJobIdentifierUtils.findBestJobs(new MinAggregationBuilder("test_metric").field("other_field"), caps)); + e = expectThrows( + IllegalArgumentException.class, + () -> RollupJobIdentifierUtils.findBestJobs(new MinAggregationBuilder("test_metric").field("other_field"), caps) + ); aggType = "min"; } else if (i == 2) { - e = expectThrows(IllegalArgumentException.class, - () -> RollupJobIdentifierUtils.findBestJobs(new SumAggregationBuilder("test_metric").field("other_field"), caps)); + e = expectThrows( + IllegalArgumentException.class, + () -> RollupJobIdentifierUtils.findBestJobs(new SumAggregationBuilder("test_metric").field("other_field"), caps) + ); aggType = "sum"; } else { - e = expectThrows(IllegalArgumentException.class, - () -> RollupJobIdentifierUtils.findBestJobs(new AvgAggregationBuilder("test_metric").field("other_field"), caps)); + e = expectThrows( + IllegalArgumentException.class, + () -> RollupJobIdentifierUtils.findBestJobs(new AvgAggregationBuilder("test_metric").field("other_field"), caps) + ); aggType = "avg"; } - assertThat(e.getMessage(), equalTo("There is not a rollup job that has a [" + aggType + "] agg with name " + - "[test_metric] which also satisfies all requirements of query.")); + assertThat( + e.getMessage(), + equalTo( + "There is not a rollup job that has a [" + + aggType + + "] agg with name " + + "[test_metric] which also satisfies all requirements of query." 
+ ) + ); } public void testValidateFixedInterval() { - boolean valid = RollupJobIdentifierUtils.validateFixedInterval(new DateHistogramInterval("100ms"), - new DateHistogramInterval("100ms")); + boolean valid = RollupJobIdentifierUtils.validateFixedInterval( + new DateHistogramInterval("100ms"), + new DateHistogramInterval("100ms") + ); assertTrue(valid); - valid = RollupJobIdentifierUtils.validateFixedInterval(new DateHistogramInterval("200ms"), - new DateHistogramInterval("100ms")); + valid = RollupJobIdentifierUtils.validateFixedInterval(new DateHistogramInterval("200ms"), new DateHistogramInterval("100ms")); assertTrue(valid); - valid = RollupJobIdentifierUtils.validateFixedInterval(new DateHistogramInterval("1000ms"), - new DateHistogramInterval("200ms")); + valid = RollupJobIdentifierUtils.validateFixedInterval(new DateHistogramInterval("1000ms"), new DateHistogramInterval("200ms")); assertTrue(valid); - valid = RollupJobIdentifierUtils.validateFixedInterval(new DateHistogramInterval("5m"), - new DateHistogramInterval("5m")); + valid = RollupJobIdentifierUtils.validateFixedInterval(new DateHistogramInterval("5m"), new DateHistogramInterval("5m")); assertTrue(valid); - valid = RollupJobIdentifierUtils.validateFixedInterval(new DateHistogramInterval("20m"), - new DateHistogramInterval("5m")); + valid = RollupJobIdentifierUtils.validateFixedInterval(new DateHistogramInterval("20m"), new DateHistogramInterval("5m")); assertTrue(valid); - valid = RollupJobIdentifierUtils.validateFixedInterval(new DateHistogramInterval("100ms"), - new DateHistogramInterval("500ms")); + valid = RollupJobIdentifierUtils.validateFixedInterval(new DateHistogramInterval("100ms"), new DateHistogramInterval("500ms")); assertFalse(valid); - valid = RollupJobIdentifierUtils.validateFixedInterval(new DateHistogramInterval("100ms"), - new DateHistogramInterval("5m")); + valid = RollupJobIdentifierUtils.validateFixedInterval(new DateHistogramInterval("100ms"), new DateHistogramInterval("5m")); assertFalse(valid); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, - () -> RollupJobIdentifierUtils.validateFixedInterval(new DateHistogramInterval("100ms"), new DateHistogramInterval("minute"))); - assertThat(e.getMessage(), equalTo("failed to parse setting [date_histo.config.interval] with value " + - "[minute] as a time value: unit is missing or unrecognized")); + IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, + () -> RollupJobIdentifierUtils.validateFixedInterval(new DateHistogramInterval("100ms"), new DateHistogramInterval("minute")) + ); + assertThat( + e.getMessage(), + equalTo( + "failed to parse setting [date_histo.config.interval] with value " + + "[minute] as a time value: unit is missing or unrecognized" + ) + ); } public void testValidateCalendarInterval() { - boolean valid = RollupJobIdentifierUtils.validateCalendarInterval(new DateHistogramInterval("second"), - new DateHistogramInterval("second")); + boolean valid = RollupJobIdentifierUtils.validateCalendarInterval( + new DateHistogramInterval("second"), + new DateHistogramInterval("second") + ); assertTrue(valid); - valid = RollupJobIdentifierUtils.validateCalendarInterval(new DateHistogramInterval("minute"), - new DateHistogramInterval("second")); + valid = RollupJobIdentifierUtils.validateCalendarInterval(new DateHistogramInterval("minute"), new DateHistogramInterval("second")); assertTrue(valid); - valid = RollupJobIdentifierUtils.validateCalendarInterval(new DateHistogramInterval("month"), - new 
DateHistogramInterval("day")); + valid = RollupJobIdentifierUtils.validateCalendarInterval(new DateHistogramInterval("month"), new DateHistogramInterval("day")); assertTrue(valid); - valid = RollupJobIdentifierUtils.validateCalendarInterval(new DateHistogramInterval("1d"), - new DateHistogramInterval("1s")); + valid = RollupJobIdentifierUtils.validateCalendarInterval(new DateHistogramInterval("1d"), new DateHistogramInterval("1s")); assertTrue(valid); - valid = RollupJobIdentifierUtils.validateCalendarInterval(new DateHistogramInterval("second"), - new DateHistogramInterval("minute")); + valid = RollupJobIdentifierUtils.validateCalendarInterval(new DateHistogramInterval("second"), new DateHistogramInterval("minute")); assertFalse(valid); - valid = RollupJobIdentifierUtils.validateCalendarInterval(new DateHistogramInterval("second"), - new DateHistogramInterval("1m")); + valid = RollupJobIdentifierUtils.validateCalendarInterval(new DateHistogramInterval("second"), new DateHistogramInterval("1m")); assertFalse(valid); // Fails because both are actually fixed - valid = RollupJobIdentifierUtils.validateCalendarInterval(new DateHistogramInterval("100ms"), - new DateHistogramInterval("100ms")); + valid = RollupJobIdentifierUtils.validateCalendarInterval(new DateHistogramInterval("100ms"), new DateHistogramInterval("100ms")); assertFalse(valid); } @@ -580,12 +702,12 @@ public void testComparatorMixed() { if (randomBoolean()) { interval = getRandomCalendarInterval(); dateHistoConfig = new DateHistogramGroupConfig.CalendarInterval("foo", interval); - } else { + } else { interval = getRandomFixedInterval(); dateHistoConfig = new DateHistogramGroupConfig.FixedInterval("foo", interval); } GroupConfig group = new GroupConfig(dateHistoConfig); - RollupJobConfig job = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, group, emptyList(), null); + RollupJobConfig job = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, group, emptyList(), null); RollupJobCaps cap = new RollupJobCaps(job); caps.add(cap); } @@ -611,7 +733,7 @@ public void testComparatorFixed() { for (int i = 0; i < numCaps; i++) { DateHistogramInterval interval = getRandomFixedInterval(); GroupConfig group = new GroupConfig(new DateHistogramGroupConfig.FixedInterval("foo", interval)); - RollupJobConfig job = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, group, emptyList(), null); + RollupJobConfig job = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, group, emptyList(), null); RollupJobCaps cap = new RollupJobCaps(job); caps.add(cap); } @@ -637,7 +759,7 @@ public void testComparatorCalendar() { for (int i = 0; i < numCaps; i++) { DateHistogramInterval interval = getRandomCalendarInterval(); GroupConfig group = new GroupConfig(new DateHistogramGroupConfig.CalendarInterval("foo", interval)); - RollupJobConfig job = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, group, emptyList(), null); + RollupJobConfig job = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, group, emptyList(), null); RollupJobCaps cap = new RollupJobCaps(job); caps.add(cap); } @@ -658,10 +780,14 @@ public void testComparatorCalendar() { public void testObsoleteTimezone() { // Job has "obsolete" timezone - DateHistogramGroupConfig dateHisto = new DateHistogramGroupConfig.CalendarInterval("foo", - new DateHistogramInterval("1h"), null, "Canada/Mountain"); + DateHistogramGroupConfig dateHisto = new DateHistogramGroupConfig.CalendarInterval( + "foo", + new 
DateHistogramInterval("1h"), + null, + "Canada/Mountain" + ); GroupConfig group = new GroupConfig(dateHisto); - RollupJobConfig job = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, group, emptyList(), null); + RollupJobConfig job = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, group, emptyList(), null); RollupJobCaps cap = new RollupJobCaps(job); Set caps = singletonSet(cap); @@ -683,7 +809,7 @@ public void testObsoleteTimezone() { dateHisto = new DateHistogramGroupConfig.CalendarInterval("foo", new DateHistogramInterval("1h"), null, "America/Edmonton"); group = new GroupConfig(dateHisto); - job = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, group, emptyList(), null); + job = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, group, emptyList(), null); cap = new RollupJobCaps(job); caps = singletonSet(cap); @@ -732,7 +858,7 @@ private static DateHistogramInterval getRandomFixedInterval() { } private static DateHistogramInterval getRandomCalendarInterval() { - return new DateHistogramInterval(UNITS.get(randomIntBetween(0, UNITS.size()-1))); + return new DateHistogramInterval(UNITS.get(randomIntBetween(0, UNITS.size() - 1))); } private Set singletonSet(RollupJobCaps cap) { diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/RollupRequestTranslationTests.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/RollupRequestTranslationTests.java index 3b43d1167c3ae..3da0f60013b88 100644 --- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/RollupRequestTranslationTests.java +++ b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/RollupRequestTranslationTests.java @@ -6,7 +6,6 @@ */ package org.elasticsearch.xpack.rollup; - import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.Settings; @@ -14,8 +13,8 @@ import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval; -import org.elasticsearch.search.aggregations.bucket.histogram.LongBounds; import org.elasticsearch.search.aggregations.bucket.histogram.HistogramAggregationBuilder; +import org.elasticsearch.search.aggregations.bucket.histogram.LongBounds; import org.elasticsearch.search.aggregations.bucket.range.GeoDistanceAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.AvgAggregationBuilder; @@ -59,37 +58,36 @@ public void setUp() throws Exception { public void testBasicDateHisto() { DateHistogramAggregationBuilder histo = new DateHistogramAggregationBuilder("test_histo"); histo.calendarInterval(new DateHistogramInterval("1d")) - .field("foo") - .extendedBounds(new LongBounds(0L, 1000L)) - .subAggregation(new MaxAggregationBuilder("the_max").field("max_field")) - .subAggregation(new AvgAggregationBuilder("the_avg").field("avg_field")); + .field("foo") + .extendedBounds(new LongBounds(0L, 1000L)) + .subAggregation(new MaxAggregationBuilder("the_max").field("max_field")) + .subAggregation(new AvgAggregationBuilder("the_avg").field("avg_field")); List translated = translateAggregation(histo, namedWriteableRegistry); assertThat(translated.size(), equalTo(1)); assertThat(translated.get(0), 
Matchers.instanceOf(DateHistogramAggregationBuilder.class)); - DateHistogramAggregationBuilder translatedHisto = (DateHistogramAggregationBuilder)translated.get(0); + DateHistogramAggregationBuilder translatedHisto = (DateHistogramAggregationBuilder) translated.get(0); assertThat(translatedHisto.getCalendarInterval(), equalTo(new DateHistogramInterval("1d"))); assertThat(translatedHisto.field(), equalTo("foo.date_histogram.timestamp")); assertThat(translatedHisto.getSubAggregations().size(), equalTo(4)); Map subAggs = translatedHisto.getSubAggregations() - .stream().collect(Collectors.toMap(AggregationBuilder::getName, Function.identity())); + .stream() + .collect(Collectors.toMap(AggregationBuilder::getName, Function.identity())); assertThat(subAggs.get("the_max"), Matchers.instanceOf(MaxAggregationBuilder.class)); - assertThat(((MaxAggregationBuilder)subAggs.get("the_max")).field(), equalTo("max_field.max.value")); + assertThat(((MaxAggregationBuilder) subAggs.get("the_max")).field(), equalTo("max_field.max.value")); assertThat(subAggs.get("the_avg.value"), Matchers.instanceOf(SumAggregationBuilder.class)); - SumAggregationBuilder avg = (SumAggregationBuilder)subAggs.get("the_avg.value"); + SumAggregationBuilder avg = (SumAggregationBuilder) subAggs.get("the_avg.value"); assertThat(avg.field(), equalTo("avg_field.avg.value")); assertThat(subAggs.get("the_avg._count"), Matchers.instanceOf(SumAggregationBuilder.class)); - assertThat(((SumAggregationBuilder)subAggs.get("the_avg._count")).field(), - equalTo("avg_field.avg._count")); + assertThat(((SumAggregationBuilder) subAggs.get("the_avg._count")).field(), equalTo("avg_field.avg._count")); assertThat(subAggs.get("test_histo._count"), Matchers.instanceOf(SumAggregationBuilder.class)); - assertThat(((SumAggregationBuilder)subAggs.get("test_histo._count")).field(), - equalTo("foo.date_histogram._count")); + assertThat(((SumAggregationBuilder) subAggs.get("test_histo._count")).field(), equalTo("foo.date_histogram._count")); } public void testFormattedDateHisto() { @@ -103,7 +101,7 @@ public void testFormattedDateHisto() { List translated = translateAggregation(histo, namedWriteableRegistry); assertThat(translated.size(), equalTo(1)); assertThat(translated.get(0), Matchers.instanceOf(DateHistogramAggregationBuilder.class)); - DateHistogramAggregationBuilder translatedHisto = (DateHistogramAggregationBuilder)translated.get(0); + DateHistogramAggregationBuilder translatedHisto = (DateHistogramAggregationBuilder) translated.get(0); assertThat(translatedHisto.getCalendarInterval(), equalTo(new DateHistogramInterval("1d"))); assertThat(translatedHisto.format(), equalTo("yyyy-MM-dd")); @@ -119,82 +117,84 @@ public void testSimpleMetric() { int numAggs = 1; if (i == 0) { - translated = translateAggregation(new MaxAggregationBuilder("test_metric") - .field("foo"), namedWriteableRegistry); + translated = translateAggregation(new MaxAggregationBuilder("test_metric").field("foo"), namedWriteableRegistry); clazz = MaxAggregationBuilder.class; - fieldName = "foo.max.value"; + fieldName = "foo.max.value"; } else if (i == 1) { - translated = translateAggregation(new MinAggregationBuilder("test_metric") - .field("foo"), namedWriteableRegistry); + translated = translateAggregation(new MinAggregationBuilder("test_metric").field("foo"), namedWriteableRegistry); clazz = MinAggregationBuilder.class; - fieldName = "foo.min.value"; + fieldName = "foo.min.value"; } else if (i == 2) { - translated = translateAggregation(new SumAggregationBuilder("test_metric") - 
.field("foo"), namedWriteableRegistry); + translated = translateAggregation(new SumAggregationBuilder("test_metric").field("foo"), namedWriteableRegistry); clazz = SumAggregationBuilder.class; - fieldName = "foo.sum.value"; + fieldName = "foo.sum.value"; } assertThat(translated.size(), equalTo(numAggs)); assertThat(translated.get(0), Matchers.instanceOf(clazz)); assertThat((translated.get(0)).getName(), equalTo("test_metric")); - assertThat(((ValuesSourceAggregationBuilder)translated.get(0)).field(), equalTo(fieldName)); + assertThat(((ValuesSourceAggregationBuilder) translated.get(0)).field(), equalTo(fieldName)); } public void testUnsupportedMetric() { - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, - () -> translateAggregation(new StatsAggregationBuilder("test_metric") - .field("foo"), namedWriteableRegistry)); - assertThat(e.getMessage(), equalTo("Unable to translate aggregation tree into Rollup. Aggregation [test_metric] is of type " + - "[StatsAggregationBuilder] which is currently unsupported.")); + IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, + () -> translateAggregation(new StatsAggregationBuilder("test_metric").field("foo"), namedWriteableRegistry) + ); + assertThat( + e.getMessage(), + equalTo( + "Unable to translate aggregation tree into Rollup. Aggregation [test_metric] is of type " + + "[StatsAggregationBuilder] which is currently unsupported." + ) + ); } public void testDateHistoIntervalWithMinMax() { DateHistogramAggregationBuilder histo = new DateHistogramAggregationBuilder("test_histo"); histo.calendarInterval(new DateHistogramInterval("1d")) - .field("foo") - .subAggregation(new MaxAggregationBuilder("the_max").field("max_field")) - .subAggregation(new AvgAggregationBuilder("the_avg").field("avg_field")); + .field("foo") + .subAggregation(new MaxAggregationBuilder("the_max").field("max_field")) + .subAggregation(new AvgAggregationBuilder("the_avg").field("avg_field")); List translated = translateAggregation(histo, namedWriteableRegistry); assertThat(translated.size(), equalTo(1)); assertThat(translated.get(0), instanceOf(DateHistogramAggregationBuilder.class)); - DateHistogramAggregationBuilder translatedHisto = (DateHistogramAggregationBuilder)translated.get(0); + DateHistogramAggregationBuilder translatedHisto = (DateHistogramAggregationBuilder) translated.get(0); assertThat(translatedHisto.getCalendarInterval().toString(), equalTo("1d")); assertThat(translatedHisto.field(), equalTo("foo.date_histogram.timestamp")); assertThat(translatedHisto.getSubAggregations().size(), equalTo(4)); Map subAggs = translatedHisto.getSubAggregations() - .stream().collect(Collectors.toMap(AggregationBuilder::getName, Function.identity())); + .stream() + .collect(Collectors.toMap(AggregationBuilder::getName, Function.identity())); assertThat(subAggs.get("the_max"), instanceOf(MaxAggregationBuilder.class)); - assertThat(((MaxAggregationBuilder)subAggs.get("the_max")).field(), equalTo("max_field.max.value")); + assertThat(((MaxAggregationBuilder) subAggs.get("the_max")).field(), equalTo("max_field.max.value")); assertThat(subAggs.get("the_avg.value"), instanceOf(SumAggregationBuilder.class)); - SumAggregationBuilder avg = (SumAggregationBuilder)subAggs.get("the_avg.value"); + SumAggregationBuilder avg = (SumAggregationBuilder) subAggs.get("the_avg.value"); assertThat(avg.field(), equalTo("avg_field.avg.value")); assertThat(subAggs.get("the_avg._count"), instanceOf(SumAggregationBuilder.class)); - 
assertThat(((SumAggregationBuilder)subAggs.get("the_avg._count")).field(), - equalTo("avg_field.avg._count")); + assertThat(((SumAggregationBuilder) subAggs.get("the_avg._count")).field(), equalTo("avg_field.avg._count")); assertThat(subAggs.get("test_histo._count"), instanceOf(SumAggregationBuilder.class)); - assertThat(((SumAggregationBuilder)subAggs.get("test_histo._count")).field(), - equalTo("foo.date_histogram._count")); + assertThat(((SumAggregationBuilder) subAggs.get("test_histo._count")).field(), equalTo("foo.date_histogram._count")); } public void testDateHistoLongIntervalWithMinMax() { DateHistogramAggregationBuilder histo = new DateHistogramAggregationBuilder("test_histo"); histo.fixedInterval(DateHistogramInterval.seconds(86400)) - .field("foo") - .subAggregation(new MaxAggregationBuilder("the_max").field("max_field")) - .subAggregation(new AvgAggregationBuilder("the_avg").field("avg_field")); + .field("foo") + .subAggregation(new MaxAggregationBuilder("the_max").field("max_field")) + .subAggregation(new AvgAggregationBuilder("the_avg").field("avg_field")); List translated = translateAggregation(histo, namedWriteableRegistry); assertThat(translated.size(), equalTo(1)); assertThat(translated.get(0), instanceOf(DateHistogramAggregationBuilder.class)); - DateHistogramAggregationBuilder translatedHisto = (DateHistogramAggregationBuilder)translated.get(0); + DateHistogramAggregationBuilder translatedHisto = (DateHistogramAggregationBuilder) translated.get(0); assertNull(translatedHisto.getCalendarInterval()); assertThat(translatedHisto.getFixedInterval(), equalTo(new DateHistogramInterval("86400s"))); @@ -202,35 +202,32 @@ public void testDateHistoLongIntervalWithMinMax() { assertThat(translatedHisto.getSubAggregations().size(), equalTo(4)); Map subAggs = translatedHisto.getSubAggregations() - .stream().collect(Collectors.toMap(AggregationBuilder::getName, Function.identity())); + .stream() + .collect(Collectors.toMap(AggregationBuilder::getName, Function.identity())); assertThat(subAggs.get("the_max"), instanceOf(MaxAggregationBuilder.class)); - assertThat(((MaxAggregationBuilder)subAggs.get("the_max")).field(), equalTo("max_field.max.value")); + assertThat(((MaxAggregationBuilder) subAggs.get("the_max")).field(), equalTo("max_field.max.value")); assertThat(subAggs.get("the_avg.value"), instanceOf(SumAggregationBuilder.class)); - SumAggregationBuilder avg = (SumAggregationBuilder)subAggs.get("the_avg.value"); + SumAggregationBuilder avg = (SumAggregationBuilder) subAggs.get("the_avg.value"); assertThat(avg.field(), equalTo("avg_field.avg.value")); assertThat(subAggs.get("the_avg._count"), instanceOf(SumAggregationBuilder.class)); - assertThat(((SumAggregationBuilder)subAggs.get("the_avg._count")).field(), - equalTo("avg_field.avg._count")); + assertThat(((SumAggregationBuilder) subAggs.get("the_avg._count")).field(), equalTo("avg_field.avg._count")); assertThat(subAggs.get("test_histo._count"), instanceOf(SumAggregationBuilder.class)); - assertThat(((SumAggregationBuilder)subAggs.get("test_histo._count")).field(), - equalTo("foo.date_histogram._count")); + assertThat(((SumAggregationBuilder) subAggs.get("test_histo._count")).field(), equalTo("foo.date_histogram._count")); } public void testDateHistoWithTimezone() { ZoneId timeZone = ZoneId.of(randomFrom(ZoneId.getAvailableZoneIds())); DateHistogramAggregationBuilder histo = new DateHistogramAggregationBuilder("test_histo"); - histo.fixedInterval(new DateHistogramInterval("86400000ms")) - .field("foo") - .timeZone(timeZone); + 
histo.fixedInterval(new DateHistogramInterval("86400000ms")).field("foo").timeZone(timeZone); List translated = translateAggregation(histo, namedWriteableRegistry); assertThat(translated.size(), equalTo(1)); assertThat(translated.get(0), instanceOf(DateHistogramAggregationBuilder.class)); - DateHistogramAggregationBuilder translatedHisto = (DateHistogramAggregationBuilder)translated.get(0); + DateHistogramAggregationBuilder translatedHisto = (DateHistogramAggregationBuilder) translated.get(0); assertThat(translatedHisto.getFixedInterval().toString(), equalTo("86400000ms")); assertThat(translatedHisto.field(), equalTo("foo.date_histogram.timestamp")); @@ -244,108 +241,110 @@ public void testDeprecatedInterval() { List translated = translateAggregation(histo, namedWriteableRegistry); assertThat(translated.size(), equalTo(1)); assertThat(translated.get(0), instanceOf(DateHistogramAggregationBuilder.class)); - DateHistogramAggregationBuilder translatedHisto = (DateHistogramAggregationBuilder)translated.get(0); + DateHistogramAggregationBuilder translatedHisto = (DateHistogramAggregationBuilder) translated.get(0); assertThat(translatedHisto.getFixedInterval().toString(), equalTo("86400s")); assertThat(translatedHisto.field(), equalTo("foo.date_histogram.timestamp")); } public void testAvgMetric() { - List translated = translateAggregation(new AvgAggregationBuilder("test_metric") - .field("foo"), namedWriteableRegistry); + List translated = translateAggregation( + new AvgAggregationBuilder("test_metric").field("foo"), + namedWriteableRegistry + ); assertThat(translated.size(), equalTo(2)); Map metrics = translated.stream() - .collect(Collectors.toMap(AggregationBuilder::getName, Function.identity())); + .collect(Collectors.toMap(AggregationBuilder::getName, Function.identity())); assertThat(metrics.get("test_metric.value"), Matchers.instanceOf(SumAggregationBuilder.class)); - assertThat(((SumAggregationBuilder)metrics.get("test_metric.value")).field(), - equalTo("foo.avg.value")); + assertThat(((SumAggregationBuilder) metrics.get("test_metric.value")).field(), equalTo("foo.avg.value")); assertThat(metrics.get("test_metric._count"), Matchers.instanceOf(SumAggregationBuilder.class)); - assertThat(((SumAggregationBuilder)metrics.get("test_metric._count")).field(), - equalTo("foo.avg._count")); + assertThat(((SumAggregationBuilder) metrics.get("test_metric._count")).field(), equalTo("foo.avg._count")); } public void testStringTerms() throws IOException { TermsAggregationBuilder terms = new TermsAggregationBuilder("test_string_terms").userValueTypeHint(ValueType.STRING); terms.field("foo") - .subAggregation(new MaxAggregationBuilder("the_max").field("max_field")) - .subAggregation(new AvgAggregationBuilder("the_avg").field("avg_field")); + .subAggregation(new MaxAggregationBuilder("the_max").field("max_field")) + .subAggregation(new AvgAggregationBuilder("the_avg").field("avg_field")); List translated = translateAggregation(terms, namedWriteableRegistry); assertThat(translated.size(), equalTo(1)); assertThat(translated.get(0), Matchers.instanceOf(TermsAggregationBuilder.class)); - TermsAggregationBuilder translatedHisto = (TermsAggregationBuilder)translated.get(0); + TermsAggregationBuilder translatedHisto = (TermsAggregationBuilder) translated.get(0); assertThat(translatedHisto.field(), equalTo("foo.terms.value")); assertThat(translatedHisto.getSubAggregations().size(), equalTo(4)); Map subAggs = translatedHisto.getSubAggregations() - .stream().collect(Collectors.toMap(AggregationBuilder::getName, 
Function.identity())); + .stream() + .collect(Collectors.toMap(AggregationBuilder::getName, Function.identity())); assertThat(subAggs.get("the_max"), Matchers.instanceOf(MaxAggregationBuilder.class)); - assertThat(((MaxAggregationBuilder)subAggs.get("the_max")).field(), equalTo("max_field.max.value")); + assertThat(((MaxAggregationBuilder) subAggs.get("the_max")).field(), equalTo("max_field.max.value")); assertThat(subAggs.get("the_avg.value"), Matchers.instanceOf(SumAggregationBuilder.class)); - SumAggregationBuilder avg = (SumAggregationBuilder)subAggs.get("the_avg.value"); + SumAggregationBuilder avg = (SumAggregationBuilder) subAggs.get("the_avg.value"); assertThat(avg.field(), equalTo("avg_field.avg.value")); assertThat(subAggs.get("the_avg._count"), Matchers.instanceOf(SumAggregationBuilder.class)); - assertThat(((SumAggregationBuilder)subAggs.get("the_avg._count")).field(), - equalTo("avg_field.avg._count")); + assertThat(((SumAggregationBuilder) subAggs.get("the_avg._count")).field(), equalTo("avg_field.avg._count")); assertThat(subAggs.get("test_string_terms._count"), Matchers.instanceOf(SumAggregationBuilder.class)); - assertThat(((SumAggregationBuilder)subAggs.get("test_string_terms._count")).field(), - equalTo("foo.terms._count")); + assertThat(((SumAggregationBuilder) subAggs.get("test_string_terms._count")).field(), equalTo("foo.terms._count")); } public void testBasicHisto() { HistogramAggregationBuilder histo = new HistogramAggregationBuilder("test_histo"); histo.field("foo") - .interval(1L) - .extendedBounds(0.0, 1000.0) - .subAggregation(new MaxAggregationBuilder("the_max").field("max_field")) - .subAggregation(new AvgAggregationBuilder("the_avg").field("avg_field")); + .interval(1L) + .extendedBounds(0.0, 1000.0) + .subAggregation(new MaxAggregationBuilder("the_max").field("max_field")) + .subAggregation(new AvgAggregationBuilder("the_avg").field("avg_field")); List translated = translateAggregation(histo, namedWriteableRegistry); assertThat(translated.size(), equalTo(1)); assertThat(translated.get(0), Matchers.instanceOf(HistogramAggregationBuilder.class)); - HistogramAggregationBuilder translatedHisto = (HistogramAggregationBuilder)translated.get(0); + HistogramAggregationBuilder translatedHisto = (HistogramAggregationBuilder) translated.get(0); assertThat(translatedHisto.field(), equalTo("foo.histogram.value")); assertThat(translatedHisto.getSubAggregations().size(), equalTo(4)); Map subAggs = translatedHisto.getSubAggregations() - .stream().collect(Collectors.toMap(AggregationBuilder::getName, Function.identity())); + .stream() + .collect(Collectors.toMap(AggregationBuilder::getName, Function.identity())); assertThat(subAggs.get("the_max"), Matchers.instanceOf(MaxAggregationBuilder.class)); - assertThat(((MaxAggregationBuilder)subAggs.get("the_max")).field(), equalTo("max_field.max.value")); + assertThat(((MaxAggregationBuilder) subAggs.get("the_max")).field(), equalTo("max_field.max.value")); assertThat(subAggs.get("the_avg.value"), Matchers.instanceOf(SumAggregationBuilder.class)); - SumAggregationBuilder avg = (SumAggregationBuilder)subAggs.get("the_avg.value"); + SumAggregationBuilder avg = (SumAggregationBuilder) subAggs.get("the_avg.value"); assertThat(avg.field(), equalTo("avg_field.avg.value")); assertThat(subAggs.get("the_avg._count"), Matchers.instanceOf(SumAggregationBuilder.class)); - assertThat(((SumAggregationBuilder)subAggs.get("the_avg._count")).field(), - equalTo("avg_field.avg._count")); + assertThat(((SumAggregationBuilder) 
subAggs.get("the_avg._count")).field(), equalTo("avg_field.avg._count")); assertThat(subAggs.get("test_histo._count"), Matchers.instanceOf(SumAggregationBuilder.class)); - assertThat(((SumAggregationBuilder)subAggs.get("test_histo._count")).field(), - equalTo("foo.histogram._count")); + assertThat(((SumAggregationBuilder) subAggs.get("test_histo._count")).field(), equalTo("foo.histogram._count")); } public void testUnsupportedAgg() { GeoDistanceAggregationBuilder geo = new GeoDistanceAggregationBuilder("test_geo", new GeoPoint(0.0, 0.0)); geo.field("foo") - .subAggregation(new MaxAggregationBuilder("the_max").field("max_field")) - .subAggregation(new AvgAggregationBuilder("the_avg").field("avg_field")); - - Exception e = expectThrows(RuntimeException.class, - () -> translateAggregation(geo, namedWriteableRegistry)); - assertThat(e.getMessage(), equalTo("Unable to translate aggregation tree into Rollup. Aggregation [test_geo] is of type " + - "[GeoDistanceAggregationBuilder] which is currently unsupported.")); + .subAggregation(new MaxAggregationBuilder("the_max").field("max_field")) + .subAggregation(new AvgAggregationBuilder("the_avg").field("avg_field")); + + Exception e = expectThrows(RuntimeException.class, () -> translateAggregation(geo, namedWriteableRegistry)); + assertThat( + e.getMessage(), + equalTo( + "Unable to translate aggregation tree into Rollup. Aggregation [test_geo] is of type " + + "[GeoDistanceAggregationBuilder] which is currently unsupported." + ) + ); } } diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/RollupResponseTranslationTests.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/RollupResponseTranslationTests.java index 1f25f513c2a39..5e68cf490bdc2 100644 --- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/RollupResponseTranslationTests.java +++ b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/RollupResponseTranslationTests.java @@ -25,18 +25,18 @@ import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.search.MultiSearchResponse; import org.elasticsearch.action.search.SearchResponse; -import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.core.TimeValue; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.MockBigArrays; import org.elasticsearch.common.util.MockPageCacheRecycler; +import org.elasticsearch.core.CheckedConsumer; +import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.mapper.DateFieldMapper; import org.elasticsearch.index.mapper.KeywordFieldMapper; +import org.elasticsearch.index.mapper.KeywordFieldMapper.KeywordField; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.NumberFieldMapper; -import org.elasticsearch.index.mapper.KeywordFieldMapper.KeywordField; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.index.query.TermQueryBuilder; @@ -89,54 +89,74 @@ public class RollupResponseTranslationTests extends AggregatorTestCase { public void testLiveFailure() { - MultiSearchResponse.Item[] failure = new MultiSearchResponse.Item[]{ - new MultiSearchResponse.Item(null, new RuntimeException("foo")), - new MultiSearchResponse.Item(null, null)}; + MultiSearchResponse.Item[] failure = new MultiSearchResponse.Item[] { + new 
MultiSearchResponse.Item(null, new RuntimeException("foo")), + new MultiSearchResponse.Item(null, null) }; BigArrays bigArrays = new MockBigArrays(new MockPageCacheRecycler(Settings.EMPTY), new NoneCircuitBreakerService()); ScriptService scriptService = mock(ScriptService.class); - Exception e = expectThrows(RuntimeException.class, - () -> RollupResponseTranslator.combineResponses(failure, - InternalAggregation.ReduceContext.forFinalReduction(bigArrays, scriptService, b -> {}, PipelineTree.EMPTY))); + Exception e = expectThrows( + RuntimeException.class, + () -> RollupResponseTranslator.combineResponses( + failure, + InternalAggregation.ReduceContext.forFinalReduction(bigArrays, scriptService, b -> {}, PipelineTree.EMPTY) + ) + ); assertThat(e.getMessage(), equalTo("foo")); - e = expectThrows(RuntimeException.class, - () -> RollupResponseTranslator.translateResponse(failure, - InternalAggregation.ReduceContext.forFinalReduction(bigArrays, scriptService, b -> {}, PipelineTree.EMPTY))); + e = expectThrows( + RuntimeException.class, + () -> RollupResponseTranslator.translateResponse( + failure, + InternalAggregation.ReduceContext.forFinalReduction(bigArrays, scriptService, b -> {}, PipelineTree.EMPTY) + ) + ); assertThat(e.getMessage(), equalTo("foo")); - e = expectThrows(RuntimeException.class, - () -> RollupResponseTranslator.verifyResponse(failure[0])); + e = expectThrows(RuntimeException.class, () -> RollupResponseTranslator.verifyResponse(failure[0])); assertThat(e.getMessage(), equalTo("foo")); } public void testRollupFailure() { - MultiSearchResponse.Item[] failure = new MultiSearchResponse.Item[]{ - new MultiSearchResponse.Item(null, new RuntimeException("rollup failure"))}; + MultiSearchResponse.Item[] failure = new MultiSearchResponse.Item[] { + new MultiSearchResponse.Item(null, new RuntimeException("rollup failure")) }; BigArrays bigArrays = new MockBigArrays(new MockPageCacheRecycler(Settings.EMPTY), new NoneCircuitBreakerService()); ScriptService scriptService = mock(ScriptService.class); - Exception e = expectThrows(RuntimeException.class, - () -> RollupResponseTranslator.translateResponse(failure, - InternalAggregation.ReduceContext.forFinalReduction(bigArrays, scriptService, b -> {}, PipelineTree.EMPTY))); + Exception e = expectThrows( + RuntimeException.class, + () -> RollupResponseTranslator.translateResponse( + failure, + InternalAggregation.ReduceContext.forFinalReduction(bigArrays, scriptService, b -> {}, PipelineTree.EMPTY) + ) + ); assertThat(e.getMessage(), equalTo("rollup failure")); } public void testLiveMissingRollupMissing() { - MultiSearchResponse.Item[] failure = new MultiSearchResponse.Item[]{ - new MultiSearchResponse.Item(null, new IndexNotFoundException("foo")), - new MultiSearchResponse.Item(null, new IndexNotFoundException("foo"))}; + MultiSearchResponse.Item[] failure = new MultiSearchResponse.Item[] { + new MultiSearchResponse.Item(null, new IndexNotFoundException("foo")), + new MultiSearchResponse.Item(null, new IndexNotFoundException("foo")) }; BigArrays bigArrays = new MockBigArrays(new MockPageCacheRecycler(Settings.EMPTY), new NoneCircuitBreakerService()); ScriptService scriptService = mock(ScriptService.class); - ResourceNotFoundException e = expectThrows(ResourceNotFoundException.class, - () -> RollupResponseTranslator.combineResponses(failure, - InternalAggregation.ReduceContext.forFinalReduction(bigArrays, scriptService, b -> {}, PipelineTree.EMPTY))); - assertThat(e.getMessage(), equalTo("Index [[foo]] was not found, likely because it was 
deleted while the request was in-flight. " + - "Rollup does not support partial search results, please try the request again.")); + ResourceNotFoundException e = expectThrows( + ResourceNotFoundException.class, + () -> RollupResponseTranslator.combineResponses( + failure, + InternalAggregation.ReduceContext.forFinalReduction(bigArrays, scriptService, b -> {}, PipelineTree.EMPTY) + ) + ); + assertThat( + e.getMessage(), + equalTo( + "Index [[foo]] was not found, likely because it was deleted while the request was in-flight. " + + "Rollup does not support partial search results, please try the request again." + ) + ); } public void testMissingLiveIndex() throws Exception { @@ -171,17 +191,27 @@ public void testMissingLiveIndex() throws Exception { Aggregations mockAggsWithout = InternalAggregations.from(aggTree); when(responseWithout.getAggregations()).thenReturn(mockAggsWithout); - MultiSearchResponse.Item[] msearch = new MultiSearchResponse.Item[]{ - new MultiSearchResponse.Item(null, new IndexNotFoundException("foo")), - new MultiSearchResponse.Item(responseWithout, null)}; + MultiSearchResponse.Item[] msearch = new MultiSearchResponse.Item[] { + new MultiSearchResponse.Item(null, new IndexNotFoundException("foo")), + new MultiSearchResponse.Item(responseWithout, null) }; BigArrays bigArrays = new MockBigArrays(new MockPageCacheRecycler(Settings.EMPTY), new NoneCircuitBreakerService()); ScriptService scriptService = mock(ScriptService.class); - ResourceNotFoundException e = expectThrows(ResourceNotFoundException.class, () -> RollupResponseTranslator.combineResponses(msearch, - InternalAggregation.ReduceContext.forFinalReduction(bigArrays, scriptService, b -> {}, PipelineTree.EMPTY))); - assertThat(e.getMessage(), equalTo("Index [[foo]] was not found, likely because it was deleted while the request was in-flight. " + - "Rollup does not support partial search results, please try the request again.")); + ResourceNotFoundException e = expectThrows( + ResourceNotFoundException.class, + () -> RollupResponseTranslator.combineResponses( + msearch, + InternalAggregation.ReduceContext.forFinalReduction(bigArrays, scriptService, b -> {}, PipelineTree.EMPTY) + ) + ); + assertThat( + e.getMessage(), + equalTo( + "Index [[foo]] was not found, likely because it was deleted while the request was in-flight. " + + "Rollup does not support partial search results, please try the request again." 
+ ) + ); } public void testRolledMissingAggs() throws Exception { @@ -190,14 +220,15 @@ public void testRolledMissingAggs() throws Exception { when(responseWithout.getAggregations()).thenReturn(InternalAggregations.EMPTY); - MultiSearchResponse.Item[] msearch = new MultiSearchResponse.Item[]{ - new MultiSearchResponse.Item(responseWithout, null)}; + MultiSearchResponse.Item[] msearch = new MultiSearchResponse.Item[] { new MultiSearchResponse.Item(responseWithout, null) }; BigArrays bigArrays = new MockBigArrays(new MockPageCacheRecycler(Settings.EMPTY), new NoneCircuitBreakerService()); ScriptService scriptService = mock(ScriptService.class); - SearchResponse response = RollupResponseTranslator.translateResponse(msearch, - InternalAggregation.ReduceContext.forFinalReduction(bigArrays, scriptService, b -> {}, PipelineTree.EMPTY)); + SearchResponse response = RollupResponseTranslator.translateResponse( + msearch, + InternalAggregation.ReduceContext.forFinalReduction(bigArrays, scriptService, b -> {}, PipelineTree.EMPTY) + ); assertNotNull(response); Aggregations responseAggs = response.getAggregations(); assertThat(responseAggs.asList().size(), equalTo(0)); @@ -206,17 +237,27 @@ public void testRolledMissingAggs() throws Exception { public void testMissingRolledIndex() { SearchResponse response = mock(SearchResponse.class); - MultiSearchResponse.Item[] msearch = new MultiSearchResponse.Item[]{ - new MultiSearchResponse.Item(response, null), - new MultiSearchResponse.Item(null, new IndexNotFoundException("foo"))}; + MultiSearchResponse.Item[] msearch = new MultiSearchResponse.Item[] { + new MultiSearchResponse.Item(response, null), + new MultiSearchResponse.Item(null, new IndexNotFoundException("foo")) }; BigArrays bigArrays = new MockBigArrays(new MockPageCacheRecycler(Settings.EMPTY), new NoneCircuitBreakerService()); ScriptService scriptService = mock(ScriptService.class); - ResourceNotFoundException e = expectThrows(ResourceNotFoundException.class, () -> RollupResponseTranslator.combineResponses(msearch, - InternalAggregation.ReduceContext.forFinalReduction(bigArrays, scriptService, b -> {}, PipelineTree.EMPTY))); - assertThat(e.getMessage(), equalTo("Index [[foo]] was not found, likely because it was deleted while the request was in-flight. " + - "Rollup does not support partial search results, please try the request again.")); + ResourceNotFoundException e = expectThrows( + ResourceNotFoundException.class, + () -> RollupResponseTranslator.combineResponses( + msearch, + InternalAggregation.ReduceContext.forFinalReduction(bigArrays, scriptService, b -> {}, PipelineTree.EMPTY) + ) + ); + assertThat( + e.getMessage(), + equalTo( + "Index [[foo]] was not found, likely because it was deleted while the request was in-flight. " + + "Rollup does not support partial search results, please try the request again." 
+ ) + ); } public void testVerifyNormal() throws Exception { @@ -229,8 +270,7 @@ public void testVerifyNormal() throws Exception { public void testVerifyMissingNormal() { MultiSearchResponse.Item missing = new MultiSearchResponse.Item(null, new IndexNotFoundException("foo")); - Exception e = expectThrows(RuntimeException.class, - () -> RollupResponseTranslator.verifyResponse(missing)); + Exception e = expectThrows(RuntimeException.class, () -> RollupResponseTranslator.verifyResponse(missing)); assertThat(e.getMessage(), equalTo("no such index [foo]")); } @@ -270,9 +310,13 @@ public void testTranslateRollup() throws Exception { BigArrays bigArrays = new MockBigArrays(new MockPageCacheRecycler(Settings.EMPTY), new NoneCircuitBreakerService()); ScriptService scriptService = mock(ScriptService.class); InternalAggregation.ReduceContext context = InternalAggregation.ReduceContext.forFinalReduction( - bigArrays, scriptService, b -> {}, PipelineTree.EMPTY); + bigArrays, + scriptService, + b -> {}, + PipelineTree.EMPTY + ); - SearchResponse finalResponse = RollupResponseTranslator.translateResponse(new MultiSearchResponse.Item[]{item}, context); + SearchResponse finalResponse = RollupResponseTranslator.translateResponse(new MultiSearchResponse.Item[] { item }, context); assertNotNull(finalResponse); Aggregations responseAggs = finalResponse.getAggregations(); assertNotNull(finalResponse); @@ -285,12 +329,23 @@ public void testTranslateMissingRollup() { BigArrays bigArrays = new MockBigArrays(new MockPageCacheRecycler(Settings.EMPTY), new NoneCircuitBreakerService()); ScriptService scriptService = mock(ScriptService.class); InternalAggregation.ReduceContext context = InternalAggregation.ReduceContext.forFinalReduction( - bigArrays, scriptService, b -> {}, PipelineTree.EMPTY); - - ResourceNotFoundException e = expectThrows(ResourceNotFoundException.class, - () -> RollupResponseTranslator.translateResponse(new MultiSearchResponse.Item[]{missing}, context)); - assertThat(e.getMessage(), equalTo("Index [foo] was not found, likely because it was deleted while the request was in-flight. " + - "Rollup does not support partial search results, please try the request again.")); + bigArrays, + scriptService, + b -> {}, + PipelineTree.EMPTY + ); + + ResourceNotFoundException e = expectThrows( + ResourceNotFoundException.class, + () -> RollupResponseTranslator.translateResponse(new MultiSearchResponse.Item[] { missing }, context) + ); + assertThat( + e.getMessage(), + equalTo( + "Index [foo] was not found, likely because it was deleted while the request was in-flight. " + + "Rollup does not support partial search results, please try the request again." 
+            )
+        );
     }
 
     public void testMissingFilter() {
@@ -312,14 +367,18 @@ public void testMissingFilter() {
         when(responseWithout.getAggregations()).thenReturn(mockAggsWithout);
         MultiSearchResponse.Item rolledResponse = new MultiSearchResponse.Item(responseWithout, null);
 
-        MultiSearchResponse.Item[] msearch = new MultiSearchResponse.Item[]{unrolledResponse, rolledResponse};
+        MultiSearchResponse.Item[] msearch = new MultiSearchResponse.Item[] { unrolledResponse, rolledResponse };
 
         BigArrays bigArrays = new MockBigArrays(new MockPageCacheRecycler(Settings.EMPTY), new NoneCircuitBreakerService());
         ScriptService scriptService = mock(ScriptService.class);
 
-        Exception e = expectThrows(RuntimeException.class,
-            () -> RollupResponseTranslator.combineResponses(msearch,
-                InternalAggregation.ReduceContext.forFinalReduction(bigArrays, scriptService, b -> {}, PipelineTree.EMPTY)));
+        Exception e = expectThrows(
+            RuntimeException.class,
+            () -> RollupResponseTranslator.combineResponses(
+                msearch,
+                InternalAggregation.ReduceContext.forFinalReduction(bigArrays, scriptService, b -> {}, PipelineTree.EMPTY)
+            )
+        );
         assertThat(e.getMessage(), containsString("Expected [bizzbuzz] to be a FilterAggregation"));
     }
 
@@ -341,16 +400,19 @@ public void testMatchingNameNotFilter() {
         when(responseWithout.getAggregations()).thenReturn(mockAggsWithout);
         MultiSearchResponse.Item rolledResponse = new MultiSearchResponse.Item(responseWithout, null);
 
-        MultiSearchResponse.Item[] msearch = new MultiSearchResponse.Item[]{unrolledResponse, rolledResponse};
+        MultiSearchResponse.Item[] msearch = new MultiSearchResponse.Item[] { unrolledResponse, rolledResponse };
 
         BigArrays bigArrays = new MockBigArrays(new MockPageCacheRecycler(Settings.EMPTY), new NoneCircuitBreakerService());
         ScriptService scriptService = mock(ScriptService.class);
 
-        Exception e = expectThrows(RuntimeException.class,
-            () -> RollupResponseTranslator.combineResponses(msearch,
-                InternalAggregation.ReduceContext.forFinalReduction(bigArrays, scriptService, b -> {}, PipelineTree.EMPTY)));
-        assertThat(e.getMessage(),
-            equalTo("Expected [filter_foo] to be a FilterAggregation, but was [InternalMax]"));
+        Exception e = expectThrows(
+            RuntimeException.class,
+            () -> RollupResponseTranslator.combineResponses(
+                msearch,
+                InternalAggregation.ReduceContext.forFinalReduction(bigArrays, scriptService, b -> {}, PipelineTree.EMPTY)
+            )
+        );
+        assertThat(e.getMessage(), equalTo("Expected [filter_foo] to be a FilterAggregation, but was [InternalMax]"));
     }
 
     public void testSimpleReduction() throws Exception {
@@ -395,14 +457,15 @@ public void testSimpleReduction() throws Exception {
         when(responseWithout.getAggregations()).thenReturn(mockAggsWithout);
         MultiSearchResponse.Item rolledResponse = new MultiSearchResponse.Item(responseWithout, null);
 
-        MultiSearchResponse.Item[] msearch = new MultiSearchResponse.Item[]{unrolledResponse, rolledResponse};
+        MultiSearchResponse.Item[] msearch = new MultiSearchResponse.Item[] { unrolledResponse, rolledResponse };
 
         BigArrays bigArrays = new MockBigArrays(new MockPageCacheRecycler(Settings.EMPTY), new NoneCircuitBreakerService());
         ScriptService scriptService = mock(ScriptService.class);
-
-        SearchResponse response = RollupResponseTranslator.combineResponses(msearch,
-            InternalAggregation.ReduceContext.forFinalReduction(bigArrays, scriptService, b -> {}, PipelineTree.EMPTY));
+        SearchResponse response = RollupResponseTranslator.combineResponses(
+            msearch,
+            InternalAggregation.ReduceContext.forFinalReduction(bigArrays, scriptService, b -> {}, PipelineTree.EMPTY)
+        );
         assertNotNull(response);
         Aggregations responseAggs = response.getAggregations();
         assertNotNull(responseAggs);
@@ -412,139 +475,169 @@
 
 
     public void testUnsupported() throws IOException {
 
-        GeoBoundsAggregationBuilder geo1
-            = new GeoBoundsAggregationBuilder("foo").field("bar");
-        GeoBoundsAggregationBuilder geo2
-            = new GeoBoundsAggregationBuilder("foo").field("bar");
+        GeoBoundsAggregationBuilder geo1 = new GeoBoundsAggregationBuilder("foo").field("bar");
+        GeoBoundsAggregationBuilder geo2 = new GeoBoundsAggregationBuilder("foo").field("bar");
 
         MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("field", NumberFieldMapper.NumberType.LONG);
 
-        List responses = doQueries(new MatchAllDocsQuery(),
-            iw -> {
-                iw.addDocument(singleton(new NumericDocValuesField("number", 7)));
-                iw.addDocument(singleton(new NumericDocValuesField("number", 2)));
-                iw.addDocument(singleton(new NumericDocValuesField("number", 3)));
-            }, geo1,
-            iw -> {
-                iw.addDocument(singleton(new NumericDocValuesField("number", 7)));
-                iw.addDocument(singleton(new NumericDocValuesField("number", 2)));
-                iw.addDocument(singleton(new NumericDocValuesField("number", 3)));
-            }, geo2,
-            new MappedFieldType[]{fieldType}, new MappedFieldType[]{fieldType});
-
-        Exception e = expectThrows(RuntimeException.class,
-            () -> RollupResponseTranslator.unrollAgg(responses.get(1), null, null, 0));
-        assertThat(e.getMessage(), equalTo("Unable to unroll aggregation tree. " +
-            "Aggregation [foo] is of type [InternalGeoBounds] which is currently unsupported."));
+        List responses = doQueries(new MatchAllDocsQuery(), iw -> {
+            iw.addDocument(singleton(new NumericDocValuesField("number", 7)));
+            iw.addDocument(singleton(new NumericDocValuesField("number", 2)));
+            iw.addDocument(singleton(new NumericDocValuesField("number", 3)));
+        }, geo1, iw -> {
+            iw.addDocument(singleton(new NumericDocValuesField("number", 7)));
+            iw.addDocument(singleton(new NumericDocValuesField("number", 2)));
+            iw.addDocument(singleton(new NumericDocValuesField("number", 3)));
+        }, geo2, new MappedFieldType[] { fieldType }, new MappedFieldType[] { fieldType });
+
+        Exception e = expectThrows(RuntimeException.class, () -> RollupResponseTranslator.unrollAgg(responses.get(1), null, null, 0));
+        assertThat(
+            e.getMessage(),
+            equalTo(
+                "Unable to unroll aggregation tree. " + "Aggregation [foo] is of type [InternalGeoBounds] which is currently unsupported."
+ ) + ); } public void testUnsupportedMultiBucket() throws IOException { MappedFieldType fieldType = new KeywordFieldMapper.KeywordFieldType("foo"); QueryBuilder filter = QueryBuilders.boolQuery() - .must(QueryBuilders.termQuery("field", "foo")) - .should(QueryBuilders.termQuery("field", "bar")); - SignificantTermsAggregationBuilder builder = new SignificantTermsAggregationBuilder( - "test") - .field("field") - .backgroundFilter(filter); - - List responses = doQueries(new MatchAllDocsQuery(), - iw -> { - iw.addDocument(timestampedValueDoc(100, 1)); - iw.addDocument(timestampedValueDoc(200, 2)); - iw.addDocument(timestampedValueDoc(300, 3)); - }, builder, - iw -> { - iw.addDocument(timestampedValueRollupDoc(100, 1)); - iw.addDocument(timestampedValueRollupDoc(200, 2)); - iw.addDocument(timestampedValueRollupDoc(300, 3)); - }, builder, - new MappedFieldType[]{fieldType}, new MappedFieldType[]{fieldType}); - - - Exception e = expectThrows(RuntimeException.class, - () -> RollupResponseTranslator.unrollAgg(responses.get(1), null, null, 0)); - assertThat(e.getMessage(), equalTo("Unable to unroll aggregation tree. Aggregation [test] is of type " + - "[UnmappedSignificantTerms] which is currently unsupported.")); + .must(QueryBuilders.termQuery("field", "foo")) + .should(QueryBuilders.termQuery("field", "bar")); + SignificantTermsAggregationBuilder builder = new SignificantTermsAggregationBuilder("test").field("field").backgroundFilter(filter); + + List responses = doQueries(new MatchAllDocsQuery(), iw -> { + iw.addDocument(timestampedValueDoc(100, 1)); + iw.addDocument(timestampedValueDoc(200, 2)); + iw.addDocument(timestampedValueDoc(300, 3)); + }, builder, iw -> { + iw.addDocument(timestampedValueRollupDoc(100, 1)); + iw.addDocument(timestampedValueRollupDoc(200, 2)); + iw.addDocument(timestampedValueRollupDoc(300, 3)); + }, builder, new MappedFieldType[] { fieldType }, new MappedFieldType[] { fieldType }); + + Exception e = expectThrows(RuntimeException.class, () -> RollupResponseTranslator.unrollAgg(responses.get(1), null, null, 0)); + assertThat( + e.getMessage(), + equalTo( + "Unable to unroll aggregation tree. Aggregation [test] is of type " + + "[UnmappedSignificantTerms] which is currently unsupported." 
+ ) + ); } public void testMismatch() throws IOException { - GeoBoundsAggregationBuilder geoBoundsAggregationBuilder - = new GeoBoundsAggregationBuilder("histo").field("bar"); + GeoBoundsAggregationBuilder geoBoundsAggregationBuilder = new GeoBoundsAggregationBuilder("histo").field("bar"); - DateHistogramAggregationBuilder histoBuilder = new DateHistogramAggregationBuilder("histo") - .field("bar").fixedInterval(new DateHistogramInterval("100ms")); + DateHistogramAggregationBuilder histoBuilder = new DateHistogramAggregationBuilder("histo").field("bar") + .fixedInterval(new DateHistogramInterval("100ms")); FilterAggregationBuilder filterBuilder = new FilterAggregationBuilder("filter", new TermQueryBuilder("foo", "bar")); filterBuilder.subAggregation(histoBuilder); MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("number", NumberFieldMapper.NumberType.LONG); - List responses = doQueries(new MatchAllDocsQuery(), - iw -> { - iw.addDocument(singleton(new NumericDocValuesField("number", 7))); - iw.addDocument(singleton(new NumericDocValuesField("number", 2))); - iw.addDocument(singleton(new NumericDocValuesField("number", 3))); - }, geoBoundsAggregationBuilder, - iw -> { - iw.addDocument(singleton(new NumericDocValuesField("number", 7))); - iw.addDocument(singleton(new NumericDocValuesField("number", 2))); - iw.addDocument(singleton(new NumericDocValuesField("number", 3))); - }, filterBuilder, - new MappedFieldType[]{fieldType}, new MappedFieldType[]{fieldType}); - - // TODO SearchResponse.Clusters is not public, using null for now. Should fix upstream. - MultiSearchResponse.Item unrolledItem = new MultiSearchResponse.Item(new SearchResponse( - new InternalSearchResponse(null, - InternalAggregations.from(Collections.singletonList(responses.get(0))), null, null, false, false, 1), - null, 1, 1, 0, 10, null, null), null); - MultiSearchResponse.Item rolledItem = new MultiSearchResponse.Item(new SearchResponse( - new InternalSearchResponse(null, - InternalAggregations.from(Collections.singletonList(responses.get(1))), null, null, false, false, 1), - null, 1, 1, 0, 10, null, null), null); - - MultiSearchResponse.Item[] msearch = new MultiSearchResponse.Item[]{unrolledItem, rolledItem}; + List responses = doQueries(new MatchAllDocsQuery(), iw -> { + iw.addDocument(singleton(new NumericDocValuesField("number", 7))); + iw.addDocument(singleton(new NumericDocValuesField("number", 2))); + iw.addDocument(singleton(new NumericDocValuesField("number", 3))); + }, geoBoundsAggregationBuilder, iw -> { + iw.addDocument(singleton(new NumericDocValuesField("number", 7))); + iw.addDocument(singleton(new NumericDocValuesField("number", 2))); + iw.addDocument(singleton(new NumericDocValuesField("number", 3))); + }, filterBuilder, new MappedFieldType[] { fieldType }, new MappedFieldType[] { fieldType }); + + // TODO SearchResponse.Clusters is not public, using null for now. Should fix upstream. 
+ MultiSearchResponse.Item unrolledItem = new MultiSearchResponse.Item( + new SearchResponse( + new InternalSearchResponse( + null, + InternalAggregations.from(Collections.singletonList(responses.get(0))), + null, + null, + false, + false, + 1 + ), + null, + 1, + 1, + 0, + 10, + null, + null + ), + null + ); + MultiSearchResponse.Item rolledItem = new MultiSearchResponse.Item( + new SearchResponse( + new InternalSearchResponse( + null, + InternalAggregations.from(Collections.singletonList(responses.get(1))), + null, + null, + false, + false, + 1 + ), + null, + 1, + 1, + 0, + 10, + null, + null + ), + null + ); + + MultiSearchResponse.Item[] msearch = new MultiSearchResponse.Item[] { unrolledItem, rolledItem }; BigArrays bigArrays = new MockBigArrays(new MockPageCacheRecycler(Settings.EMPTY), new NoneCircuitBreakerService()); ScriptService scriptService = mock(ScriptService.class); InternalAggregation.ReduceContext reduceContext = InternalAggregation.ReduceContext.forFinalReduction( - bigArrays, scriptService, b -> {}, PipelineTree.EMPTY); - ClassCastException e = expectThrows(ClassCastException.class, - () -> RollupResponseTranslator.combineResponses(msearch, reduceContext)); - assertThat(e.getMessage(), - containsString("org.elasticsearch.search.aggregations.metrics.InternalGeoBounds")); - assertThat(e.getMessage(), - containsString("org.elasticsearch.search.aggregations.InternalMultiBucketAggregation")); + bigArrays, + scriptService, + b -> {}, + PipelineTree.EMPTY + ); + ClassCastException e = expectThrows( + ClassCastException.class, + () -> RollupResponseTranslator.combineResponses(msearch, reduceContext) + ); + assertThat(e.getMessage(), containsString("org.elasticsearch.search.aggregations.metrics.InternalGeoBounds")); + assertThat(e.getMessage(), containsString("org.elasticsearch.search.aggregations.InternalMultiBucketAggregation")); } public void testDateHisto() throws IOException { - DateHistogramAggregationBuilder nonRollupHisto = new DateHistogramAggregationBuilder("histo") - .field("timestamp").fixedInterval(new DateHistogramInterval("100ms")); + DateHistogramAggregationBuilder nonRollupHisto = new DateHistogramAggregationBuilder("histo").field("timestamp") + .fixedInterval(new DateHistogramInterval("100ms")); - DateHistogramAggregationBuilder rollupHisto = new DateHistogramAggregationBuilder("histo") - .field("timestamp.date_histogram." + RollupField.TIMESTAMP) - .fixedInterval(new DateHistogramInterval("100ms")) - .subAggregation(new SumAggregationBuilder("histo." + RollupField.COUNT_FIELD) - .field("timestamp.date_histogram." + RollupField.COUNT_FIELD)); + DateHistogramAggregationBuilder rollupHisto = new DateHistogramAggregationBuilder("histo").field( + "timestamp.date_histogram." + RollupField.TIMESTAMP + ) + .fixedInterval(new DateHistogramInterval("100ms")) + .subAggregation( + new SumAggregationBuilder("histo." + RollupField.COUNT_FIELD).field("timestamp.date_histogram." + RollupField.COUNT_FIELD) + ); DateFieldMapper.DateFieldType nrFTtimestamp = new DateFieldMapper.DateFieldType(nonRollupHisto.field()); DateFieldMapper.DateFieldType rFTtimestamp = new DateFieldMapper.DateFieldType(rollupHisto.field()); - MappedFieldType rFTvalue = new NumberFieldMapper.NumberFieldType("timestamp.date_histogram." 
+ RollupField.COUNT_FIELD, - NumberFieldMapper.NumberType.LONG); - - List responses = doQueries(new MatchAllDocsQuery(), - iw -> { - iw.addDocument(timestampedValueDoc(100, 1)); - iw.addDocument(timestampedValueDoc(200, 2)); - iw.addDocument(timestampedValueDoc(300, 3)); - }, nonRollupHisto, - iw -> { - iw.addDocument(timestampedValueRollupDoc(100, 1)); - iw.addDocument(timestampedValueRollupDoc(200, 2)); - iw.addDocument(timestampedValueRollupDoc(300, 3)); - }, rollupHisto, - new MappedFieldType[]{nrFTtimestamp}, new MappedFieldType[]{rFTtimestamp, rFTvalue}); + MappedFieldType rFTvalue = new NumberFieldMapper.NumberFieldType( + "timestamp.date_histogram." + RollupField.COUNT_FIELD, + NumberFieldMapper.NumberType.LONG + ); + + List responses = doQueries(new MatchAllDocsQuery(), iw -> { + iw.addDocument(timestampedValueDoc(100, 1)); + iw.addDocument(timestampedValueDoc(200, 2)); + iw.addDocument(timestampedValueDoc(300, 3)); + }, nonRollupHisto, iw -> { + iw.addDocument(timestampedValueRollupDoc(100, 1)); + iw.addDocument(timestampedValueRollupDoc(200, 2)); + iw.addDocument(timestampedValueRollupDoc(300, 3)); + }, rollupHisto, new MappedFieldType[] { nrFTtimestamp }, new MappedFieldType[] { rFTtimestamp, rFTvalue }); InternalAggregation unrolled = RollupResponseTranslator.unrollAgg(responses.get(1), null, null, 0); assertThat(unrolled.toString(), equalTo(responses.get(0).toString())); @@ -552,37 +645,36 @@ public void testDateHisto() throws IOException { } public void testDateHistoWithGap() throws IOException { - DateHistogramAggregationBuilder nonRollupHisto = new DateHistogramAggregationBuilder("histo") - .field("timestamp").fixedInterval(new DateHistogramInterval("100ms")) - .minDocCount(0); - - DateHistogramAggregationBuilder rollupHisto = new DateHistogramAggregationBuilder("histo") - .field("timestamp.date_histogram." + RollupField.TIMESTAMP) - .fixedInterval(new DateHistogramInterval("100ms")) - .minDocCount(0) - .subAggregation(new SumAggregationBuilder("histo." + RollupField.COUNT_FIELD) - .field("timestamp.date_histogram." + RollupField.COUNT_FIELD)); + DateHistogramAggregationBuilder nonRollupHisto = new DateHistogramAggregationBuilder("histo").field("timestamp") + .fixedInterval(new DateHistogramInterval("100ms")) + .minDocCount(0); + + DateHistogramAggregationBuilder rollupHisto = new DateHistogramAggregationBuilder("histo").field( + "timestamp.date_histogram." + RollupField.TIMESTAMP + ) + .fixedInterval(new DateHistogramInterval("100ms")) + .minDocCount(0) + .subAggregation( + new SumAggregationBuilder("histo." + RollupField.COUNT_FIELD).field("timestamp.date_histogram." + RollupField.COUNT_FIELD) + ); DateFieldMapper.DateFieldType nrFTtimestamp = new DateFieldMapper.DateFieldType(nonRollupHisto.field()); DateFieldMapper.DateFieldType rFTtimestamp = new DateFieldMapper.DateFieldType(rollupHisto.field()); - MappedFieldType rFTvalue = new NumberFieldMapper.NumberFieldType("timestamp.date_histogram." 
+ RollupField.COUNT_FIELD, - NumberFieldMapper.NumberType.LONG); - - List responses = doQueries(new MatchAllDocsQuery(), - iw -> { - iw.addDocument(timestampedValueDoc(100, 1)); - iw.addDocument(timestampedValueDoc(200, 2)); - iw.addDocument(timestampedValueDoc(400, 3)); - }, nonRollupHisto, - iw -> { - iw.addDocument(timestampedValueRollupDoc(100, 1)); - iw.addDocument(timestampedValueRollupDoc(200, 2)); - iw.addDocument(timestampedValueRollupDoc(400, 3)); - }, rollupHisto, - new MappedFieldType[]{nrFTtimestamp}, new MappedFieldType[]{rFTtimestamp, rFTvalue}); - - + MappedFieldType rFTvalue = new NumberFieldMapper.NumberFieldType( + "timestamp.date_histogram." + RollupField.COUNT_FIELD, + NumberFieldMapper.NumberType.LONG + ); + + List responses = doQueries(new MatchAllDocsQuery(), iw -> { + iw.addDocument(timestampedValueDoc(100, 1)); + iw.addDocument(timestampedValueDoc(200, 2)); + iw.addDocument(timestampedValueDoc(400, 3)); + }, nonRollupHisto, iw -> { + iw.addDocument(timestampedValueRollupDoc(100, 1)); + iw.addDocument(timestampedValueRollupDoc(200, 2)); + iw.addDocument(timestampedValueRollupDoc(400, 3)); + }, rollupHisto, new MappedFieldType[] { nrFTtimestamp }, new MappedFieldType[] { rFTtimestamp, rFTvalue }); InternalAggregation unrolled = RollupResponseTranslator.unrollAgg(responses.get(1), null, null, 0); @@ -590,32 +682,45 @@ public void testDateHistoWithGap() throws IOException { BigArrays bigArrays = new MockBigArrays(new MockPageCacheRecycler(Settings.EMPTY), new NoneCircuitBreakerService()); ScriptService scriptService = mock(ScriptService.class); InternalAggregation.ReduceContext context = InternalAggregation.ReduceContext.forFinalReduction( - bigArrays, scriptService, b -> {}, PipelineTree.EMPTY); - - InternalAggregation reduced = ((InternalDateHistogram)unrolled).reduce(Collections.singletonList(unrolled), context); - assertThat(reduced.toString(), equalTo("{\"histo\":{\"buckets\":[{\"key_as_string\":\"1970-01-01T00:00:00.100Z\",\"key\":100," + - "\"doc_count\":1},{\"key_as_string\":\"1970-01-01T00:00:00.200Z\",\"key\":200,\"doc_count\":1}," + - "{\"key_as_string\":\"1970-01-01T00:00:00.300Z\",\"key\":300,\"doc_count\":0,\"histo._count\":{\"value\":0.0}}," + - "{\"key_as_string\":\"1970-01-01T00:00:00.400Z\",\"key\":400,\"doc_count\":1}]}}")); + bigArrays, + scriptService, + b -> {}, + PipelineTree.EMPTY + ); + + InternalAggregation reduced = ((InternalDateHistogram) unrolled).reduce(Collections.singletonList(unrolled), context); + assertThat( + reduced.toString(), + equalTo( + "{\"histo\":{\"buckets\":[{\"key_as_string\":\"1970-01-01T00:00:00.100Z\",\"key\":100," + + "\"doc_count\":1},{\"key_as_string\":\"1970-01-01T00:00:00.200Z\",\"key\":200,\"doc_count\":1}," + + "{\"key_as_string\":\"1970-01-01T00:00:00.300Z\",\"key\":300,\"doc_count\":0,\"histo._count\":{\"value\":0.0}}," + + "{\"key_as_string\":\"1970-01-01T00:00:00.400Z\",\"key\":400,\"doc_count\":1}]}}" + ) + ); } public void testNonMatchingPartition() throws IOException { - DateHistogramAggregationBuilder nonRollupHisto = new DateHistogramAggregationBuilder("histo") - .field("timestamp").fixedInterval(new DateHistogramInterval("100ms")) - .minDocCount(0); - - DateHistogramAggregationBuilder rollupHisto = new DateHistogramAggregationBuilder("histo") - .field("timestamp.date_histogram." + RollupField.TIMESTAMP) - .fixedInterval(new DateHistogramInterval("100ms")) - .minDocCount(0) - .subAggregation(new SumAggregationBuilder("histo." + RollupField.COUNT_FIELD) - .field("timestamp.date_histogram." 
+ RollupField.COUNT_FIELD)); + DateHistogramAggregationBuilder nonRollupHisto = new DateHistogramAggregationBuilder("histo").field("timestamp") + .fixedInterval(new DateHistogramInterval("100ms")) + .minDocCount(0); + + DateHistogramAggregationBuilder rollupHisto = new DateHistogramAggregationBuilder("histo").field( + "timestamp.date_histogram." + RollupField.TIMESTAMP + ) + .fixedInterval(new DateHistogramInterval("100ms")) + .minDocCount(0) + .subAggregation( + new SumAggregationBuilder("histo." + RollupField.COUNT_FIELD).field("timestamp.date_histogram." + RollupField.COUNT_FIELD) + ); DateFieldMapper.DateFieldType nrFTtimestamp = new DateFieldMapper.DateFieldType(nonRollupHisto.field()); DateFieldMapper.DateFieldType rFTtimestamp = new DateFieldMapper.DateFieldType(rollupHisto.field()); - MappedFieldType rFTvalue = new NumberFieldMapper.NumberFieldType("timestamp.date_histogram." + RollupField.COUNT_FIELD, - NumberFieldMapper.NumberType.LONG); + MappedFieldType rFTvalue = new NumberFieldMapper.NumberFieldType( + "timestamp.date_histogram." + RollupField.COUNT_FIELD, + NumberFieldMapper.NumberType.LONG + ); KeywordFieldMapper.KeywordFieldType nrKeywordFT = new KeywordFieldMapper.KeywordFieldType("partition"); KeywordFieldMapper.KeywordFieldType rKeywordFT = new KeywordFieldMapper.KeywordFieldType("partition"); @@ -647,234 +752,236 @@ public void testNonMatchingPartition() throws IOException { doc.add(new TextField("partition", "b", Field.Store.NO)); iw.addDocument(doc); - }, nonRollupHisto, new MappedFieldType[]{nrFTtimestamp, nrKeywordFT})); + }, nonRollupHisto, new MappedFieldType[] { nrFTtimestamp, nrKeywordFT })); // Note: term query for "a" - results.add(doQuery(new TermQuery(new Term("partition.terms." + RollupField.VALUE, "a")), - iw -> { - // Time 100: Two "a" documents, one "b" doc - Document doc = new Document(); - doc.add(new SortedNumericDocValuesField("timestamp.date_histogram." + RollupField.TIMESTAMP, 100)); - doc.add(new SortedNumericDocValuesField("timestamp.date_histogram." + RollupField.COUNT_FIELD, 2)); - doc.add(new SortedNumericDocValuesField("timestamp.date_histogram." + RollupField.INTERVAL, 1)); - doc.add(new TextField("partition.terms." + RollupField.VALUE, "a", Field.Store.NO)); - doc.add(new SortedNumericDocValuesField("partition.terms." + RollupField.COUNT_FIELD, 2)); - iw.addDocument(doc); - doc = new Document(); - doc.add(new SortedNumericDocValuesField("timestamp.date_histogram." + RollupField.TIMESTAMP, 100)); - doc.add(new SortedNumericDocValuesField("timestamp.date_histogram." + RollupField.COUNT_FIELD, 1)); - doc.add(new SortedNumericDocValuesField("timestamp.date_histogram." + RollupField.INTERVAL, 1)); - doc.add(new TextField("partition.terms." + RollupField.VALUE, "b", Field.Store.NO)); - doc.add(new SortedNumericDocValuesField("partition.terms." + RollupField.COUNT_FIELD, 1)); - iw.addDocument(doc); - - // Time 200: one "a" document, one "b" doc - doc = new Document(); - doc.add(new SortedNumericDocValuesField("timestamp.date_histogram." + RollupField.TIMESTAMP, 200)); - doc.add(new SortedNumericDocValuesField("timestamp.date_histogram." + RollupField.COUNT_FIELD, 1)); - doc.add(new SortedNumericDocValuesField("timestamp.date_histogram." + RollupField.INTERVAL, 1)); - doc.add(new TextField("partition.terms." + RollupField.VALUE, "a", Field.Store.NO)); - doc.add(new SortedNumericDocValuesField("partition.terms." 
+ RollupField.COUNT_FIELD, 1)); - iw.addDocument(doc); - doc = new Document(); - doc.add(new SortedNumericDocValuesField("timestamp.date_histogram." + RollupField.TIMESTAMP, 200)); - doc.add(new SortedNumericDocValuesField("timestamp.date_histogram." + RollupField.COUNT_FIELD, 1)); - doc.add(new SortedNumericDocValuesField("timestamp.date_histogram." + RollupField.INTERVAL, 1)); - doc.add(new TextField("partition.terms." + RollupField.VALUE, "b", Field.Store.NO)); - doc.add(new SortedNumericDocValuesField("partition.terms." + RollupField.COUNT_FIELD, 1)); - iw.addDocument(doc); - }, rollupHisto, new MappedFieldType[]{rFTtimestamp, rFTvalue, rKeywordFT})); + results.add(doQuery(new TermQuery(new Term("partition.terms." + RollupField.VALUE, "a")), iw -> { + // Time 100: Two "a" documents, one "b" doc + Document doc = new Document(); + doc.add(new SortedNumericDocValuesField("timestamp.date_histogram." + RollupField.TIMESTAMP, 100)); + doc.add(new SortedNumericDocValuesField("timestamp.date_histogram." + RollupField.COUNT_FIELD, 2)); + doc.add(new SortedNumericDocValuesField("timestamp.date_histogram." + RollupField.INTERVAL, 1)); + doc.add(new TextField("partition.terms." + RollupField.VALUE, "a", Field.Store.NO)); + doc.add(new SortedNumericDocValuesField("partition.terms." + RollupField.COUNT_FIELD, 2)); + iw.addDocument(doc); + doc = new Document(); + doc.add(new SortedNumericDocValuesField("timestamp.date_histogram." + RollupField.TIMESTAMP, 100)); + doc.add(new SortedNumericDocValuesField("timestamp.date_histogram." + RollupField.COUNT_FIELD, 1)); + doc.add(new SortedNumericDocValuesField("timestamp.date_histogram." + RollupField.INTERVAL, 1)); + doc.add(new TextField("partition.terms." + RollupField.VALUE, "b", Field.Store.NO)); + doc.add(new SortedNumericDocValuesField("partition.terms." + RollupField.COUNT_FIELD, 1)); + iw.addDocument(doc); + + // Time 200: one "a" document, one "b" doc + doc = new Document(); + doc.add(new SortedNumericDocValuesField("timestamp.date_histogram." + RollupField.TIMESTAMP, 200)); + doc.add(new SortedNumericDocValuesField("timestamp.date_histogram." + RollupField.COUNT_FIELD, 1)); + doc.add(new SortedNumericDocValuesField("timestamp.date_histogram." + RollupField.INTERVAL, 1)); + doc.add(new TextField("partition.terms." + RollupField.VALUE, "a", Field.Store.NO)); + doc.add(new SortedNumericDocValuesField("partition.terms." + RollupField.COUNT_FIELD, 1)); + iw.addDocument(doc); + doc = new Document(); + doc.add(new SortedNumericDocValuesField("timestamp.date_histogram." + RollupField.TIMESTAMP, 200)); + doc.add(new SortedNumericDocValuesField("timestamp.date_histogram." + RollupField.COUNT_FIELD, 1)); + doc.add(new SortedNumericDocValuesField("timestamp.date_histogram." + RollupField.INTERVAL, 1)); + doc.add(new TextField("partition.terms." + RollupField.VALUE, "b", Field.Store.NO)); + doc.add(new SortedNumericDocValuesField("partition.terms." 
+ RollupField.COUNT_FIELD, 1)); + iw.addDocument(doc); + }, rollupHisto, new MappedFieldType[] { rFTtimestamp, rFTvalue, rKeywordFT })); InternalAggregation unrolled = RollupResponseTranslator.unrollAgg(results.get(1), null, null, 0); - assertThat(((InternalDateHistogram)unrolled).getBuckets().size(), equalTo(2)); - assertThat(((InternalDateHistogram)unrolled).getBuckets().get(0).getDocCount(), equalTo(2L)); // two "a" at 100 - assertThat(((InternalDateHistogram)unrolled).getBuckets().get(1).getDocCount(), equalTo(1L)); // one "a" at 200 - assertThat(((InternalDateHistogram)unrolled).getBuckets().get(0).getKeyAsString(), equalTo("1970-01-01T00:00:00.100Z")); - assertThat(unrolled.toString(), equalTo("{\"histo\":{\"buckets\":[{\"key_as_string\":\"1970-01-01T00:00:00.100Z\"," + - "\"key\":100,\"doc_count\":2},{\"key_as_string\":\"1970-01-01T00:00:00.200Z\",\"key\":200,\"doc_count\":1}]}}")); + assertThat(((InternalDateHistogram) unrolled).getBuckets().size(), equalTo(2)); + assertThat(((InternalDateHistogram) unrolled).getBuckets().get(0).getDocCount(), equalTo(2L)); // two "a" at 100 + assertThat(((InternalDateHistogram) unrolled).getBuckets().get(1).getDocCount(), equalTo(1L)); // one "a" at 200 + assertThat(((InternalDateHistogram) unrolled).getBuckets().get(0).getKeyAsString(), equalTo("1970-01-01T00:00:00.100Z")); + assertThat( + unrolled.toString(), + equalTo( + "{\"histo\":{\"buckets\":[{\"key_as_string\":\"1970-01-01T00:00:00.100Z\"," + + "\"key\":100,\"doc_count\":2},{\"key_as_string\":\"1970-01-01T00:00:00.200Z\",\"key\":200,\"doc_count\":1}]}}" + ) + ); assertThat(unrolled.toString(), not(equalTo(results.get(1).toString()))); } public void testDateHistoOverlappingAggTrees() throws IOException { - DateHistogramAggregationBuilder nonRollupHisto = new DateHistogramAggregationBuilder("histo") - .field("timestamp").fixedInterval(new DateHistogramInterval("100ms")); + DateHistogramAggregationBuilder nonRollupHisto = new DateHistogramAggregationBuilder("histo").field("timestamp") + .fixedInterval(new DateHistogramInterval("100ms")); - DateHistogramAggregationBuilder rollupHisto = new DateHistogramAggregationBuilder("histo") - .field("timestamp.date_histogram." + RollupField.TIMESTAMP) - .fixedInterval(new DateHistogramInterval("100ms")) - .subAggregation(new SumAggregationBuilder("histo." + RollupField.COUNT_FIELD) - .field("timestamp.date_histogram." + RollupField.COUNT_FIELD)); + DateHistogramAggregationBuilder rollupHisto = new DateHistogramAggregationBuilder("histo").field( + "timestamp.date_histogram." + RollupField.TIMESTAMP + ) + .fixedInterval(new DateHistogramInterval("100ms")) + .subAggregation( + new SumAggregationBuilder("histo." + RollupField.COUNT_FIELD).field("timestamp.date_histogram." + RollupField.COUNT_FIELD) + ); DateFieldMapper.DateFieldType nrFTtimestamp = new DateFieldMapper.DateFieldType(nonRollupHisto.field()); DateFieldMapper.DateFieldType rFTtimestamp = new DateFieldMapper.DateFieldType(rollupHisto.field()); - MappedFieldType rFTvalue = new NumberFieldMapper.NumberFieldType("timestamp.date_histogram." 
+ RollupField.COUNT_FIELD, - NumberFieldMapper.NumberType.LONG); - - List responses = doQueries(new MatchAllDocsQuery(), - iw -> { - iw.addDocument(timestampedValueDoc(100, 1)); - iw.addDocument(timestampedValueDoc(200, 2)); - iw.addDocument(timestampedValueDoc(300, 3)); - }, nonRollupHisto, - iw -> { - iw.addDocument(timestampedValueRollupDoc(100, 1)); - iw.addDocument(timestampedValueRollupDoc(200, 2)); - iw.addDocument(timestampedValueRollupDoc(300, 3)); - }, rollupHisto, - new MappedFieldType[]{nrFTtimestamp}, new MappedFieldType[]{rFTtimestamp, rFTvalue}); - - List currentTree = doQueries(new MatchAllDocsQuery(), - iw -> { - iw.addDocument(timestampedValueDoc(100, 1)); - iw.addDocument(timestampedValueDoc(200, 2)); - }, nonRollupHisto, - iw -> { - iw.addDocument(timestampedValueRollupDoc(100, 1)); - iw.addDocument(timestampedValueRollupDoc(200, 2)); - }, rollupHisto, - new MappedFieldType[]{nrFTtimestamp}, new MappedFieldType[]{rFTtimestamp, rFTvalue}); + MappedFieldType rFTvalue = new NumberFieldMapper.NumberFieldType( + "timestamp.date_histogram." + RollupField.COUNT_FIELD, + NumberFieldMapper.NumberType.LONG + ); + + List responses = doQueries(new MatchAllDocsQuery(), iw -> { + iw.addDocument(timestampedValueDoc(100, 1)); + iw.addDocument(timestampedValueDoc(200, 2)); + iw.addDocument(timestampedValueDoc(300, 3)); + }, nonRollupHisto, iw -> { + iw.addDocument(timestampedValueRollupDoc(100, 1)); + iw.addDocument(timestampedValueRollupDoc(200, 2)); + iw.addDocument(timestampedValueRollupDoc(300, 3)); + }, rollupHisto, new MappedFieldType[] { nrFTtimestamp }, new MappedFieldType[] { rFTtimestamp, rFTvalue }); + + List currentTree = doQueries(new MatchAllDocsQuery(), iw -> { + iw.addDocument(timestampedValueDoc(100, 1)); + iw.addDocument(timestampedValueDoc(200, 2)); + }, nonRollupHisto, iw -> { + iw.addDocument(timestampedValueRollupDoc(100, 1)); + iw.addDocument(timestampedValueRollupDoc(200, 2)); + }, rollupHisto, new MappedFieldType[] { nrFTtimestamp }, new MappedFieldType[] { rFTtimestamp, rFTvalue }); InternalAggregation unrolled = RollupResponseTranslator.unrollAgg(responses.get(1), null, currentTree.get(1), 0); // Times 100/200 overlap with currentTree, so doc_count will be zero - assertThat(((Object[])unrolled.getProperty("_count"))[0], equalTo(0L)); - assertThat(((Object[])unrolled.getProperty("_count"))[1], equalTo(0L)); + assertThat(((Object[]) unrolled.getProperty("_count"))[0], equalTo(0L)); + assertThat(((Object[]) unrolled.getProperty("_count"))[1], equalTo(0L)); // This time (300) was not in the currentTree so it will have a doc_count of one - assertThat(((Object[])unrolled.getProperty("_count"))[2], equalTo(1L)); + assertThat(((Object[]) unrolled.getProperty("_count"))[2], equalTo(1L)); } public void testDateHistoOverlappingMergeRealIntoZero() throws IOException { - DateHistogramAggregationBuilder nonRollupHisto = new DateHistogramAggregationBuilder("histo") - .field("timestamp").fixedInterval(new DateHistogramInterval("100ms")); + DateHistogramAggregationBuilder nonRollupHisto = new DateHistogramAggregationBuilder("histo").field("timestamp") + .fixedInterval(new DateHistogramInterval("100ms")); - DateHistogramAggregationBuilder rollupHisto = new DateHistogramAggregationBuilder("histo") - .field("timestamp.date_histogram." + RollupField.TIMESTAMP) - .fixedInterval(new DateHistogramInterval("100ms")) - .subAggregation(new SumAggregationBuilder("histo." + RollupField.COUNT_FIELD) - .field("timestamp.date_histogram." 
+ RollupField.COUNT_FIELD)); + DateHistogramAggregationBuilder rollupHisto = new DateHistogramAggregationBuilder("histo").field( + "timestamp.date_histogram." + RollupField.TIMESTAMP + ) + .fixedInterval(new DateHistogramInterval("100ms")) + .subAggregation( + new SumAggregationBuilder("histo." + RollupField.COUNT_FIELD).field("timestamp.date_histogram." + RollupField.COUNT_FIELD) + ); DateFieldMapper.DateFieldType nrFTtimestamp = new DateFieldMapper.DateFieldType(nonRollupHisto.field()); DateFieldMapper.DateFieldType rFTtimestamp = new DateFieldMapper.DateFieldType(rollupHisto.field()); - MappedFieldType rFTvalue = new NumberFieldMapper.NumberFieldType("timestamp.date_histogram." + RollupField.COUNT_FIELD, - NumberFieldMapper.NumberType.LONG); - - List responses = doQueries(new MatchAllDocsQuery(), - iw -> { - iw.addDocument(timestampedValueDoc(100, 1)); - iw.addDocument(timestampedValueDoc(200, 2)); - iw.addDocument(timestampedValueDoc(300, 3)); - }, nonRollupHisto, - iw -> { - iw.addDocument(timestampedValueRollupDoc(100, 1)); - iw.addDocument(timestampedValueRollupDoc(200, 2)); - iw.addDocument(timestampedValueRollupDoc(300, 3)); - }, rollupHisto, - new MappedFieldType[]{nrFTtimestamp}, new MappedFieldType[]{rFTtimestamp, rFTvalue}); - - InternalAggregation currentTree = doQuery(new MatchAllDocsQuery(), - iw -> { - Document doc = new Document(); - doc.add(new SortedNumericDocValuesField("timestamp.date_histogram." + RollupField.TIMESTAMP, 100)); - doc.add(new SortedNumericDocValuesField("timestamp.date_histogram." + RollupField.COUNT_FIELD, 0)); - doc.add(new SortedNumericDocValuesField("timestamp.date_histogram." + RollupField.INTERVAL, 1)); - iw.addDocument(doc); - - Document doc2 = new Document(); - doc2.add(new SortedNumericDocValuesField("timestamp.date_histogram." + RollupField.TIMESTAMP, 200)); - doc2.add(new SortedNumericDocValuesField("timestamp.date_histogram." + RollupField.COUNT_FIELD, 0)); - doc2.add(new SortedNumericDocValuesField("timestamp.date_histogram." + RollupField.INTERVAL, 1)); - iw.addDocument(doc2); - - }, rollupHisto, new MappedFieldType[]{rFTtimestamp, rFTvalue}); + MappedFieldType rFTvalue = new NumberFieldMapper.NumberFieldType( + "timestamp.date_histogram." + RollupField.COUNT_FIELD, + NumberFieldMapper.NumberType.LONG + ); + + List responses = doQueries(new MatchAllDocsQuery(), iw -> { + iw.addDocument(timestampedValueDoc(100, 1)); + iw.addDocument(timestampedValueDoc(200, 2)); + iw.addDocument(timestampedValueDoc(300, 3)); + }, nonRollupHisto, iw -> { + iw.addDocument(timestampedValueRollupDoc(100, 1)); + iw.addDocument(timestampedValueRollupDoc(200, 2)); + iw.addDocument(timestampedValueRollupDoc(300, 3)); + }, rollupHisto, new MappedFieldType[] { nrFTtimestamp }, new MappedFieldType[] { rFTtimestamp, rFTvalue }); + + InternalAggregation currentTree = doQuery(new MatchAllDocsQuery(), iw -> { + Document doc = new Document(); + doc.add(new SortedNumericDocValuesField("timestamp.date_histogram." + RollupField.TIMESTAMP, 100)); + doc.add(new SortedNumericDocValuesField("timestamp.date_histogram." + RollupField.COUNT_FIELD, 0)); + doc.add(new SortedNumericDocValuesField("timestamp.date_histogram." + RollupField.INTERVAL, 1)); + iw.addDocument(doc); + + Document doc2 = new Document(); + doc2.add(new SortedNumericDocValuesField("timestamp.date_histogram." + RollupField.TIMESTAMP, 200)); + doc2.add(new SortedNumericDocValuesField("timestamp.date_histogram." + RollupField.COUNT_FIELD, 0)); + doc2.add(new SortedNumericDocValuesField("timestamp.date_histogram." 
+ RollupField.INTERVAL, 1)); + iw.addDocument(doc2); + + }, rollupHisto, new MappedFieldType[] { rFTtimestamp, rFTvalue }); // In this test we merge real buckets into zero count buckets (e.g. empty list of buckets after unrolling) InternalAggregation unrolledCurrentTree = RollupResponseTranslator.unrollAgg(currentTree, null, null, 0); InternalAggregation unrolled = RollupResponseTranslator.unrollAgg(responses.get(1), null, unrolledCurrentTree, 0); // Times 100/200 overlap with currentTree, but doc_count was zero, so returned doc_count should be one - assertThat(((Object[])unrolled.getProperty("_count"))[0], equalTo(1L)); - assertThat(((Object[])unrolled.getProperty("_count"))[1], equalTo(1L)); + assertThat(((Object[]) unrolled.getProperty("_count"))[0], equalTo(1L)); + assertThat(((Object[]) unrolled.getProperty("_count"))[1], equalTo(1L)); // This time (300) was not in the currentTree so it will have a doc_count of one - assertThat(((Object[])unrolled.getProperty("_count"))[2], equalTo(1L)); + assertThat(((Object[]) unrolled.getProperty("_count"))[2], equalTo(1L)); } public void testDateHistoOverlappingMergeZeroIntoReal() throws IOException { - DateHistogramAggregationBuilder nonRollupHisto = new DateHistogramAggregationBuilder("histo") - .field("timestamp").fixedInterval(new DateHistogramInterval("100ms")).minDocCount(0); - - DateHistogramAggregationBuilder rollupHisto = new DateHistogramAggregationBuilder("histo") - .field("timestamp.date_histogram." + RollupField.TIMESTAMP) - .fixedInterval(new DateHistogramInterval("100ms")) - .minDocCount(0) - .subAggregation(new SumAggregationBuilder("histo." + RollupField.COUNT_FIELD) - .field("timestamp.date_histogram." + RollupField.COUNT_FIELD)); + DateHistogramAggregationBuilder nonRollupHisto = new DateHistogramAggregationBuilder("histo").field("timestamp") + .fixedInterval(new DateHistogramInterval("100ms")) + .minDocCount(0); + + DateHistogramAggregationBuilder rollupHisto = new DateHistogramAggregationBuilder("histo").field( + "timestamp.date_histogram." + RollupField.TIMESTAMP + ) + .fixedInterval(new DateHistogramInterval("100ms")) + .minDocCount(0) + .subAggregation( + new SumAggregationBuilder("histo." + RollupField.COUNT_FIELD).field("timestamp.date_histogram." + RollupField.COUNT_FIELD) + ); DateFieldMapper.DateFieldType nrFTtimestamp = new DateFieldMapper.DateFieldType(nonRollupHisto.field()); DateFieldMapper.DateFieldType rFTtimestamp = new DateFieldMapper.DateFieldType(rollupHisto.field()); - MappedFieldType rFTvalue = new NumberFieldMapper.NumberFieldType("timestamp.date_histogram." + RollupField.COUNT_FIELD, - NumberFieldMapper.NumberType.LONG); + MappedFieldType rFTvalue = new NumberFieldMapper.NumberFieldType( + "timestamp.date_histogram." 
+ RollupField.COUNT_FIELD, + NumberFieldMapper.NumberType.LONG + ); - InternalAggregation currentTree = doQuery(new MatchAllDocsQuery(), - iw -> { - iw.addDocument(timestampedValueRollupDoc(100, 1)); - iw.addDocument(timestampedValueRollupDoc(200, 2)); - iw.addDocument(timestampedValueRollupDoc(300, 3)); - }, rollupHisto, new MappedFieldType[]{rFTtimestamp, rFTvalue}); + InternalAggregation currentTree = doQuery(new MatchAllDocsQuery(), iw -> { + iw.addDocument(timestampedValueRollupDoc(100, 1)); + iw.addDocument(timestampedValueRollupDoc(200, 2)); + iw.addDocument(timestampedValueRollupDoc(300, 3)); + }, rollupHisto, new MappedFieldType[] { rFTtimestamp, rFTvalue }); - InternalAggregation responses = doQuery(new MatchAllDocsQuery(), - iw -> { - Document doc = new Document(); - doc.add(new SortedNumericDocValuesField("timestamp.date_histogram." + RollupField.TIMESTAMP, 100)); - doc.add(new SortedNumericDocValuesField("timestamp.date_histogram." + RollupField.COUNT_FIELD, 0)); - doc.add(new SortedNumericDocValuesField("timestamp.date_histogram." + RollupField.INTERVAL, 1)); - iw.addDocument(doc); - - Document doc2 = new Document(); - doc2.add(new SortedNumericDocValuesField("timestamp.date_histogram." + RollupField.TIMESTAMP, 200)); - doc2.add(new SortedNumericDocValuesField("timestamp.date_histogram." + RollupField.COUNT_FIELD, 0)); - doc2.add(new SortedNumericDocValuesField("timestamp.date_histogram." + RollupField.INTERVAL, 1)); - iw.addDocument(doc2); + InternalAggregation responses = doQuery(new MatchAllDocsQuery(), iw -> { + Document doc = new Document(); + doc.add(new SortedNumericDocValuesField("timestamp.date_histogram." + RollupField.TIMESTAMP, 100)); + doc.add(new SortedNumericDocValuesField("timestamp.date_histogram." + RollupField.COUNT_FIELD, 0)); + doc.add(new SortedNumericDocValuesField("timestamp.date_histogram." + RollupField.INTERVAL, 1)); + iw.addDocument(doc); - }, rollupHisto, new MappedFieldType[]{rFTtimestamp, rFTvalue}); + Document doc2 = new Document(); + doc2.add(new SortedNumericDocValuesField("timestamp.date_histogram." + RollupField.TIMESTAMP, 200)); + doc2.add(new SortedNumericDocValuesField("timestamp.date_histogram." + RollupField.COUNT_FIELD, 0)); + doc2.add(new SortedNumericDocValuesField("timestamp.date_histogram." + RollupField.INTERVAL, 1)); + iw.addDocument(doc2); + }, rollupHisto, new MappedFieldType[] { rFTtimestamp, rFTvalue }); // In this test, we merge zero_count buckets into existing buckets to ensure the metrics remain InternalAggregation unrolledCurrentTree = RollupResponseTranslator.unrollAgg(currentTree, null, null, 0); InternalAggregation unrolled = RollupResponseTranslator.unrollAgg(responses, null, unrolledCurrentTree, 0); // All values overlap and were zero counts themselves, so the unrolled response should be empty list of buckets - assertThat(((InternalDateHistogram)unrolled).getBuckets().size(), equalTo(0)); + assertThat(((InternalDateHistogram) unrolled).getBuckets().size(), equalTo(0)); } public void testAvg() throws IOException { - AvgAggregationBuilder nonRollup = new AvgAggregationBuilder("avg") - .field("foo"); + AvgAggregationBuilder nonRollup = new AvgAggregationBuilder("avg").field("foo"); - SumAggregationBuilder rollup = new SumAggregationBuilder("avg") - .field("foo.avg." + RollupField.VALUE); + SumAggregationBuilder rollup = new SumAggregationBuilder("avg").field("foo.avg." 
+ RollupField.VALUE); MappedFieldType nrFTvalue = new NumberFieldMapper.NumberFieldType("foo", NumberFieldMapper.NumberType.LONG); - MappedFieldType rFTvalue = new NumberFieldMapper.NumberFieldType("foo.avg." + RollupField.VALUE, - NumberFieldMapper.NumberType.LONG); - - List responses = doQueries(new MatchAllDocsQuery(), - iw -> { - iw.addDocument(timestampedValueDoc(100, 1)); - iw.addDocument(timestampedValueDoc(200, 2)); - iw.addDocument(timestampedValueDoc(300, 3)); - }, nonRollup, - iw -> { - iw.addDocument(timestampedValueRollupDoc(100, 6)); - }, rollup, - new MappedFieldType[]{nrFTvalue}, new MappedFieldType[]{rFTvalue}); - - // NOTE: we manually set the count to 3 here, which is somewhat cheating. Will have to rely on + MappedFieldType rFTvalue = new NumberFieldMapper.NumberFieldType("foo.avg." + RollupField.VALUE, NumberFieldMapper.NumberType.LONG); + + List responses = doQueries(new MatchAllDocsQuery(), iw -> { + iw.addDocument(timestampedValueDoc(100, 1)); + iw.addDocument(timestampedValueDoc(200, 2)); + iw.addDocument(timestampedValueDoc(300, 3)); + }, + nonRollup, + iw -> { iw.addDocument(timestampedValueRollupDoc(100, 6)); }, + rollup, + new MappedFieldType[] { nrFTvalue }, + new MappedFieldType[] { rFTvalue } + ); + + // NOTE: we manually set the count to 3 here, which is somewhat cheating. Will have to rely on // other tests to verify that the avg's count is set correctly InternalAggregation unrolled = RollupResponseTranslator.unrollAgg(responses.get(1), null, null, 3); assertThat(unrolled.toString(), equalTo(responses.get(0).toString())); @@ -910,16 +1017,17 @@ public void testMetric() throws IOException { MappedFieldType nrFTvalue = new NumberFieldMapper.NumberFieldType("foo", NumberFieldMapper.NumberType.LONG); MappedFieldType rFTvalue = new NumberFieldMapper.NumberFieldType(fieldName, NumberFieldMapper.NumberType.LONG); - List responses = doQueries(new MatchAllDocsQuery(), - iw -> { - iw.addDocument(timestampedValueDoc(100, 1)); - iw.addDocument(timestampedValueDoc(200, 2)); - iw.addDocument(timestampedValueDoc(300, 3)); - }, nonRollup, - iw -> { - iw.addDocument(timestampedValueRollupDoc(100, rollupValue)); - }, rollup, - new MappedFieldType[]{nrFTvalue}, new MappedFieldType[]{rFTvalue}); + List responses = doQueries(new MatchAllDocsQuery(), iw -> { + iw.addDocument(timestampedValueDoc(100, 1)); + iw.addDocument(timestampedValueDoc(200, 2)); + iw.addDocument(timestampedValueDoc(300, 3)); + }, + nonRollup, + iw -> { iw.addDocument(timestampedValueRollupDoc(100, rollupValue)); }, + rollup, + new MappedFieldType[] { nrFTvalue }, + new MappedFieldType[] { rFTvalue } + ); InternalAggregation unrolled = RollupResponseTranslator.unrollAgg(responses.get(1), null, null, 1); assertThat(unrolled.toString(), equalTo(responses.get(0).toString())); @@ -927,60 +1035,68 @@ public void testMetric() throws IOException { public void testUnsupportedMetric() throws IOException { - - AggregationBuilder nonRollup = new CardinalityAggregationBuilder("test_metric").userValueTypeHint(ValueType.LONG) - .field("foo"); + AggregationBuilder nonRollup = new CardinalityAggregationBuilder("test_metric").userValueTypeHint(ValueType.LONG).field("foo"); String fieldName = "foo.max." 
+ RollupField.VALUE; - AggregationBuilder rollup = new CardinalityAggregationBuilder("test_metric").userValueTypeHint(ValueType.LONG) - .field(fieldName); + AggregationBuilder rollup = new CardinalityAggregationBuilder("test_metric").userValueTypeHint(ValueType.LONG).field(fieldName); MappedFieldType nrFTvalue = new NumberFieldMapper.NumberFieldType("foo", NumberFieldMapper.NumberType.LONG); MappedFieldType rFTvalue = new NumberFieldMapper.NumberFieldType(fieldName, NumberFieldMapper.NumberType.LONG); - List responses = doQueries(new MatchAllDocsQuery(), - iw -> { - iw.addDocument(timestampedValueDoc(100, 1)); - iw.addDocument(timestampedValueDoc(200, 2)); - iw.addDocument(timestampedValueDoc(300, 3)); - }, nonRollup, - iw -> { - iw.addDocument(timestampedValueRollupDoc(100, 3)); - }, rollup, - new MappedFieldType[]{nrFTvalue}, new MappedFieldType[]{rFTvalue}); - - RuntimeException e = expectThrows(RuntimeException.class, - () -> RollupResponseTranslator.unrollAgg(responses.get(1), null, null, 1)); - assertThat(e.getMessage(), equalTo("Unable to unroll metric. Aggregation [test_metric] is of type " + - "[InternalCardinality] which is currently unsupported.")); + List responses = doQueries(new MatchAllDocsQuery(), iw -> { + iw.addDocument(timestampedValueDoc(100, 1)); + iw.addDocument(timestampedValueDoc(200, 2)); + iw.addDocument(timestampedValueDoc(300, 3)); + }, + nonRollup, + iw -> { iw.addDocument(timestampedValueRollupDoc(100, 3)); }, + rollup, + new MappedFieldType[] { nrFTvalue }, + new MappedFieldType[] { rFTvalue } + ); + + RuntimeException e = expectThrows( + RuntimeException.class, + () -> RollupResponseTranslator.unrollAgg(responses.get(1), null, null, 1) + ); + assertThat( + e.getMessage(), + equalTo( + "Unable to unroll metric. Aggregation [test_metric] is of type " + "[InternalCardinality] which is currently unsupported." + ) + ); } public void testStringTerms() throws IOException { TermsAggregationBuilder nonRollupTerms = new TermsAggregationBuilder("terms").userValueTypeHint(ValueType.STRING) - .field("stringField"); + .field("stringField"); TermsAggregationBuilder rollupTerms = new TermsAggregationBuilder("terms").userValueTypeHint(ValueType.STRING) - .field("stringfield.terms." + RollupField.VALUE) - .subAggregation(new SumAggregationBuilder("terms." + RollupField.COUNT_FIELD) - .field("stringfield.terms." + RollupField.COUNT_FIELD)); + .field("stringfield.terms." + RollupField.VALUE) + .subAggregation( + new SumAggregationBuilder("terms." + RollupField.COUNT_FIELD).field("stringfield.terms." + RollupField.COUNT_FIELD) + ); KeywordFieldMapper.KeywordFieldType nrFTterm = new KeywordFieldMapper.KeywordFieldType(nonRollupTerms.field()); KeywordFieldMapper.KeywordFieldType rFTterm = new KeywordFieldMapper.KeywordFieldType(rollupTerms.field()); - MappedFieldType rFTvalue = new NumberFieldMapper.NumberFieldType("stringfield.terms." + RollupField.COUNT_FIELD, - NumberFieldMapper.NumberType.LONG); - - List responses = doQueries(new MatchAllDocsQuery(), - iw -> { - iw.addDocument(stringValueDoc("abc")); - iw.addDocument(stringValueDoc("abc")); - iw.addDocument(stringValueDoc("abc")); - }, nonRollupTerms, - iw -> { - iw.addDocument(stringValueRollupDoc("abc", 3)); - }, rollupTerms, - new MappedFieldType[]{nrFTterm}, new MappedFieldType[]{rFTterm, rFTvalue}); + MappedFieldType rFTvalue = new NumberFieldMapper.NumberFieldType( + "stringfield.terms." 
+ RollupField.COUNT_FIELD, + NumberFieldMapper.NumberType.LONG + ); + + List responses = doQueries(new MatchAllDocsQuery(), iw -> { + iw.addDocument(stringValueDoc("abc")); + iw.addDocument(stringValueDoc("abc")); + iw.addDocument(stringValueDoc("abc")); + }, + nonRollupTerms, + iw -> { iw.addDocument(stringValueRollupDoc("abc", 3)); }, + rollupTerms, + new MappedFieldType[] { nrFTterm }, + new MappedFieldType[] { rFTterm, rFTvalue } + ); InternalAggregation unrolled = RollupResponseTranslator.unrollAgg(responses.get(1), null, null, 0); assertThat(unrolled.toString(), equalTo(responses.get(0).toString())); @@ -993,30 +1109,34 @@ public void testStringTermsNullValue() throws IOException { TermsAggregationBuilder rollupTerms = new TermsAggregationBuilder("terms").userValueTypeHint(ValueType.STRING) .field("stringfield.terms." + RollupField.VALUE) - .subAggregation(new SumAggregationBuilder("terms." + RollupField.COUNT_FIELD) - .field("stringfield.terms." + RollupField.COUNT_FIELD)); + .subAggregation( + new SumAggregationBuilder("terms." + RollupField.COUNT_FIELD).field("stringfield.terms." + RollupField.COUNT_FIELD) + ); KeywordFieldMapper.KeywordFieldType nrFTterm = new KeywordFieldMapper.KeywordFieldType(nonRollupTerms.field()); KeywordFieldMapper.KeywordFieldType rFTterm = new KeywordFieldMapper.KeywordFieldType(rollupTerms.field()); - MappedFieldType rFTvalue = new NumberFieldMapper.NumberFieldType("stringfield.terms." + RollupField.COUNT_FIELD, - NumberFieldMapper.NumberType.LONG); - - List responses = doQueries(new MatchAllDocsQuery(), - iw -> { - iw.addDocument(stringValueDoc("abc")); - iw.addDocument(stringValueDoc("abc")); - iw.addDocument(stringValueDoc("abc")); - - // off target - Document doc = new Document(); - doc.add(new SortedSetDocValuesField("otherField", new BytesRef("other"))); - iw.addDocument(doc); - }, nonRollupTerms, - iw -> { - iw.addDocument(stringValueRollupDoc("abc", 3)); - }, rollupTerms, - new MappedFieldType[]{nrFTterm}, new MappedFieldType[]{rFTterm, rFTvalue}); + MappedFieldType rFTvalue = new NumberFieldMapper.NumberFieldType( + "stringfield.terms." + RollupField.COUNT_FIELD, + NumberFieldMapper.NumberType.LONG + ); + + List responses = doQueries(new MatchAllDocsQuery(), iw -> { + iw.addDocument(stringValueDoc("abc")); + iw.addDocument(stringValueDoc("abc")); + iw.addDocument(stringValueDoc("abc")); + + // off target + Document doc = new Document(); + doc.add(new SortedSetDocValuesField("otherField", new BytesRef("other"))); + iw.addDocument(doc); + }, + nonRollupTerms, + iw -> { iw.addDocument(stringValueRollupDoc("abc", 3)); }, + rollupTerms, + new MappedFieldType[] { nrFTterm }, + new MappedFieldType[] { rFTterm, rFTvalue } + ); InternalAggregation unrolled = RollupResponseTranslator.unrollAgg(responses.get(1), null, null, 0); @@ -1026,30 +1146,33 @@ public void testStringTermsNullValue() throws IOException { } public void testLongTerms() throws IOException { - TermsAggregationBuilder nonRollupTerms = new TermsAggregationBuilder("terms").userValueTypeHint(ValueType.LONG) - .field("longField"); + TermsAggregationBuilder nonRollupTerms = new TermsAggregationBuilder("terms").userValueTypeHint(ValueType.LONG).field("longField"); TermsAggregationBuilder rollupTerms = new TermsAggregationBuilder("terms").userValueTypeHint(ValueType.LONG) - .field("longfield.terms." + RollupField.VALUE) - .subAggregation(new SumAggregationBuilder("terms." + RollupField.COUNT_FIELD) - .field("longfield.terms." + RollupField.COUNT_FIELD)); + .field("longfield.terms." 
+ RollupField.VALUE) + .subAggregation( + new SumAggregationBuilder("terms." + RollupField.COUNT_FIELD).field("longfield.terms." + RollupField.COUNT_FIELD) + ); MappedFieldType nrFTterm = new NumberFieldMapper.NumberFieldType(nonRollupTerms.field(), NumberFieldMapper.NumberType.LONG); MappedFieldType rFTterm = new NumberFieldMapper.NumberFieldType(rollupTerms.field(), NumberFieldMapper.NumberType.LONG); - MappedFieldType rFTvalue = new NumberFieldMapper.NumberFieldType("longfield.terms." + RollupField.COUNT_FIELD, - NumberFieldMapper.NumberType.LONG); - - List responses = doQueries(new MatchAllDocsQuery(), - iw -> { - iw.addDocument(longValueDoc(19L)); - iw.addDocument(longValueDoc(19L)); - iw.addDocument(longValueDoc(19L)); - }, nonRollupTerms, - iw -> { - iw.addDocument(longValueRollupDoc(19L, 3)); - }, rollupTerms, - new MappedFieldType[]{nrFTterm}, new MappedFieldType[]{rFTterm, rFTvalue}); + MappedFieldType rFTvalue = new NumberFieldMapper.NumberFieldType( + "longfield.terms." + RollupField.COUNT_FIELD, + NumberFieldMapper.NumberType.LONG + ); + + List responses = doQueries(new MatchAllDocsQuery(), iw -> { + iw.addDocument(longValueDoc(19L)); + iw.addDocument(longValueDoc(19L)); + iw.addDocument(longValueDoc(19L)); + }, + nonRollupTerms, + iw -> { iw.addDocument(longValueRollupDoc(19L, 3)); }, + rollupTerms, + new MappedFieldType[] { nrFTterm }, + new MappedFieldType[] { rFTterm, rFTvalue } + ); InternalAggregation unrolled = RollupResponseTranslator.unrollAgg(responses.get(1), null, null, 0); assertThat(unrolled.toString(), equalTo(responses.get(0).toString())); @@ -1057,33 +1180,31 @@ public void testLongTerms() throws IOException { } public void testHisto() throws IOException { - HistogramAggregationBuilder nonRollupHisto = new HistogramAggregationBuilder("histo") - .field("bar").interval(100); + HistogramAggregationBuilder nonRollupHisto = new HistogramAggregationBuilder("histo").field("bar").interval(100); - HistogramAggregationBuilder rollupHisto = new HistogramAggregationBuilder("histo") - .field("bar.histogram." + RollupField.VALUE) - .interval(100) - .subAggregation(new SumAggregationBuilder("histo." + RollupField.COUNT_FIELD) - .field("bar.histogram." + RollupField.COUNT_FIELD)); + HistogramAggregationBuilder rollupHisto = new HistogramAggregationBuilder("histo").field("bar.histogram." + RollupField.VALUE) + .interval(100) + .subAggregation( + new SumAggregationBuilder("histo." + RollupField.COUNT_FIELD).field("bar.histogram." + RollupField.COUNT_FIELD) + ); MappedFieldType nrFTbar = new NumberFieldMapper.NumberFieldType(nonRollupHisto.field(), NumberFieldMapper.NumberType.LONG); MappedFieldType rFTbar = new NumberFieldMapper.NumberFieldType(rollupHisto.field(), NumberFieldMapper.NumberType.LONG); - MappedFieldType rFTvalue = new NumberFieldMapper.NumberFieldType("bar.histogram." + RollupField.COUNT_FIELD, - NumberFieldMapper.NumberType.LONG); - - List responses = doQueries(new MatchAllDocsQuery(), - iw -> { - iw.addDocument(timestampedValueDoc(100, 1)); - iw.addDocument(timestampedValueDoc(200, 2)); - iw.addDocument(timestampedValueDoc(300, 3)); - }, nonRollupHisto, - iw -> { - iw.addDocument(timestampedValueRollupDoc(100, 1)); - iw.addDocument(timestampedValueRollupDoc(200, 2)); - iw.addDocument(timestampedValueRollupDoc(300, 3)); - }, rollupHisto, - new MappedFieldType[]{nrFTbar}, new MappedFieldType[]{rFTbar, rFTvalue}); + MappedFieldType rFTvalue = new NumberFieldMapper.NumberFieldType( + "bar.histogram." 
+ RollupField.COUNT_FIELD, + NumberFieldMapper.NumberType.LONG + ); + + List responses = doQueries(new MatchAllDocsQuery(), iw -> { + iw.addDocument(timestampedValueDoc(100, 1)); + iw.addDocument(timestampedValueDoc(200, 2)); + iw.addDocument(timestampedValueDoc(300, 3)); + }, nonRollupHisto, iw -> { + iw.addDocument(timestampedValueRollupDoc(100, 1)); + iw.addDocument(timestampedValueRollupDoc(200, 2)); + iw.addDocument(timestampedValueRollupDoc(300, 3)); + }, rollupHisto, new MappedFieldType[] { nrFTbar }, new MappedFieldType[] { rFTbar, rFTvalue }); InternalAggregation unrolled = RollupResponseTranslator.unrollAgg(responses.get(1), null, null, 0); assertThat(unrolled.toString(), equalTo(responses.get(0).toString())); @@ -1091,42 +1212,45 @@ public void testHisto() throws IOException { } public void testOverlappingBuckets() throws IOException { - DateHistogramAggregationBuilder nonRollupHisto = new DateHistogramAggregationBuilder("histo") - .field("timestamp").fixedInterval(new DateHistogramInterval("100ms")); + DateHistogramAggregationBuilder nonRollupHisto = new DateHistogramAggregationBuilder("histo").field("timestamp") + .fixedInterval(new DateHistogramInterval("100ms")); - DateHistogramAggregationBuilder rollupHisto = new DateHistogramAggregationBuilder("histo") - .field("timestamp.date_histogram." + RollupField.TIMESTAMP) - .fixedInterval(new DateHistogramInterval("100ms")) - .subAggregation(new SumAggregationBuilder("histo." + RollupField.COUNT_FIELD) - .field("timestamp.date_histogram." + RollupField.COUNT_FIELD)); + DateHistogramAggregationBuilder rollupHisto = new DateHistogramAggregationBuilder("histo").field( + "timestamp.date_histogram." + RollupField.TIMESTAMP + ) + .fixedInterval(new DateHistogramInterval("100ms")) + .subAggregation( + new SumAggregationBuilder("histo." + RollupField.COUNT_FIELD).field("timestamp.date_histogram." + RollupField.COUNT_FIELD) + ); DateFieldMapper.DateFieldType nrFTtimestamp = new DateFieldMapper.DateFieldType(nonRollupHisto.field()); DateFieldMapper.DateFieldType rFTtimestamp = new DateFieldMapper.DateFieldType(rollupHisto.field()); - MappedFieldType rFTvalue = new NumberFieldMapper.NumberFieldType("timestamp.date_histogram." + RollupField.COUNT_FIELD, - NumberFieldMapper.NumberType.LONG); - - List responses = doQueries(new MatchAllDocsQuery(), - iw -> { - iw.addDocument(timestampedValueDoc(100, 1)); - iw.addDocument(timestampedValueDoc(200, 2)); - iw.addDocument(timestampedValueDoc(300, 3)); - }, nonRollupHisto, - iw -> { - iw.addDocument(timestampedValueRollupDoc(100, 100)); - iw.addDocument(timestampedValueRollupDoc(200, 200)); - iw.addDocument(timestampedValueRollupDoc(300, 300)); - iw.addDocument(timestampedValueRollupDoc(400, 4)); // <-- Only one that should show up in rollup - }, rollupHisto, - new MappedFieldType[]{nrFTtimestamp}, new MappedFieldType[]{rFTtimestamp, rFTvalue}); + MappedFieldType rFTvalue = new NumberFieldMapper.NumberFieldType( + "timestamp.date_histogram." 
+ RollupField.COUNT_FIELD, + NumberFieldMapper.NumberType.LONG + ); + + List responses = doQueries(new MatchAllDocsQuery(), iw -> { + iw.addDocument(timestampedValueDoc(100, 1)); + iw.addDocument(timestampedValueDoc(200, 2)); + iw.addDocument(timestampedValueDoc(300, 3)); + }, nonRollupHisto, iw -> { + iw.addDocument(timestampedValueRollupDoc(100, 100)); + iw.addDocument(timestampedValueRollupDoc(200, 200)); + iw.addDocument(timestampedValueRollupDoc(300, 300)); + iw.addDocument(timestampedValueRollupDoc(400, 4)); // <-- Only one that should show up in rollup + }, rollupHisto, new MappedFieldType[] { nrFTtimestamp }, new MappedFieldType[] { rFTtimestamp, rFTvalue }); InternalAggregation unrolled = RollupResponseTranslator.unrollAgg(responses.get(1), responses.get(0), null, 0); - assertThat(((InternalDateHistogram)unrolled).getBuckets().size(), equalTo(1)); - assertThat(((InternalDateHistogram)unrolled).getBuckets().get(0).getDocCount(), equalTo(1L)); - assertThat(((InternalDateHistogram)unrolled).getBuckets().get(0).getKeyAsString(), equalTo("1970-01-01T00:00:00.400Z")); - assertThat(unrolled.toString(), equalTo("{\"histo\":{\"buckets\":[{\"key_as_string\":\"1970-01-01T00:00:00.400Z\"," + - "\"key\":400,\"doc_count\":1}]}}")); + assertThat(((InternalDateHistogram) unrolled).getBuckets().size(), equalTo(1)); + assertThat(((InternalDateHistogram) unrolled).getBuckets().get(0).getDocCount(), equalTo(1L)); + assertThat(((InternalDateHistogram) unrolled).getBuckets().get(0).getKeyAsString(), equalTo("1970-01-01T00:00:00.400Z")); + assertThat( + unrolled.toString(), + equalTo("{\"histo\":{\"buckets\":[{\"key_as_string\":\"1970-01-01T00:00:00.400Z\"," + "\"key\":400,\"doc_count\":1}]}}") + ); assertThat(unrolled.toString(), not(equalTo(responses.get(1).toString()))); } @@ -1183,14 +1307,15 @@ private Document longValueRollupDoc(Long longValue, long docCount) { return doc; } - private List doQueries(Query query, - CheckedConsumer buildNonRollupIndex, - AggregationBuilder nonRollupAggBuilder, - CheckedConsumer buildRollupIndex, - AggregationBuilder rollupAggBuilder, - MappedFieldType[] nonRollupFieldType, - MappedFieldType[] rollupFieldType) - throws IOException { + private List doQueries( + Query query, + CheckedConsumer buildNonRollupIndex, + AggregationBuilder nonRollupAggBuilder, + CheckedConsumer buildRollupIndex, + AggregationBuilder rollupAggBuilder, + MappedFieldType[] nonRollupFieldType, + MappedFieldType[] rollupFieldType + ) throws IOException { List results = new ArrayList<>(2); results.add(doQuery(query, buildNonRollupIndex, nonRollupAggBuilder, nonRollupFieldType)); @@ -1199,10 +1324,12 @@ private List doQueries(Query query, return results; } - private InternalAggregation doQuery(Query query, - CheckedConsumer buildIndex, - AggregationBuilder aggBuilder, MappedFieldType[] fieldType) - throws IOException { + private InternalAggregation doQuery( + Query query, + CheckedConsumer buildIndex, + AggregationBuilder aggBuilder, + MappedFieldType[] fieldType + ) throws IOException { Directory directory = newDirectory(); RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory); diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/DeleteJobActionRequestTests.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/DeleteJobActionRequestTests.java index 1ecd7c3247db2..b1cdd19e809f7 100644 --- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/DeleteJobActionRequestTests.java +++ 
b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/DeleteJobActionRequestTests.java @@ -22,5 +22,3 @@ protected Writeable.Reader instanceReader() { return Request::new; } } - - diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/GetJobsActionRequestTests.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/GetJobsActionRequestTests.java index 9990e516c62f6..bdc1cf0a8bfa1 100644 --- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/GetJobsActionRequestTests.java +++ b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/GetJobsActionRequestTests.java @@ -39,9 +39,11 @@ protected Writeable.Reader instanceReader() { public void testStateCheckNoPersistentTasks() { GetRollupJobsAction.Request request = new GetRollupJobsAction.Request("foo"); ClusterState state = ClusterState.builder(new ClusterName("_name")) - .metadata(Metadata.builder().putCustom(PersistentTasksCustomMetadata.TYPE, - new PersistentTasksCustomMetadata(0L, Collections.emptyMap()))) - .build(); + .metadata( + Metadata.builder() + .putCustom(PersistentTasksCustomMetadata.TYPE, new PersistentTasksCustomMetadata(0L, Collections.emptyMap())) + ) + .build(); boolean hasRollupJobs = TransportGetRollupJobAction.stateHasRollupJobs(request, state); assertFalse(hasRollupJobs); } @@ -49,21 +51,24 @@ public void testStateCheckNoPersistentTasks() { public void testStateCheckAllNoPersistentTasks() { GetRollupJobsAction.Request request = new GetRollupJobsAction.Request("_all"); ClusterState state = ClusterState.builder(new ClusterName("_name")) - .metadata(Metadata.builder().putCustom(PersistentTasksCustomMetadata.TYPE, - new PersistentTasksCustomMetadata(0L, Collections.emptyMap()))) - .build(); + .metadata( + Metadata.builder() + .putCustom(PersistentTasksCustomMetadata.TYPE, new PersistentTasksCustomMetadata(0L, Collections.emptyMap())) + ) + .build(); boolean hasRollupJobs = TransportGetRollupJobAction.stateHasRollupJobs(request, state); assertFalse(hasRollupJobs); } public void testStateCheckNoMatchingPersistentTasks() { GetRollupJobsAction.Request request = new GetRollupJobsAction.Request("foo"); - Map> tasks - = Collections.singletonMap("bar", new PersistentTasksCustomMetadata.PersistentTask<>("bar", "bar", null, 1, null)); + Map> tasks = Collections.singletonMap( + "bar", + new PersistentTasksCustomMetadata.PersistentTask<>("bar", "bar", null, 1, null) + ); ClusterState state = ClusterState.builder(new ClusterName("_name")) - .metadata(Metadata.builder().putCustom(PersistentTasksCustomMetadata.TYPE, - new PersistentTasksCustomMetadata(0L, tasks))) - .build(); + .metadata(Metadata.builder().putCustom(PersistentTasksCustomMetadata.TYPE, new PersistentTasksCustomMetadata(0L, tasks))) + .build(); boolean hasRollupJobs = TransportGetRollupJobAction.stateHasRollupJobs(request, state); assertFalse(hasRollupJobs); } @@ -71,12 +76,13 @@ public void testStateCheckNoMatchingPersistentTasks() { public void testStateCheckMatchingPersistentTasks() { GetRollupJobsAction.Request request = new GetRollupJobsAction.Request("foo"); RollupJob job = new RollupJob(ConfigTestHelpers.randomRollupJobConfig(random(), "foo"), Collections.emptyMap()); - Map> tasks - = Collections.singletonMap("foo", new PersistentTasksCustomMetadata.PersistentTask<>("foo", RollupJob.NAME, job, 1, null)); + Map> tasks = Collections.singletonMap( + "foo", + new PersistentTasksCustomMetadata.PersistentTask<>("foo", RollupJob.NAME, job, 1, null) + ); 
ClusterState state = ClusterState.builder(new ClusterName("_name")) - .metadata(Metadata.builder().putCustom(PersistentTasksCustomMetadata.TYPE, - new PersistentTasksCustomMetadata(0L, tasks))) - .build(); + .metadata(Metadata.builder().putCustom(PersistentTasksCustomMetadata.TYPE, new PersistentTasksCustomMetadata(0L, tasks))) + .build(); boolean hasRollupJobs = TransportGetRollupJobAction.stateHasRollupJobs(request, state); assertTrue(hasRollupJobs); } @@ -84,12 +90,13 @@ public void testStateCheckMatchingPersistentTasks() { public void testStateCheckAllMatchingPersistentTasks() { GetRollupJobsAction.Request request = new GetRollupJobsAction.Request("_all"); RollupJob job = new RollupJob(ConfigTestHelpers.randomRollupJobConfig(random(), "foo"), Collections.emptyMap()); - Map> tasks - = Collections.singletonMap("foo", new PersistentTasksCustomMetadata.PersistentTask<>("foo", RollupJob.NAME, job, 1, null)); + Map> tasks = Collections.singletonMap( + "foo", + new PersistentTasksCustomMetadata.PersistentTask<>("foo", RollupJob.NAME, job, 1, null) + ); ClusterState state = ClusterState.builder(new ClusterName("_name")) - .metadata(Metadata.builder().putCustom(PersistentTasksCustomMetadata.TYPE, - new PersistentTasksCustomMetadata(0L, tasks))) - .build(); + .metadata(Metadata.builder().putCustom(PersistentTasksCustomMetadata.TYPE, new PersistentTasksCustomMetadata(0L, tasks))) + .build(); boolean hasRollupJobs = TransportGetRollupJobAction.stateHasRollupJobs(request, state); assertTrue(hasRollupJobs); } @@ -102,12 +109,9 @@ public void testStateCheckAllWithSeveralMatchingPersistentTasks() { tasks.put("foo", new PersistentTasksCustomMetadata.PersistentTask<>("foo", RollupJob.NAME, job, 1, null)); tasks.put("bar", new PersistentTasksCustomMetadata.PersistentTask<>("bar", RollupJob.NAME, job2, 1, null)); ClusterState state = ClusterState.builder(new ClusterName("_name")) - .metadata(Metadata.builder().putCustom(PersistentTasksCustomMetadata.TYPE, - new PersistentTasksCustomMetadata(0L, tasks))) - .build(); + .metadata(Metadata.builder().putCustom(PersistentTasksCustomMetadata.TYPE, new PersistentTasksCustomMetadata(0L, tasks))) + .build(); boolean hasRollupJobs = TransportGetRollupJobAction.stateHasRollupJobs(request, state); assertTrue(hasRollupJobs); } } - - diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/GetRollupCapsActionRequestTests.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/GetRollupCapsActionRequestTests.java index 5541149d30ab5..d2a38f65633d2 100644 --- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/GetRollupCapsActionRequestTests.java +++ b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/GetRollupCapsActionRequestTests.java @@ -6,7 +6,6 @@ */ package org.elasticsearch.xpack.rollup.action; - import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.metadata.MappingMetadata; import org.elasticsearch.cluster.metadata.Metadata; @@ -28,7 +27,6 @@ import static org.hamcrest.Matchers.equalTo; - public class GetRollupCapsActionRequestTests extends AbstractWireSerializingTestCase { @Override @@ -73,9 +71,10 @@ public void testMissingMeta() throws IOException { public void testMissingJob() throws IOException { String indexPattern = randomBoolean() ? 
randomAlphaOfLength(10) : randomAlphaOfLength(10) + "-*"; - MappingMetadata mappingMeta = new MappingMetadata(RollupField.NAME, Collections.singletonMap(RollupField.NAME, - Collections.singletonMap("_meta", - Collections.emptyMap()))); + MappingMetadata mappingMeta = new MappingMetadata( + RollupField.NAME, + Collections.singletonMap(RollupField.NAME, Collections.singletonMap("_meta", Collections.emptyMap())) + ); IndexMetadata meta = Mockito.mock(IndexMetadata.class); Mockito.when(meta.mapping()).thenReturn(mappingMeta); @@ -88,11 +87,13 @@ public void testOneJob() throws IOException { String jobName = randomAlphaOfLength(5); RollupJobConfig job = ConfigTestHelpers.randomRollupJobConfig(random(), jobName); - MappingMetadata mappingMeta = new MappingMetadata(RollupField.TYPE_NAME, - Collections.singletonMap(RollupField.TYPE_NAME, - Collections.singletonMap("_meta", - Collections.singletonMap(RollupField.ROLLUP_META, - Collections.singletonMap(jobName, job))))); + MappingMetadata mappingMeta = new MappingMetadata( + RollupField.TYPE_NAME, + Collections.singletonMap( + RollupField.TYPE_NAME, + Collections.singletonMap("_meta", Collections.singletonMap(RollupField.ROLLUP_META, Collections.singletonMap(jobName, job))) + ) + ); IndexMetadata meta = Mockito.mock(IndexMetadata.class); Mockito.when(meta.mapping()).thenReturn(mappingMeta); @@ -104,17 +105,20 @@ public void testOneJob() throws IOException { public void testMultipleJobs() throws IOException { String indexPattern = randomBoolean() ? randomAlphaOfLength(10) : randomAlphaOfLength(10) + "-*"; - int num = randomIntBetween(1,5); + int num = randomIntBetween(1, 5); Map jobs = new HashMap<>(num); for (int i = 0; i < num; i++) { String jobName = randomAlphaOfLength(5); jobs.put(jobName, ConfigTestHelpers.randomRollupJobConfig(random(), jobName)); } - MappingMetadata mappingMeta = new MappingMetadata(RollupField.TYPE_NAME, - Collections.singletonMap(RollupField.TYPE_NAME, - Collections.singletonMap("_meta", - Collections.singletonMap(RollupField.ROLLUP_META, jobs)))); + MappingMetadata mappingMeta = new MappingMetadata( + RollupField.TYPE_NAME, + Collections.singletonMap( + RollupField.TYPE_NAME, + Collections.singletonMap("_meta", Collections.singletonMap(RollupField.ROLLUP_META, jobs)) + ) + ); IndexMetadata meta = Mockito.mock(IndexMetadata.class); Mockito.when(meta.mapping()).thenReturn(mappingMeta); @@ -130,7 +134,7 @@ public void testNoIndices() { } public void testAllIndices() throws IOException { - int num = randomIntBetween(1,5); + int num = randomIntBetween(1, 5); ImmutableOpenMap.Builder indices = new ImmutableOpenMap.Builder<>(5); int indexCounter = 0; for (int j = 0; j < 5; j++) { @@ -143,10 +147,13 @@ public void testAllIndices() throws IOException { jobs.put(jobName, ConfigTestHelpers.randomRollupJobConfig(random(), jobName, indexName)); } - MappingMetadata mappingMeta = new MappingMetadata(RollupField.TYPE_NAME, - Collections.singletonMap(RollupField.TYPE_NAME, - Collections.singletonMap("_meta", - Collections.singletonMap(RollupField.ROLLUP_META, jobs)))); + MappingMetadata mappingMeta = new MappingMetadata( + RollupField.TYPE_NAME, + Collections.singletonMap( + RollupField.TYPE_NAME, + Collections.singletonMap("_meta", Collections.singletonMap(RollupField.ROLLUP_META, jobs)) + ) + ); IndexMetadata meta = Mockito.mock(IndexMetadata.class); Mockito.when(meta.mapping()).thenReturn(mappingMeta); @@ -158,7 +165,7 @@ public void testAllIndices() throws IOException { } public void testOneIndex() throws IOException { - int num = 
randomIntBetween(1,5); + int num = randomIntBetween(1, 5); ImmutableOpenMap.Builder indices = new ImmutableOpenMap.Builder<>(5); String selectedIndexName = null; for (int j = 0; j < 5; j++) { @@ -173,10 +180,13 @@ public void testOneIndex() throws IOException { jobs.put(jobName, ConfigTestHelpers.randomRollupJobConfig(random(), jobName, indexName)); } - MappingMetadata mappingMeta = new MappingMetadata(RollupField.TYPE_NAME, - Collections.singletonMap(RollupField.TYPE_NAME, - Collections.singletonMap("_meta", - Collections.singletonMap(RollupField.ROLLUP_META, jobs)))); + MappingMetadata mappingMeta = new MappingMetadata( + RollupField.TYPE_NAME, + Collections.singletonMap( + RollupField.TYPE_NAME, + Collections.singletonMap("_meta", Collections.singletonMap(RollupField.ROLLUP_META, jobs)) + ) + ); IndexMetadata meta = Mockito.mock(IndexMetadata.class); Mockito.when(meta.mapping()).thenReturn(mappingMeta); @@ -191,11 +201,13 @@ public void testOneIndex() throws IOException { public void testNonRollupMeta() throws IOException { String indexPattern = randomBoolean() ? randomAlphaOfLength(10) : randomAlphaOfLength(10) + "-*"; - MappingMetadata mappingMeta = new MappingMetadata(RollupField.TYPE_NAME, - Collections.singletonMap(RollupField.TYPE_NAME, - Collections.singletonMap("_meta", - Collections.singletonMap("foo", - Collections.singletonMap("bar", "baz"))))); + MappingMetadata mappingMeta = new MappingMetadata( + RollupField.TYPE_NAME, + Collections.singletonMap( + RollupField.TYPE_NAME, + Collections.singletonMap("_meta", Collections.singletonMap("foo", Collections.singletonMap("bar", "baz"))) + ) + ); IndexMetadata meta = Mockito.mock(IndexMetadata.class); Mockito.when(meta.mapping()).thenReturn(mappingMeta); @@ -212,9 +224,10 @@ public void testNonRollupPlusRollupMeta() throws IOException { metaMap.put("foo", Collections.singletonMap("bar", "baz")); metaMap.put(RollupField.ROLLUP_META, Collections.singletonMap(jobName, job)); - MappingMetadata mappingMeta = new MappingMetadata(RollupField.TYPE_NAME, - Collections.singletonMap(RollupField.TYPE_NAME, - Collections.singletonMap("_meta", metaMap))); + MappingMetadata mappingMeta = new MappingMetadata( + RollupField.TYPE_NAME, + Collections.singletonMap(RollupField.TYPE_NAME, Collections.singletonMap("_meta", metaMap)) + ); IndexMetadata meta = Mockito.mock(IndexMetadata.class); Mockito.when(meta.mapping()).thenReturn(mappingMeta); @@ -243,7 +256,7 @@ public void testRandomNonRollupPlusRollupMeta() throws IOException { metaMap.put(randomAlphaOfLength(5), fields); } - int numJobs = randomIntBetween(1,5); + int numJobs = randomIntBetween(1, 5); Map jobs = new HashMap<>(numJobs); for (int i = 0; i < numJobs; i++) { String name = randomAlphaOfLength(5); @@ -251,9 +264,10 @@ public void testRandomNonRollupPlusRollupMeta() throws IOException { } metaMap.put(RollupField.ROLLUP_META, jobs); - MappingMetadata mappingMeta = new MappingMetadata(RollupField.TYPE_NAME, - Collections.singletonMap(RollupField.TYPE_NAME, - Collections.singletonMap("_meta", metaMap))); + MappingMetadata mappingMeta = new MappingMetadata( + RollupField.TYPE_NAME, + Collections.singletonMap(RollupField.TYPE_NAME, Collections.singletonMap("_meta", metaMap)) + ); IndexMetadata meta = Mockito.mock(IndexMetadata.class); Mockito.when(meta.mapping()).thenReturn(mappingMeta); @@ -265,8 +279,10 @@ public void testRandomNonRollupPlusRollupMeta() throws IOException { public void testEmptyType() throws IOException { String indexPattern = randomBoolean() ? 
randomAlphaOfLength(10) : randomAlphaOfLength(10) + "-*"; - MappingMetadata mappingMeta = new MappingMetadata(RollupField.TYPE_NAME, - Collections.singletonMap(RollupField.TYPE_NAME, Collections.emptyMap())); + MappingMetadata mappingMeta = new MappingMetadata( + RollupField.TYPE_NAME, + Collections.singletonMap(RollupField.TYPE_NAME, Collections.emptyMap()) + ); IndexMetadata meta = Mockito.mock(IndexMetadata.class); Mockito.when(meta.mapping()).thenReturn(mappingMeta); @@ -274,5 +290,3 @@ public void testEmptyType() throws IOException { assertFalse(caps.isPresent()); } } - - diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/GetRollupIndexCapsActionRequestTests.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/GetRollupIndexCapsActionRequestTests.java index 849c10f9b340c..5c2dc6e1e88f6 100644 --- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/GetRollupIndexCapsActionRequestTests.java +++ b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/GetRollupIndexCapsActionRequestTests.java @@ -6,7 +6,6 @@ */ package org.elasticsearch.xpack.rollup.action; - import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.metadata.MappingMetadata; import org.elasticsearch.cluster.metadata.Metadata; @@ -28,15 +27,14 @@ import static org.elasticsearch.xpack.rollup.action.TransportGetRollupIndexCapsAction.getCapsByRollupIndex; import static org.hamcrest.Matchers.equalTo; - public class GetRollupIndexCapsActionRequestTests extends AbstractWireSerializingTestCase { @Override protected GetRollupIndexCapsAction.Request createTestInstance() { if (randomBoolean()) { - return new GetRollupIndexCapsAction.Request(new String[]{Metadata.ALL}); + return new GetRollupIndexCapsAction.Request(new String[] { Metadata.ALL }); } - return new GetRollupIndexCapsAction.Request(new String[]{randomAlphaOfLengthBetween(1, 20)}); + return new GetRollupIndexCapsAction.Request(new String[] { randomAlphaOfLengthBetween(1, 20) }); } @Override @@ -51,7 +49,7 @@ public void testNoIndicesByRollup() { } public void testAllIndicesByRollupSingleRollup() throws IOException { - int num = randomIntBetween(1,5); + int num = randomIntBetween(1, 5); ImmutableOpenMap.Builder indices = new ImmutableOpenMap.Builder<>(5); int indexCounter = 0; for (int j = 0; j < 5; j++) { @@ -64,18 +62,20 @@ public void testAllIndicesByRollupSingleRollup() throws IOException { jobs.put(jobName, ConfigTestHelpers.randomRollupJobConfig(random(), jobName, indexName, "foo")); } - MappingMetadata mappingMeta = new MappingMetadata(RollupField.TYPE_NAME, - Collections.singletonMap(RollupField.TYPE_NAME, - Collections.singletonMap("_meta", - Collections.singletonMap(RollupField.ROLLUP_META, jobs)))); + MappingMetadata mappingMeta = new MappingMetadata( + RollupField.TYPE_NAME, + Collections.singletonMap( + RollupField.TYPE_NAME, + Collections.singletonMap("_meta", Collections.singletonMap(RollupField.ROLLUP_META, jobs)) + ) + ); IndexMetadata meta = Mockito.mock(IndexMetadata.class); Mockito.when(meta.mapping()).thenReturn(mappingMeta); indices.put("foo", meta); } - Map caps = getCapsByRollupIndex(Collections.singletonList("foo"), - indices.build()); + Map caps = getCapsByRollupIndex(Collections.singletonList("foo"), indices.build()); assertThat(caps.size(), equalTo(1)); } @@ -90,10 +90,13 @@ public void testAllIndicesByRollupManyRollup() throws IOException { indexCounter += 1; jobs.put(jobName, 
ConfigTestHelpers.randomRollupJobConfig(random(), jobName, indexName, "rollup_" + indexName)); - MappingMetadata mappingMeta = new MappingMetadata(RollupField.TYPE_NAME, - Collections.singletonMap(RollupField.TYPE_NAME, - Collections.singletonMap("_meta", - Collections.singletonMap(RollupField.ROLLUP_META, jobs)))); + MappingMetadata mappingMeta = new MappingMetadata( + RollupField.TYPE_NAME, + Collections.singletonMap( + RollupField.TYPE_NAME, + Collections.singletonMap("_meta", Collections.singletonMap(RollupField.ROLLUP_META, jobs)) + ) + ); IndexMetadata meta = Mockito.mock(IndexMetadata.class); Mockito.when(meta.mapping()).thenReturn(mappingMeta); @@ -104,7 +107,6 @@ public void testAllIndicesByRollupManyRollup() throws IOException { assertThat(caps.size(), equalTo(5)); } - public void testOneIndexByRollupManyRollup() throws IOException { ImmutableOpenMap.Builder indices = new ImmutableOpenMap.Builder<>(5); int indexCounter = 0; @@ -116,10 +118,13 @@ public void testOneIndexByRollupManyRollup() throws IOException { indexCounter += 1; jobs.put(jobName, ConfigTestHelpers.randomRollupJobConfig(random(), jobName, "foo_" + indexName, "rollup_" + indexName)); - MappingMetadata mappingMeta = new MappingMetadata(RollupField.TYPE_NAME, - Collections.singletonMap(RollupField.TYPE_NAME, - Collections.singletonMap("_meta", - Collections.singletonMap(RollupField.ROLLUP_META, jobs)))); + MappingMetadata mappingMeta = new MappingMetadata( + RollupField.TYPE_NAME, + Collections.singletonMap( + RollupField.TYPE_NAME, + Collections.singletonMap("_meta", Collections.singletonMap(RollupField.ROLLUP_META, jobs)) + ) + ); IndexMetadata meta = Mockito.mock(IndexMetadata.class); Mockito.when(meta.mapping()).thenReturn(mappingMeta); @@ -143,10 +148,13 @@ public void testOneIndexByRollupOneRollup() throws IOException { indexCounter += 1; jobs.put(jobName, ConfigTestHelpers.randomRollupJobConfig(random(), jobName, "foo_" + indexName, "rollup_foo")); - MappingMetadata mappingMeta = new MappingMetadata(RollupField.TYPE_NAME, - Collections.singletonMap(RollupField.TYPE_NAME, - Collections.singletonMap("_meta", - Collections.singletonMap(RollupField.ROLLUP_META, jobs)))); + MappingMetadata mappingMeta = new MappingMetadata( + RollupField.TYPE_NAME, + Collections.singletonMap( + RollupField.TYPE_NAME, + Collections.singletonMap("_meta", Collections.singletonMap(RollupField.ROLLUP_META, jobs)) + ) + ); IndexMetadata meta = Mockito.mock(IndexMetadata.class); Mockito.when(meta.mapping()).thenReturn(mappingMeta); @@ -159,5 +167,3 @@ public void testOneIndexByRollupOneRollup() throws IOException { assertThat(caps.get("rollup_foo").getJobCaps().size(), equalTo(1)); } } - - diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/PutJobActionRequestTests.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/PutJobActionRequestTests.java index b87c4e88b4e11..c3b55bec4fafe 100644 --- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/PutJobActionRequestTests.java +++ b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/PutJobActionRequestTests.java @@ -6,7 +6,6 @@ */ package org.elasticsearch.xpack.rollup.action; - import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.test.AbstractSerializingTestCase; @@ -22,7 +21,7 @@ public class PutJobActionRequestTests extends AbstractSerializingTestCase testListener = ActionListener.wrap(response -> { - 
fail("Listener success should not have been triggered."); - }, e -> { - assertThat(e.getMessage(), equalTo("Could not create index for rollup job [foo]")); - assertThat(e.getCause().getMessage(), equalTo("something bad")); - }); + ActionListener testListener = ActionListener.wrap( + response -> { fail("Listener success should not have been triggered."); }, + e -> { + assertThat(e.getMessage(), equalTo("Could not create index for rollup job [foo]")); + assertThat(e.getCause().getMessage(), equalTo("something bad")); + } + ); Logger logger = mock(Logger.class); Client client = mock(Client.class); @@ -84,11 +84,10 @@ public void testCreateIndexException() { public void testIndexAlreadyExists() { RollupJob job = new RollupJob(ConfigTestHelpers.randomRollupJobConfig(random()), Collections.emptyMap()); - ActionListener testListener = ActionListener.wrap(response -> { - fail("Listener success should not have been triggered."); - }, e -> { - assertThat(e.getCause().getMessage(), equalTo("Ending")); - }); + ActionListener testListener = ActionListener.wrap( + response -> { fail("Listener success should not have been triggered."); }, + e -> { assertThat(e.getCause().getMessage(), equalTo("Ending")); } + ); Logger logger = mock(Logger.class); Client client = mock(Client.class); @@ -116,11 +115,10 @@ public void testIndexAlreadyExists() { public void testIndexMetadata() throws InterruptedException { RollupJob job = new RollupJob(ConfigTestHelpers.randomRollupJobConfig(random()), Collections.emptyMap()); - ActionListener testListener = ActionListener.wrap(response -> { - fail("Listener success should not have been triggered."); - }, e -> { - assertThat(e.getCause().getMessage(), equalTo("Ending")); - }); + ActionListener testListener = ActionListener.wrap( + response -> { fail("Listener success should not have been triggered."); }, + e -> { assertThat(e.getCause().getMessage(), equalTo("Ending")); } + ); Logger logger = mock(Logger.class); Client client = mock(Client.class); @@ -159,12 +157,13 @@ public void testIndexMetadata() throws InterruptedException { public void testGetMappingFails() { RollupJob job = new RollupJob(ConfigTestHelpers.randomRollupJobConfig(random(), "foo"), Collections.emptyMap()); - ActionListener testListener = ActionListener.wrap(response -> { - fail("Listener success should not have been triggered."); - }, e -> { - assertThat(e.getMessage(), equalTo("Could not update mappings for rollup job [foo]")); - assertThat(e.getCause().getMessage(), equalTo("something bad")); - }); + ActionListener testListener = ActionListener.wrap( + response -> { fail("Listener success should not have been triggered."); }, + e -> { + assertThat(e.getMessage(), equalTo("Could not update mappings for rollup job [foo]")); + assertThat(e.getCause().getMessage(), equalTo("something bad")); + } + ); Logger logger = mock(Logger.class); Client client = mock(Client.class); @@ -183,13 +182,20 @@ public void testGetMappingFails() { public void testNoMetadataInMapping() { RollupJob job = new RollupJob(ConfigTestHelpers.randomRollupJobConfig(random()), Collections.emptyMap()); - ActionListener testListener = ActionListener.wrap(response -> { - fail("Listener success should not have been triggered."); - }, e -> { - assertThat(e.getMessage(), equalTo("Rollup data cannot be added to existing indices that contain " + - "non-rollup data (expected to find _meta key in mapping of rollup index [" - + job.getConfig().getRollupIndex() + "] but not found).")); - }); + ActionListener testListener = ActionListener.wrap( + 
response -> { fail("Listener success should not have been triggered."); }, + e -> { + assertThat( + e.getMessage(), + equalTo( + "Rollup data cannot be added to existing indices that contain " + + "non-rollup data (expected to find _meta key in mapping of rollup index [" + + job.getConfig().getRollupIndex() + + "] but not found)." + ) + ); + } + ); Logger logger = mock(Logger.class); Client client = mock(Client.class); @@ -215,13 +221,20 @@ public void testNoMetadataInMapping() { public void testMetadataButNotRollup() { RollupJob job = new RollupJob(ConfigTestHelpers.randomRollupJobConfig(random()), Collections.emptyMap()); - ActionListener testListener = ActionListener.wrap(response -> { - fail("Listener success should not have been triggered."); - }, e -> { - assertThat(e.getMessage(), equalTo("Rollup data cannot be added to existing indices that contain " + - "non-rollup data (expected to find rollup meta key [_rollup] in mapping of rollup index [" - + job.getConfig().getRollupIndex() + "] but not found).")); - }); + ActionListener testListener = ActionListener.wrap( + response -> { fail("Listener success should not have been triggered."); }, + e -> { + assertThat( + e.getMessage(), + equalTo( + "Rollup data cannot be added to existing indices that contain " + + "non-rollup data (expected to find rollup meta key [_rollup] in mapping of rollup index [" + + job.getConfig().getRollupIndex() + + "] but not found)." + ) + ); + } + ); Logger logger = mock(Logger.class); Client client = mock(Client.class); @@ -230,10 +243,8 @@ public void testMetadataButNotRollup() { doAnswer(invocation -> { GetMappingsResponse response = mock(GetMappingsResponse.class); Map m = new HashMap<>(2); - m.put("random", - Collections.singletonMap(job.getConfig().getId(), job.getConfig())); - MappingMetadata meta = new MappingMetadata(RollupField.TYPE_NAME, - Collections.singletonMap("_meta", m)); + m.put("random", Collections.singletonMap(job.getConfig().getId(), job.getConfig())); + MappingMetadata meta = new MappingMetadata(RollupField.TYPE_NAME, Collections.singletonMap("_meta", m)); ImmutableOpenMap.Builder builder2 = ImmutableOpenMap.builder(1); builder2.put(job.getConfig().getRollupIndex(), meta); @@ -251,12 +262,15 @@ public void testMetadataButNotRollup() { public void testNoMappingVersion() { RollupJob job = new RollupJob(ConfigTestHelpers.randomRollupJobConfig(random()), Collections.emptyMap()); - ActionListener testListener = ActionListener.wrap(response -> { - fail("Listener success should not have been triggered."); - }, e -> { - assertThat(e.getMessage(), equalTo("Could not determine version of existing rollup metadata for index [" - + job.getConfig().getRollupIndex() + "]")); - }); + ActionListener testListener = ActionListener.wrap( + response -> { fail("Listener success should not have been triggered."); }, + e -> { + assertThat( + e.getMessage(), + equalTo("Could not determine version of existing rollup metadata for index [" + job.getConfig().getRollupIndex() + "]") + ); + } + ); Logger logger = mock(Logger.class); Client client = mock(Client.class); @@ -265,10 +279,8 @@ public void testNoMappingVersion() { doAnswer(invocation -> { GetMappingsResponse response = mock(GetMappingsResponse.class); Map m = new HashMap<>(2); - m.put(RollupField.ROLLUP_META, - Collections.singletonMap(job.getConfig().getId(), job.getConfig())); - MappingMetadata meta = new MappingMetadata(RollupField.TYPE_NAME, - Collections.singletonMap("_meta", m)); + m.put(RollupField.ROLLUP_META, 
Collections.singletonMap(job.getConfig().getId(), job.getConfig())); + MappingMetadata meta = new MappingMetadata(RollupField.TYPE_NAME, Collections.singletonMap("_meta", m)); ImmutableOpenMap.Builder builder2 = ImmutableOpenMap.builder(1); builder2.put(job.getConfig().getRollupIndex(), meta); @@ -286,11 +298,15 @@ public void testNoMappingVersion() { public void testJobAlreadyInMapping() { RollupJob job = new RollupJob(ConfigTestHelpers.randomRollupJobConfig(random(), "foo"), Collections.emptyMap()); - ActionListener testListener = ActionListener.wrap(response -> { - fail("Listener success should not have been triggered."); - }, e -> { - assertThat(e.getMessage(), equalTo("Cannot create rollup job [foo] because job was previously created (existing metadata).")); - }); + ActionListener testListener = ActionListener.wrap( + response -> { fail("Listener success should not have been triggered."); }, + e -> { + assertThat( + e.getMessage(), + equalTo("Cannot create rollup job [foo] because job was previously created (existing metadata).") + ); + } + ); Logger logger = mock(Logger.class); Client client = mock(Client.class); @@ -300,10 +316,8 @@ public void testJobAlreadyInMapping() { GetMappingsResponse response = mock(GetMappingsResponse.class); Map m = new HashMap<>(2); m.put(Rollup.ROLLUP_TEMPLATE_VERSION_FIELD, VersionUtils.randomIndexCompatibleVersion(random())); - m.put(RollupField.ROLLUP_META, - Collections.singletonMap(job.getConfig().getId(), job.getConfig())); - MappingMetadata meta = new MappingMetadata(RollupField.TYPE_NAME, - Collections.singletonMap("_meta", m)); + m.put(RollupField.ROLLUP_META, Collections.singletonMap(job.getConfig().getId(), job.getConfig())); + MappingMetadata meta = new MappingMetadata(RollupField.TYPE_NAME, Collections.singletonMap("_meta", m)); ImmutableOpenMap.Builder builder2 = ImmutableOpenMap.builder(1); builder2.put(job.getConfig().getRollupIndex(), meta); @@ -319,17 +333,24 @@ public void testJobAlreadyInMapping() { @SuppressWarnings({ "unchecked", "rawtypes" }) public void testAddJobToMapping() { - final RollupJobConfig unrelatedJob = - ConfigTestHelpers.randomRollupJobConfig(random(), ESTestCase.randomAlphaOfLength(10), "foo", "rollup_index_foo"); + final RollupJobConfig unrelatedJob = ConfigTestHelpers.randomRollupJobConfig( + random(), + ESTestCase.randomAlphaOfLength(10), + "foo", + "rollup_index_foo" + ); - final RollupJobConfig config = - ConfigTestHelpers.randomRollupJobConfig(random(), ESTestCase.randomAlphaOfLength(10), "foo", "rollup_index_foo"); + final RollupJobConfig config = ConfigTestHelpers.randomRollupJobConfig( + random(), + ESTestCase.randomAlphaOfLength(10), + "foo", + "rollup_index_foo" + ); RollupJob job = new RollupJob(config, Collections.emptyMap()); - ActionListener testListener = ActionListener.wrap(response -> { - fail("Listener success should not have been triggered."); - }, e -> { - assertThat(e.getMessage(), equalTo("Ending")); - }); + ActionListener testListener = ActionListener.wrap( + response -> { fail("Listener success should not have been triggered."); }, + e -> { assertThat(e.getMessage(), equalTo("Ending")); } + ); Logger logger = mock(Logger.class); Client client = mock(Client.class); @@ -339,10 +360,8 @@ public void testAddJobToMapping() { GetMappingsResponse response = mock(GetMappingsResponse.class); Map m = new HashMap<>(2); m.put(Rollup.ROLLUP_TEMPLATE_VERSION_FIELD, VersionUtils.randomIndexCompatibleVersion(random())); - m.put(RollupField.ROLLUP_META, - Collections.singletonMap(unrelatedJob.getId(), 
unrelatedJob)); - MappingMetadata meta = new MappingMetadata(RollupField.TYPE_NAME, - Collections.singletonMap("_meta", m)); + m.put(RollupField.ROLLUP_META, Collections.singletonMap(unrelatedJob.getId(), unrelatedJob)); + MappingMetadata meta = new MappingMetadata(RollupField.TYPE_NAME, Collections.singletonMap("_meta", m)); ImmutableOpenMap.Builder builder2 = ImmutableOpenMap.builder(1); builder2.put(unrelatedJob.getRollupIndex(), meta); @@ -368,11 +387,10 @@ public void testAddJobToMapping() { public void testTaskAlreadyExists() { RollupJob job = new RollupJob(ConfigTestHelpers.randomRollupJobConfig(random(), "foo"), Collections.emptyMap()); - ActionListener testListener = ActionListener.wrap(response -> { - fail("Listener success should not have been triggered."); - }, e -> { - assertThat(e.getMessage(), equalTo("Cannot create job [foo] because it has already been created (task exists)")); - }); + ActionListener testListener = ActionListener.wrap( + response -> { fail("Listener success should not have been triggered."); }, + e -> { assertThat(e.getMessage(), equalTo("Cannot create job [foo] because it has already been created (task exists)")); } + ); PersistentTasksService tasksService = mock(PersistentTasksService.class); @@ -380,8 +398,7 @@ public void testTaskAlreadyExists() { doAnswer(invocation -> { requestCaptor.getValue().onFailure(new ResourceAlreadyExistsException(job.getConfig().getRollupIndex())); return null; - }).when(tasksService).sendStartRequest(eq(job.getConfig().getId()), - eq(RollupField.TASK_NAME), eq(job), requestCaptor.capture()); + }).when(tasksService).sendStartRequest(eq(job.getConfig().getId()), eq(RollupField.TASK_NAME), eq(job), requestCaptor.capture()); TransportPutRollupJobAction.startPersistentTask(job, testListener, tasksService); verify(tasksService).sendStartRequest(eq(job.getConfig().getId()), eq(RollupField.TASK_NAME), eq(job), any()); @@ -391,25 +408,29 @@ public void testTaskAlreadyExists() { public void testStartTask() { RollupJob job = new RollupJob(ConfigTestHelpers.randomRollupJobConfig(random()), Collections.emptyMap()); - ActionListener testListener = ActionListener.wrap(response -> { - fail("Listener success should not have been triggered."); - }, e -> { - assertThat(e.getMessage(), equalTo("Ending")); - }); + ActionListener testListener = ActionListener.wrap( + response -> { fail("Listener success should not have been triggered."); }, + e -> { assertThat(e.getMessage(), equalTo("Ending")); } + ); PersistentTasksService tasksService = mock(PersistentTasksService.class); ArgumentCaptor requestCaptor = ArgumentCaptor.forClass(ActionListener.class); doAnswer(invocation -> { - PersistentTasksCustomMetadata.PersistentTask response - = new PersistentTasksCustomMetadata.PersistentTask<>(job.getConfig().getId(), RollupField.TASK_NAME, job, 123, - mock(PersistentTasksCustomMetadata.Assignment.class)); + PersistentTasksCustomMetadata.PersistentTask response = new PersistentTasksCustomMetadata.PersistentTask<>( + job.getConfig().getId(), + RollupField.TASK_NAME, + job, + 123, + mock(PersistentTasksCustomMetadata.Assignment.class) + ); requestCaptor.getValue().onResponse(response); return null; }).when(tasksService).sendStartRequest(eq(job.getConfig().getId()), eq(RollupField.TASK_NAME), eq(job), requestCaptor.capture()); - ArgumentCaptor requestCaptor2 - = ArgumentCaptor.forClass(PersistentTasksService.WaitForPersistentTaskListener.class); + ArgumentCaptor requestCaptor2 = ArgumentCaptor.forClass( + 
PersistentTasksService.WaitForPersistentTaskListener.class + ); doAnswer(invocation -> { // Bail here with an error, further testing will happen through tests of #startPersistentTask requestCaptor2.getValue().onFailure(new RuntimeException("Ending")); @@ -425,20 +446,37 @@ public void testDeprecatedTimeZone() { GroupConfig groupConfig = new GroupConfig( new DateHistogramGroupConfig.FixedInterval("foo", new DateHistogramInterval("1h"), null, "Japan") ); - RollupJobConfig config = new RollupJobConfig("foo", randomAlphaOfLength(5), "rollup", ConfigTestHelpers.randomCron(), - 100, groupConfig, Collections.emptyList(), null); + RollupJobConfig config = new RollupJobConfig( + "foo", + randomAlphaOfLength(5), + "rollup", + ConfigTestHelpers.randomCron(), + 100, + groupConfig, + Collections.emptyList(), + null + ); PutRollupJobAction.Request request = new PutRollupJobAction.Request(config); TransportPutRollupJobAction.checkForDeprecatedTZ(request); - assertWarnings("Creating Rollup job [foo] with timezone [Japan], but [Japan] has been deprecated by the IANA. " + - "Use [Asia/Tokyo] instead."); + assertWarnings( + "Creating Rollup job [foo] with timezone [Japan], but [Japan] has been deprecated by the IANA. " + "Use [Asia/Tokyo] instead." + ); } public void testTimeZone() { GroupConfig groupConfig = new GroupConfig( new DateHistogramGroupConfig.FixedInterval("foo", new DateHistogramInterval("1h"), null, "EST") ); - RollupJobConfig config = new RollupJobConfig("foo", randomAlphaOfLength(5), "rollup", ConfigTestHelpers.randomCron(), - 100, groupConfig, Collections.emptyList(), null); + RollupJobConfig config = new RollupJobConfig( + "foo", + randomAlphaOfLength(5), + "rollup", + ConfigTestHelpers.randomCron(), + 100, + groupConfig, + Collections.emptyList(), + null + ); PutRollupJobAction.Request request = new PutRollupJobAction.Request(config); TransportPutRollupJobAction.checkForDeprecatedTZ(request); } diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/RollupIndexCapsTests.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/RollupIndexCapsTests.java index 6664764503b82..6cb1cc518420a 100644 --- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/RollupIndexCapsTests.java +++ b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/RollupIndexCapsTests.java @@ -36,9 +36,10 @@ public void testGetAllJobs() { RollupIndexCaps caps = new RollupIndexCaps(ESTestCase.randomAlphaOfLength(10), jobs); assertTrue(caps.hasCaps()); - List jobCaps = caps.getJobCapsByIndexPattern(Metadata.ALL).stream() - .map(RollupJobCaps::getJobID) - .collect(Collectors.toList()); + List jobCaps = caps.getJobCapsByIndexPattern(Metadata.ALL) + .stream() + .map(RollupJobCaps::getJobID) + .collect(Collectors.toList()); assertThat(jobCaps.size(), equalTo(2)); assertTrue(jobCaps.contains("foo")); assertTrue(jobCaps.contains("bar")); @@ -51,9 +52,10 @@ public void testFilterGetJobs() { RollupIndexCaps caps = new RollupIndexCaps(ESTestCase.randomAlphaOfLength(10), jobs); assertTrue(caps.hasCaps()); - List jobCaps = caps.getJobCapsByIndexPattern("foo_index_pattern").stream() - .map(RollupJobCaps::getJobID) - .collect(Collectors.toList()); + List jobCaps = caps.getJobCapsByIndexPattern("foo_index_pattern") + .stream() + .map(RollupJobCaps::getJobID) + .collect(Collectors.toList()); assertThat(jobCaps.size(), equalTo(1)); assertTrue(jobCaps.contains("foo")); assertFalse(jobCaps.contains("bar")); diff --git 
a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/SearchActionTests.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/SearchActionTests.java index 4bd74d1c274b7..8f2de11f197ad 100644 --- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/SearchActionTests.java +++ b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/SearchActionTests.java @@ -93,18 +93,23 @@ public void setUp() throws Exception { } public void testNonZeroSize() { - String[] normalIndices = new String[]{randomAlphaOfLength(10)}; - String[] rollupIndices = new String[]{randomAlphaOfLength(10)}; - TransportRollupSearchAction.RollupSearchContext ctx - = new TransportRollupSearchAction.RollupSearchContext(normalIndices, rollupIndices, Collections.emptySet()); + String[] normalIndices = new String[] { randomAlphaOfLength(10) }; + String[] rollupIndices = new String[] { randomAlphaOfLength(10) }; + TransportRollupSearchAction.RollupSearchContext ctx = new TransportRollupSearchAction.RollupSearchContext( + normalIndices, + rollupIndices, + Collections.emptySet() + ); SearchSourceBuilder source = new SearchSourceBuilder(); source.query(new MatchAllQueryBuilder()); source.size(100); source.aggregation(new DateHistogramAggregationBuilder("foo").field("foo").fixedInterval(new DateHistogramInterval("123ms"))); SearchRequest request = new SearchRequest(normalIndices, source); NamedWriteableRegistry registry = mock(NamedWriteableRegistry.class); - Exception e = expectThrows(IllegalArgumentException.class, - () -> TransportRollupSearchAction.createMSearchRequest(request, registry, ctx)); + Exception e = expectThrows( + IllegalArgumentException.class, + () -> TransportRollupSearchAction.createMSearchRequest(request, registry, ctx) + ); assertThat(e.getMessage(), equalTo("Rollup does not support returning search hits, please try again with [size: 0].")); } @@ -113,67 +118,78 @@ public void testBadQuery() { source.query(new MatchPhraseQueryBuilder("foo", "bar")); source.aggregation(new DateHistogramAggregationBuilder("foo").field("foo").fixedInterval(new DateHistogramInterval("123ms"))); source.size(0); - Exception e = expectThrows(IllegalArgumentException.class, - () -> TransportRollupSearchAction.rewriteQuery(new MatchPhraseQueryBuilder("foo", "bar"), Collections.emptySet())); + Exception e = expectThrows( + IllegalArgumentException.class, + () -> TransportRollupSearchAction.rewriteQuery(new MatchPhraseQueryBuilder("foo", "bar"), Collections.emptySet()) + ); assertThat(e.getMessage(), equalTo("Unsupported Query in search request: [match_phrase]")); } public void testRangeTimezoneUTC() { final GroupConfig groupConfig = new GroupConfig( - new DateHistogramGroupConfig.CalendarInterval("foo", new DateHistogramInterval("1h"))); - final RollupJobConfig config = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, groupConfig, emptyList(), null); + new DateHistogramGroupConfig.CalendarInterval("foo", new DateHistogramInterval("1h")) + ); + final RollupJobConfig config = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, groupConfig, emptyList(), null); RollupJobCaps cap = new RollupJobCaps(config); Set caps = new HashSet<>(); caps.add(cap); QueryBuilder rewritten = TransportRollupSearchAction.rewriteQuery(new RangeQueryBuilder("foo").gt(1).timeZone("UTC"), caps); assertThat(rewritten, instanceOf(RangeQueryBuilder.class)); - assertThat(((RangeQueryBuilder)rewritten).fieldName(), 
equalTo("foo.date_histogram.timestamp")); - assertThat(((RangeQueryBuilder)rewritten).timeZone(), equalTo("UTC")); + assertThat(((RangeQueryBuilder) rewritten).fieldName(), equalTo("foo.date_histogram.timestamp")); + assertThat(((RangeQueryBuilder) rewritten).timeZone(), equalTo("UTC")); } public void testRangeNullTimeZone() { final GroupConfig groupConfig = new GroupConfig( - new DateHistogramGroupConfig.CalendarInterval("foo", new DateHistogramInterval("1h"), null, null)); - final RollupJobConfig config = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, groupConfig, emptyList(), null); + new DateHistogramGroupConfig.CalendarInterval("foo", new DateHistogramInterval("1h"), null, null) + ); + final RollupJobConfig config = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, groupConfig, emptyList(), null); RollupJobCaps cap = new RollupJobCaps(config); Set caps = new HashSet<>(); caps.add(cap); QueryBuilder rewritten = TransportRollupSearchAction.rewriteQuery(new RangeQueryBuilder("foo").gt(1), caps); assertThat(rewritten, instanceOf(RangeQueryBuilder.class)); - assertThat(((RangeQueryBuilder)rewritten).fieldName(), equalTo("foo.date_histogram.timestamp")); - assertNull(((RangeQueryBuilder)rewritten).timeZone()); + assertThat(((RangeQueryBuilder) rewritten).fieldName(), equalTo("foo.date_histogram.timestamp")); + assertNull(((RangeQueryBuilder) rewritten).timeZone()); } public void testRangeDifferentTZ() { final GroupConfig groupConfig = new GroupConfig( - new DateHistogramGroupConfig.CalendarInterval("foo", new DateHistogramInterval("1h"), null, "UTC")); - final RollupJobConfig config = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, groupConfig, emptyList(), null); + new DateHistogramGroupConfig.CalendarInterval("foo", new DateHistogramInterval("1h"), null, "UTC") + ); + final RollupJobConfig config = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, groupConfig, emptyList(), null); RollupJobCaps cap = new RollupJobCaps(config); Set caps = new HashSet<>(); caps.add(cap); QueryBuilder rewritten = TransportRollupSearchAction.rewriteQuery(new RangeQueryBuilder("foo").gt(1).timeZone("CET"), caps); assertThat(rewritten, instanceOf(RangeQueryBuilder.class)); - assertThat(((RangeQueryBuilder)rewritten).fieldName(), equalTo("foo.date_histogram.timestamp")); + assertThat(((RangeQueryBuilder) rewritten).fieldName(), equalTo("foo.date_histogram.timestamp")); } public void testTermQuery() { final TermsGroupConfig terms = new TermsGroupConfig("foo"); final GroupConfig groupConfig = new GroupConfig( - new DateHistogramGroupConfig.CalendarInterval("boo", new DateHistogramInterval("1h")), null, terms); - final RollupJobConfig config = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, groupConfig, emptyList(), null); + new DateHistogramGroupConfig.CalendarInterval("boo", new DateHistogramInterval("1h")), + null, + terms + ); + final RollupJobConfig config = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, groupConfig, emptyList(), null); RollupJobCaps cap = new RollupJobCaps(config); Set caps = new HashSet<>(); caps.add(cap); QueryBuilder rewritten = TransportRollupSearchAction.rewriteQuery(new TermQueryBuilder("foo", "bar"), caps); assertThat(rewritten, instanceOf(TermQueryBuilder.class)); - assertThat(((TermQueryBuilder)rewritten).fieldName(), equalTo("foo.terms.value")); + assertThat(((TermQueryBuilder) rewritten).fieldName(), equalTo("foo.terms.value")); } public void testTermsQuery() { final 
TermsGroupConfig terms = new TermsGroupConfig("foo"); final GroupConfig groupConfig = new GroupConfig( - new DateHistogramGroupConfig.CalendarInterval("boo", new DateHistogramInterval("1h")), null, terms); - final RollupJobConfig config = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, groupConfig, emptyList(), null); + new DateHistogramGroupConfig.CalendarInterval("boo", new DateHistogramInterval("1h")), + null, + terms + ); + final RollupJobConfig config = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, groupConfig, emptyList(), null); RollupJobCaps cap = new RollupJobCaps(config); Set caps = new HashSet<>(); caps.add(cap); @@ -181,14 +197,15 @@ public void testTermsQuery() { QueryBuilder rewritten = TransportRollupSearchAction.rewriteQuery(original, caps); assertThat(rewritten, instanceOf(TermsQueryBuilder.class)); assertNotSame(rewritten, original); - assertThat(((TermsQueryBuilder)rewritten).fieldName(), equalTo("foo.terms.value")); - assertThat(((TermsQueryBuilder)rewritten).values(), equalTo(Arrays.asList("bar", "baz"))); + assertThat(((TermsQueryBuilder) rewritten).fieldName(), equalTo("foo.terms.value")); + assertThat(((TermsQueryBuilder) rewritten).values(), equalTo(Arrays.asList("bar", "baz"))); } public void testCompounds() { final GroupConfig groupConfig = new GroupConfig( - new DateHistogramGroupConfig.CalendarInterval("foo", new DateHistogramInterval("1h"))); - final RollupJobConfig config = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, groupConfig, emptyList(), null); + new DateHistogramGroupConfig.CalendarInterval("foo", new DateHistogramInterval("1h")) + ); + final RollupJobConfig config = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, groupConfig, emptyList(), null); RollupJobCaps cap = new RollupJobCaps(config); Set caps = new HashSet<>(); caps.add(cap); @@ -197,13 +214,14 @@ public void testCompounds() { builder.must(getQueryBuilder(2)); QueryBuilder rewritten = TransportRollupSearchAction.rewriteQuery(builder, caps); assertThat(rewritten, instanceOf(BoolQueryBuilder.class)); - assertThat(((BoolQueryBuilder)rewritten).must().size(), equalTo(1)); + assertThat(((BoolQueryBuilder) rewritten).must().size(), equalTo(1)); } public void testMatchAll() { final GroupConfig groupConfig = new GroupConfig( - new DateHistogramGroupConfig.CalendarInterval("foo", new DateHistogramInterval("1h"))); - final RollupJobConfig config = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, groupConfig, emptyList(), null); + new DateHistogramGroupConfig.CalendarInterval("foo", new DateHistogramInterval("1h")) + ); + final RollupJobConfig config = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, groupConfig, emptyList(), null); RollupJobCaps cap = new RollupJobCaps(config); Set caps = new HashSet<>(); caps.add(cap); @@ -214,15 +232,25 @@ public void testMatchAll() { public void testAmbiguousResolution() { final TermsGroupConfig terms = new TermsGroupConfig("foo"); final GroupConfig groupConfig = new GroupConfig( - new DateHistogramGroupConfig.CalendarInterval("foo", new DateHistogramInterval("1h")), null, terms); - final RollupJobConfig config = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, groupConfig, emptyList(), null); + new DateHistogramGroupConfig.CalendarInterval("foo", new DateHistogramInterval("1h")), + null, + terms + ); + final RollupJobConfig config = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, groupConfig, emptyList(), null); 
RollupJobCaps cap = new RollupJobCaps(config); Set caps = new HashSet<>(); caps.add(cap); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, - () -> TransportRollupSearchAction.rewriteQuery(new RangeQueryBuilder("foo").gt(1), caps)); - assertThat(e.getMessage(), equalTo("Ambiguous field name resolution when mapping to rolled fields. " + - "Field name [foo] was mapped to: [foo.date_histogram.timestamp,foo.terms.value].")); + IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, + () -> TransportRollupSearchAction.rewriteQuery(new RangeQueryBuilder("foo").gt(1), caps) + ); + assertThat( + e.getMessage(), + equalTo( + "Ambiguous field name resolution when mapping to rolled fields. " + + "Field name [foo] was mapped to: [foo.date_histogram.timestamp,foo.terms.value]." + ) + ); } public static QueryBuilder getQueryBuilder(int levels) { @@ -230,7 +258,7 @@ public static QueryBuilder getQueryBuilder(int levels) { return ESTestCase.randomBoolean() ? new MatchAllQueryBuilder() : new RangeQueryBuilder("foo").gt(1); } - int choice = ESTestCase.randomIntBetween(0,5); + int choice = ESTestCase.randomIntBetween(0, 5); if (choice == 0) { BoolQueryBuilder b = new BoolQueryBuilder(); b.must(getQueryBuilder(levels - 1)); @@ -257,70 +285,74 @@ public static QueryBuilder getQueryBuilder(int levels) { } public void testPostFilter() { - String[] normalIndices = new String[]{randomAlphaOfLength(10)}; - String[] rollupIndices = new String[]{randomAlphaOfLength(10)}; - TransportRollupSearchAction.RollupSearchContext ctx - = new TransportRollupSearchAction.RollupSearchContext(normalIndices, rollupIndices, Collections.emptySet()); + String[] normalIndices = new String[] { randomAlphaOfLength(10) }; + String[] rollupIndices = new String[] { randomAlphaOfLength(10) }; + TransportRollupSearchAction.RollupSearchContext ctx = new TransportRollupSearchAction.RollupSearchContext( + normalIndices, + rollupIndices, + Collections.emptySet() + ); SearchSourceBuilder source = new SearchSourceBuilder(); source.aggregation(new DateHistogramAggregationBuilder("foo").field("foo").fixedInterval(new DateHistogramInterval("123ms"))); source.postFilter(new TermQueryBuilder("foo", "bar")); source.size(0); SearchRequest request = new SearchRequest(normalIndices, source); NamedWriteableRegistry registry = mock(NamedWriteableRegistry.class); - Exception e = expectThrows(IllegalArgumentException.class, - () -> TransportRollupSearchAction.createMSearchRequest(request, registry, ctx)); + Exception e = expectThrows( + IllegalArgumentException.class, + () -> TransportRollupSearchAction.createMSearchRequest(request, registry, ctx) + ); assertThat(e.getMessage(), equalTo("Rollup search does not support post filtering.")); } public void testSuggest() { - String[] normalIndices = new String[]{ESTestCase.randomAlphaOfLength(10)}; + String[] normalIndices = new String[] { ESTestCase.randomAlphaOfLength(10) }; SearchSourceBuilder source = new SearchSourceBuilder(); source.suggest(new SuggestBuilder()); source.size(0); SearchRequest request = new SearchRequest(normalIndices, source); - Exception e = expectThrows(IllegalArgumentException.class, - () -> TransportRollupSearchAction.validateSearchRequest(request)); + Exception e = expectThrows(IllegalArgumentException.class, () -> TransportRollupSearchAction.validateSearchRequest(request)); assertThat(e.getMessage(), equalTo("Rollup search does not support suggestors.")); } public void testHighlighters() { - String[] normalIndices = new 
String[]{ESTestCase.randomAlphaOfLength(10)}; + String[] normalIndices = new String[] { ESTestCase.randomAlphaOfLength(10) }; SearchSourceBuilder source = new SearchSourceBuilder(); source.highlighter(new HighlightBuilder()); source.size(0); SearchRequest request = new SearchRequest(normalIndices, source); - Exception e = expectThrows(IllegalArgumentException.class, - () -> TransportRollupSearchAction.validateSearchRequest(request)); + Exception e = expectThrows(IllegalArgumentException.class, () -> TransportRollupSearchAction.validateSearchRequest(request)); assertThat(e.getMessage(), equalTo("Rollup search does not support highlighting.")); } public void testProfiling() { - String[] normalIndices = new String[]{ESTestCase.randomAlphaOfLength(10)}; + String[] normalIndices = new String[] { ESTestCase.randomAlphaOfLength(10) }; SearchSourceBuilder source = new SearchSourceBuilder(); source.profile(true); source.size(0); SearchRequest request = new SearchRequest(normalIndices, source); - Exception e = expectThrows(IllegalArgumentException.class, - () -> TransportRollupSearchAction.validateSearchRequest(request)); + Exception e = expectThrows(IllegalArgumentException.class, () -> TransportRollupSearchAction.validateSearchRequest(request)); assertThat(e.getMessage(), equalTo("Rollup search does not support profiling at the moment.")); } public void testExplain() { - String[] normalIndices = new String[]{ESTestCase.randomAlphaOfLength(10)}; + String[] normalIndices = new String[] { ESTestCase.randomAlphaOfLength(10) }; SearchSourceBuilder source = new SearchSourceBuilder(); source.explain(true); source.size(0); SearchRequest request = new SearchRequest(normalIndices, source); - Exception e = expectThrows(IllegalArgumentException.class, - () -> TransportRollupSearchAction.validateSearchRequest(request)); + Exception e = expectThrows(IllegalArgumentException.class, () -> TransportRollupSearchAction.validateSearchRequest(request)); assertThat(e.getMessage(), equalTo("Rollup search does not support explaining.")); } public void testNoRollupAgg() { - String[] normalIndices = new String[]{}; - String[] rollupIndices = new String[]{randomAlphaOfLength(10)}; - TransportRollupSearchAction.RollupSearchContext ctx - = new TransportRollupSearchAction.RollupSearchContext(normalIndices, rollupIndices, Collections.emptySet()); + String[] normalIndices = new String[] {}; + String[] rollupIndices = new String[] { randomAlphaOfLength(10) }; + TransportRollupSearchAction.RollupSearchContext ctx = new TransportRollupSearchAction.RollupSearchContext( + normalIndices, + rollupIndices, + Collections.emptySet() + ); SearchSourceBuilder source = new SearchSourceBuilder(); source.query(new MatchAllQueryBuilder()); source.size(0); @@ -331,27 +363,34 @@ public void testNoRollupAgg() { assertThat(msearch.requests().get(0), equalTo(request)); } - public void testNoLiveNoRollup() { String[] normalIndices = new String[0]; String[] rollupIndices = new String[0]; - TransportRollupSearchAction.RollupSearchContext ctx - = new TransportRollupSearchAction.RollupSearchContext(normalIndices, rollupIndices, Collections.emptySet()); + TransportRollupSearchAction.RollupSearchContext ctx = new TransportRollupSearchAction.RollupSearchContext( + normalIndices, + rollupIndices, + Collections.emptySet() + ); SearchSourceBuilder source = new SearchSourceBuilder(); source.query(new MatchAllQueryBuilder()); source.size(0); SearchRequest request = new SearchRequest(normalIndices, source); NamedWriteableRegistry registry = 
mock(NamedWriteableRegistry.class); - Exception e = expectThrows(IllegalArgumentException.class, - () -> TransportRollupSearchAction.createMSearchRequest(request, registry, ctx)); + Exception e = expectThrows( + IllegalArgumentException.class, + () -> TransportRollupSearchAction.createMSearchRequest(request, registry, ctx) + ); assertThat(e.getMessage(), equalTo("Must specify at least one rollup index in _rollup_search API")); } public void testLiveOnlyCreateMSearch() { - String[] normalIndices = new String[]{randomAlphaOfLength(10)}; + String[] normalIndices = new String[] { randomAlphaOfLength(10) }; String[] rollupIndices = new String[0]; - TransportRollupSearchAction.RollupSearchContext ctx - = new TransportRollupSearchAction.RollupSearchContext(normalIndices, rollupIndices, Collections.emptySet()); + TransportRollupSearchAction.RollupSearchContext ctx = new TransportRollupSearchAction.RollupSearchContext( + normalIndices, + rollupIndices, + Collections.emptySet() + ); SearchSourceBuilder source = new SearchSourceBuilder(); source.query(new MatchAllQueryBuilder()); source.size(0); @@ -364,23 +403,29 @@ public void testLiveOnlyCreateMSearch() { public void testGood() { final GroupConfig groupConfig = new GroupConfig( - new DateHistogramGroupConfig.CalendarInterval("foo", new DateHistogramInterval("1h"))); - final RollupJobConfig config = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, groupConfig, emptyList(), null); + new DateHistogramGroupConfig.CalendarInterval("foo", new DateHistogramInterval("1h")) + ); + final RollupJobConfig config = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, groupConfig, emptyList(), null); RollupJobCaps cap = new RollupJobCaps(config); Set caps = singleton(cap); - String[] normalIndices = new String[]{ESTestCase.randomAlphaOfLength(10)}; - String[] rollupIndices = new String[]{ESTestCase.randomAlphaOfLength(10)}; - String[] combinedIndices = new String[]{normalIndices[0], rollupIndices[0]}; + String[] normalIndices = new String[] { ESTestCase.randomAlphaOfLength(10) }; + String[] rollupIndices = new String[] { ESTestCase.randomAlphaOfLength(10) }; + String[] combinedIndices = new String[] { normalIndices[0], rollupIndices[0] }; - TransportRollupSearchAction.RollupSearchContext ctx - = new TransportRollupSearchAction.RollupSearchContext(normalIndices, rollupIndices, caps); + TransportRollupSearchAction.RollupSearchContext ctx = new TransportRollupSearchAction.RollupSearchContext( + normalIndices, + rollupIndices, + caps + ); SearchSourceBuilder source = new SearchSourceBuilder(); source.query(getQueryBuilder(1)); source.size(0); - source.aggregation(new DateHistogramAggregationBuilder("foo").field("foo") - .calendarInterval(config.getGroupConfig().getDateHistogram().getInterval())); + source.aggregation( + new DateHistogramAggregationBuilder("foo").field("foo") + .calendarInterval(config.getGroupConfig().getDateHistogram().getInterval()) + ); SearchRequest request = new SearchRequest(combinedIndices, source); MultiSearchRequest msearch = TransportRollupSearchAction.createMSearchRequest(request, namedWriteableRegistry, ctx); @@ -394,13 +439,13 @@ public void testGood() { SearchRequest rollup = msearch.requests().get(1); assertThat(rollup.indices().length, equalTo(1)); assertThat(rollup.indices()[0], equalTo(rollupIndices[0])); - assert(rollup.source().aggregations().getAggregatorFactories().iterator().next() instanceof FilterAggregationBuilder); + assert 
(rollup.source().aggregations().getAggregatorFactories().iterator().next() instanceof FilterAggregationBuilder); } public void testGoodButNullQuery() { - String[] normalIndices = new String[]{randomAlphaOfLength(10)}; - String[] rollupIndices = new String[]{randomAlphaOfLength(10)}; - String[] combinedIndices = new String[]{normalIndices[0], rollupIndices[0]}; + String[] normalIndices = new String[] { randomAlphaOfLength(10) }; + String[] rollupIndices = new String[] { randomAlphaOfLength(10) }; + String[] combinedIndices = new String[] { normalIndices[0], rollupIndices[0] }; SearchSourceBuilder source = new SearchSourceBuilder(); source.query(null); @@ -409,12 +454,16 @@ public void testGoodButNullQuery() { SearchRequest request = new SearchRequest(combinedIndices, source); final GroupConfig groupConfig = new GroupConfig( - new DateHistogramGroupConfig.CalendarInterval("foo", new DateHistogramInterval("1d"), null, DateTimeZone.UTC.getID())); - final RollupJobConfig job = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, groupConfig, emptyList(), null); + new DateHistogramGroupConfig.CalendarInterval("foo", new DateHistogramInterval("1d"), null, DateTimeZone.UTC.getID()) + ); + final RollupJobConfig job = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, groupConfig, emptyList(), null); Set caps = singleton(new RollupJobCaps(job)); - TransportRollupSearchAction.RollupSearchContext ctx - = new TransportRollupSearchAction.RollupSearchContext(normalIndices, rollupIndices, caps); + TransportRollupSearchAction.RollupSearchContext ctx = new TransportRollupSearchAction.RollupSearchContext( + normalIndices, + rollupIndices, + caps + ); MultiSearchRequest msearch = TransportRollupSearchAction.createMSearchRequest(request, namedWriteableRegistry, ctx); assertThat(msearch.requests().size(), equalTo(2)); @@ -427,37 +476,52 @@ public void testGoodButNullQuery() { SearchRequest rollup = msearch.requests().get(1); assertThat(rollup.indices().length, equalTo(1)); assertThat(rollup.indices()[0], equalTo(rollupIndices[0])); - assert(rollup.source().aggregations().getAggregatorFactories().iterator().next() instanceof FilterAggregationBuilder); + assert (rollup.source().aggregations().getAggregatorFactories().iterator().next() instanceof FilterAggregationBuilder); } public void testTwoMatchingJobs() { final GroupConfig groupConfig = new GroupConfig( - new DateHistogramGroupConfig.CalendarInterval("foo", new DateHistogramInterval("1h")), null, null); - final RollupJobConfig job = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, groupConfig, emptyList(), null); + new DateHistogramGroupConfig.CalendarInterval("foo", new DateHistogramInterval("1h")), + null, + null + ); + final RollupJobConfig job = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, groupConfig, emptyList(), null); RollupJobCaps cap = new RollupJobCaps(job); // so that the jobs aren't exactly equal final List metricConfigs = ConfigTestHelpers.randomMetricsConfigs(random()); - final RollupJobConfig job2 = - new RollupJobConfig("foo2", "index", job.getRollupIndex(), "*/5 * * * * ?", 10, groupConfig, metricConfigs, null); + final RollupJobConfig job2 = new RollupJobConfig( + "foo2", + "index", + job.getRollupIndex(), + "*/5 * * * * ?", + 10, + groupConfig, + metricConfigs, + null + ); RollupJobCaps cap2 = new RollupJobCaps(job2); Set caps = new HashSet<>(2); caps.add(cap); caps.add(cap2); - String[] normalIndices = new String[]{ESTestCase.randomAlphaOfLength(10)}; - String[] 
rollupIndices = new String[]{ESTestCase.randomAlphaOfLength(10)}; - String[] combinedIndices = new String[]{normalIndices[0], rollupIndices[0]}; + String[] normalIndices = new String[] { ESTestCase.randomAlphaOfLength(10) }; + String[] rollupIndices = new String[] { ESTestCase.randomAlphaOfLength(10) }; + String[] combinedIndices = new String[] { normalIndices[0], rollupIndices[0] }; - TransportRollupSearchAction.RollupSearchContext ctx - = new TransportRollupSearchAction.RollupSearchContext(normalIndices, rollupIndices, caps); + TransportRollupSearchAction.RollupSearchContext ctx = new TransportRollupSearchAction.RollupSearchContext( + normalIndices, + rollupIndices, + caps + ); SearchSourceBuilder source = new SearchSourceBuilder(); source.query(getQueryBuilder(1)); source.size(0); - source.aggregation(new DateHistogramAggregationBuilder("foo").field("foo") - .calendarInterval(job.getGroupConfig().getDateHistogram().getInterval())); + source.aggregation( + new DateHistogramAggregationBuilder("foo").field("foo").calendarInterval(job.getGroupConfig().getDateHistogram().getInterval()) + ); SearchRequest request = new SearchRequest(combinedIndices, source); MultiSearchRequest msearch = TransportRollupSearchAction.createMSearchRequest(request, namedWriteableRegistry, ctx); @@ -471,40 +535,53 @@ public void testTwoMatchingJobs() { SearchRequest rollup = msearch.requests().get(1); assertThat(rollup.indices().length, equalTo(1)); assertThat(rollup.indices()[0], equalTo(rollupIndices[0])); - assert(rollup.source().aggregations().getAggregatorFactories().iterator().next() instanceof FilterAggregationBuilder); + assert (rollup.source().aggregations().getAggregatorFactories().iterator().next() instanceof FilterAggregationBuilder); assertThat(msearch.requests().size(), equalTo(2)); } public void testTwoMatchingJobsOneBetter() { - final GroupConfig groupConfig = - new GroupConfig(new DateHistogramGroupConfig.CalendarInterval("foo", new DateHistogramInterval("1h")), null, null); - final RollupJobConfig job = - new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, groupConfig, emptyList(), null); + final GroupConfig groupConfig = new GroupConfig( + new DateHistogramGroupConfig.CalendarInterval("foo", new DateHistogramInterval("1h")), + null, + null + ); + final RollupJobConfig job = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, groupConfig, emptyList(), null); RollupJobCaps cap = new RollupJobCaps(job); - final GroupConfig groupConfig2 = - new GroupConfig(groupConfig.getDateHistogram(), randomHistogramGroupConfig(random()), null); - final RollupJobConfig job2 = - new RollupJobConfig("foo2", "index", job.getRollupIndex(), "*/5 * * * * ?", 10, groupConfig2, emptyList(), null); + final GroupConfig groupConfig2 = new GroupConfig(groupConfig.getDateHistogram(), randomHistogramGroupConfig(random()), null); + final RollupJobConfig job2 = new RollupJobConfig( + "foo2", + "index", + job.getRollupIndex(), + "*/5 * * * * ?", + 10, + groupConfig2, + emptyList(), + null + ); RollupJobCaps cap2 = new RollupJobCaps(job2); Set caps = new HashSet<>(2); caps.add(cap); caps.add(cap2); - String[] normalIndices = new String[]{ESTestCase.randomAlphaOfLength(10)}; - String[] rollupIndices = new String[]{ESTestCase.randomAlphaOfLength(10)}; - String[] combinedIndices = new String[]{normalIndices[0], rollupIndices[0]}; + String[] normalIndices = new String[] { ESTestCase.randomAlphaOfLength(10) }; + String[] rollupIndices = new String[] { ESTestCase.randomAlphaOfLength(10) }; + String[] 
combinedIndices = new String[] { normalIndices[0], rollupIndices[0] }; - TransportRollupSearchAction.RollupSearchContext ctx - = new TransportRollupSearchAction.RollupSearchContext(normalIndices, rollupIndices, caps); + TransportRollupSearchAction.RollupSearchContext ctx = new TransportRollupSearchAction.RollupSearchContext( + normalIndices, + rollupIndices, + caps + ); SearchSourceBuilder source = new SearchSourceBuilder(); source.query(getQueryBuilder(1)); source.size(0); - source.aggregation(new DateHistogramAggregationBuilder("foo").field("foo") - .calendarInterval(job.getGroupConfig().getDateHistogram().getInterval())); + source.aggregation( + new DateHistogramAggregationBuilder("foo").field("foo").calendarInterval(job.getGroupConfig().getDateHistogram().getInterval()) + ); SearchRequest request = new SearchRequest(combinedIndices, source); MultiSearchRequest msearch = TransportRollupSearchAction.createMSearchRequest(request, namedWriteableRegistry, ctx); @@ -518,74 +595,77 @@ public void testTwoMatchingJobsOneBetter() { SearchRequest rollup = msearch.requests().get(1); assertThat(rollup.indices().length, equalTo(1)); assertThat(rollup.indices()[0], equalTo(rollupIndices[0])); - assert(rollup.source().aggregations().getAggregatorFactories().iterator().next() instanceof FilterAggregationBuilder); - + assert (rollup.source().aggregations().getAggregatorFactories().iterator().next() instanceof FilterAggregationBuilder); // The executed query should match the first job ("foo") because the second job contained a histo and the first didn't, // so the first job will be "better" - BoolQueryBuilder bool1 = new BoolQueryBuilder() - .must(TransportRollupSearchAction.rewriteQuery(request.source().query(), caps)) - .filter(new TermQueryBuilder(RollupField.formatMetaField(RollupField.ID.getPreferredName()), "foo")) - .filter(new TermsQueryBuilder(RollupField.formatMetaField(RollupField.VERSION_FIELD), - new long[]{Rollup.ROLLUP_VERSION_V1, Rollup.ROLLUP_VERSION_V2})); + BoolQueryBuilder bool1 = new BoolQueryBuilder().must(TransportRollupSearchAction.rewriteQuery(request.source().query(), caps)) + .filter(new TermQueryBuilder(RollupField.formatMetaField(RollupField.ID.getPreferredName()), "foo")) + .filter( + new TermsQueryBuilder( + RollupField.formatMetaField(RollupField.VERSION_FIELD), + new long[] { Rollup.ROLLUP_VERSION_V1, Rollup.ROLLUP_VERSION_V2 } + ) + ); assertThat(msearch.requests().get(1).source().query(), equalTo(bool1)); } public void testNoIndicesToSeparate() { - String[] indices = new String[]{}; + String[] indices = new String[] {}; ImmutableOpenMap meta = ImmutableOpenMap.builder().build(); expectThrows(IllegalArgumentException.class, () -> TransportRollupSearchAction.separateIndices(indices, meta)); } public void testSeparateAll() { - String[] indices = new String[]{Metadata.ALL, "foo"}; + String[] indices = new String[] { Metadata.ALL, "foo" }; ImmutableOpenMap meta = ImmutableOpenMap.builder().build(); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, - () -> TransportRollupSearchAction.separateIndices(indices, meta)); + IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, + () -> TransportRollupSearchAction.separateIndices(indices, meta) + ); assertThat(e.getMessage(), equalTo("Searching _all via RollupSearch endpoint is not supported at this time.")); } public void testEmptyMetadata() { - String[] indices = new String[]{"foo", "bar"}; + String[] indices = new String[] { "foo", "bar" }; ImmutableOpenMap meta = 
ImmutableOpenMap.builder().build(); - TransportRollupSearchAction.RollupSearchContext result - = TransportRollupSearchAction.separateIndices(indices, meta); + TransportRollupSearchAction.RollupSearchContext result = TransportRollupSearchAction.separateIndices(indices, meta); assertThat(result.getLiveIndices().length, equalTo(2)); assertThat(result.getRollupIndices().length, equalTo(0)); assertThat(result.getJobCaps().size(), equalTo(0)); } public void testNoMatchingIndexInMetadata() { - String[] indices = new String[]{"foo"}; + String[] indices = new String[] { "foo" }; IndexMetadata indexMetadata = mock(IndexMetadata.class); ImmutableOpenMap.Builder meta = ImmutableOpenMap.builder(1); meta.put("bar", indexMetadata); - TransportRollupSearchAction.RollupSearchContext result - = TransportRollupSearchAction.separateIndices(indices, meta.build()); + TransportRollupSearchAction.RollupSearchContext result = TransportRollupSearchAction.separateIndices(indices, meta.build()); assertThat(result.getLiveIndices().length, equalTo(1)); assertThat(result.getRollupIndices().length, equalTo(0)); assertThat(result.getJobCaps().size(), equalTo(0)); } public void testMatchingIndexInMetadata() throws IOException { - String[] indices = new String[]{"foo"}; + String[] indices = new String[] { "foo" }; String jobName = randomAlphaOfLength(5); RollupJobConfig job = ConfigTestHelpers.randomRollupJobConfig(random(), jobName); - MappingMetadata mappingMeta = new MappingMetadata(RollupField.TYPE_NAME, - Collections.singletonMap(RollupField.TYPE_NAME, - Collections.singletonMap("_meta", - Collections.singletonMap(RollupField.ROLLUP_META, - Collections.singletonMap(jobName, job))))); + MappingMetadata mappingMeta = new MappingMetadata( + RollupField.TYPE_NAME, + Collections.singletonMap( + RollupField.TYPE_NAME, + Collections.singletonMap("_meta", Collections.singletonMap(RollupField.ROLLUP_META, Collections.singletonMap(jobName, job))) + ) + ); IndexMetadata meta = Mockito.mock(IndexMetadata.class); when(meta.mapping()).thenReturn(mappingMeta); ImmutableOpenMap.Builder metaMap = ImmutableOpenMap.builder(1); metaMap.put("foo", meta); - TransportRollupSearchAction.RollupSearchContext result - = TransportRollupSearchAction.separateIndices(indices, metaMap.build()); + TransportRollupSearchAction.RollupSearchContext result = TransportRollupSearchAction.separateIndices(indices, metaMap.build()); assertThat(result.getLiveIndices().length, equalTo(0)); assertThat(result.getRollupIndices().length, equalTo(1)); assertThat(result.getRollupIndices()[0], equalTo("foo")); @@ -593,41 +673,44 @@ public void testMatchingIndexInMetadata() throws IOException { } public void testLiveOnlyProcess() throws Exception { - String[] indices = new String[]{"foo"}; + String[] indices = new String[] { "foo" }; IndexMetadata indexMetadata = mock(IndexMetadata.class); ImmutableOpenMap.Builder meta = ImmutableOpenMap.builder(1); meta.put("bar", indexMetadata); - TransportRollupSearchAction.RollupSearchContext result - = TransportRollupSearchAction.separateIndices(indices, meta.build()); + TransportRollupSearchAction.RollupSearchContext result = TransportRollupSearchAction.separateIndices(indices, meta.build()); SearchResponse response = mock(SearchResponse.class); MultiSearchResponse.Item item = new MultiSearchResponse.Item(response, null); - MultiSearchResponse msearchResponse = new MultiSearchResponse(new MultiSearchResponse.Item[]{item}, 1); + MultiSearchResponse msearchResponse = new MultiSearchResponse(new MultiSearchResponse.Item[] { item }, 
1); - SearchResponse r = TransportRollupSearchAction.processResponses(result, - msearchResponse, mock(InternalAggregation.ReduceContext.class)); + SearchResponse r = TransportRollupSearchAction.processResponses( + result, + msearchResponse, + mock(InternalAggregation.ReduceContext.class) + ); assertThat(r, equalTo(response)); } public void testRollupOnly() throws Exception { - String[] indices = new String[]{"foo"}; + String[] indices = new String[] { "foo" }; String jobName = randomAlphaOfLength(5); RollupJobConfig job = ConfigTestHelpers.randomRollupJobConfig(random(), jobName); - MappingMetadata mappingMeta = new MappingMetadata(RollupField.TYPE_NAME, - Collections.singletonMap(RollupField.TYPE_NAME, - Collections.singletonMap("_meta", - Collections.singletonMap(RollupField.ROLLUP_META, - Collections.singletonMap(jobName, job))))); + MappingMetadata mappingMeta = new MappingMetadata( + RollupField.TYPE_NAME, + Collections.singletonMap( + RollupField.TYPE_NAME, + Collections.singletonMap("_meta", Collections.singletonMap(RollupField.ROLLUP_META, Collections.singletonMap(jobName, job))) + ) + ); IndexMetadata indexMeta = Mockito.mock(IndexMetadata.class); when(indexMeta.mapping()).thenReturn(mappingMeta); ImmutableOpenMap.Builder metaMap = ImmutableOpenMap.builder(1); metaMap.put("foo", indexMeta); - TransportRollupSearchAction.RollupSearchContext result - = TransportRollupSearchAction.separateIndices(indices, metaMap.build()); + TransportRollupSearchAction.RollupSearchContext result = TransportRollupSearchAction.separateIndices(indices, metaMap.build()); SearchResponse response = mock(SearchResponse.class); when(response.getTook()).thenReturn(new TimeValue(100)); @@ -660,10 +743,13 @@ public void testRollupOnly() throws Exception { Aggregations mockAggs = InternalAggregations.from(aggTree); when(response.getAggregations()).thenReturn(mockAggs); MultiSearchResponse.Item item = new MultiSearchResponse.Item(response, null); - MultiSearchResponse msearchResponse = new MultiSearchResponse(new MultiSearchResponse.Item[]{item}, 1); + MultiSearchResponse msearchResponse = new MultiSearchResponse(new MultiSearchResponse.Item[] { item }, 1); - SearchResponse r = TransportRollupSearchAction.processResponses(result, - msearchResponse, mock(InternalAggregation.ReduceContext.class)); + SearchResponse r = TransportRollupSearchAction.processResponses( + result, + msearchResponse, + mock(InternalAggregation.ReduceContext.class) + ); assertNotNull(r); Aggregations responseAggs = r.getAggregations(); @@ -672,16 +758,18 @@ public void testRollupOnly() throws Exception { } public void testTooManyRollups() throws IOException { - String[] indices = new String[]{"foo", "bar"}; + String[] indices = new String[] { "foo", "bar" }; String jobName = randomAlphaOfLength(5); RollupJobConfig job = ConfigTestHelpers.randomRollupJobConfig(random(), jobName); - MappingMetadata mappingMeta = new MappingMetadata(RollupField.TYPE_NAME, - Collections.singletonMap(RollupField.TYPE_NAME, - Collections.singletonMap("_meta", - Collections.singletonMap(RollupField.ROLLUP_META, - Collections.singletonMap(jobName, job))))); + MappingMetadata mappingMeta = new MappingMetadata( + RollupField.TYPE_NAME, + Collections.singletonMap( + RollupField.TYPE_NAME, + Collections.singletonMap("_meta", Collections.singletonMap(RollupField.ROLLUP_META, Collections.singletonMap(jobName, job))) + ) + ); IndexMetadata indexMeta = Mockito.mock(IndexMetadata.class); when(indexMeta.mapping()).thenReturn(mappingMeta); @@ -689,33 +777,47 @@ public void 
testTooManyRollups() throws IOException { ImmutableOpenMap.Builder metaMap = ImmutableOpenMap.builder(2); metaMap.put("foo", indexMeta); metaMap.put("bar", indexMeta); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, - () -> TransportRollupSearchAction.separateIndices(indices, metaMap.build())); - assertThat(e.getMessage(), equalTo("RollupSearch currently only supports searching one rollup index at a time. " + - "Found the following rollup indices: [foo, bar]")); + IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, + () -> TransportRollupSearchAction.separateIndices(indices, metaMap.build()) + ); + assertThat( + e.getMessage(), + equalTo( + "RollupSearch currently only supports searching one rollup index at a time. " + + "Found the following rollup indices: [foo, bar]" + ) + ); } public void testEmptyMsearch() { - TransportRollupSearchAction.RollupSearchContext result - = new TransportRollupSearchAction.RollupSearchContext(new String[0], new String[0], Collections.emptySet()); + TransportRollupSearchAction.RollupSearchContext result = new TransportRollupSearchAction.RollupSearchContext( + new String[0], + new String[0], + Collections.emptySet() + ); MultiSearchResponse msearchResponse = new MultiSearchResponse(new MultiSearchResponse.Item[0], 1); - RuntimeException e = expectThrows(RuntimeException.class, () -> TransportRollupSearchAction.processResponses(result, - msearchResponse, mock(InternalAggregation.ReduceContext.class))); + RuntimeException e = expectThrows( + RuntimeException.class, + () -> TransportRollupSearchAction.processResponses(result, msearchResponse, mock(InternalAggregation.ReduceContext.class)) + ); assertThat(e.getMessage(), equalTo("MSearch response was empty, cannot unroll RollupSearch results")); } public void testBoth() throws Exception { - String[] indices = new String[]{"foo", "bar"}; + String[] indices = new String[] { "foo", "bar" }; String jobName = randomAlphaOfLength(5); RollupJobConfig job = ConfigTestHelpers.randomRollupJobConfig(random(), jobName); - MappingMetadata mappingMeta = new MappingMetadata(RollupField.TYPE_NAME, - Collections.singletonMap(RollupField.TYPE_NAME, - Collections.singletonMap("_meta", - Collections.singletonMap(RollupField.ROLLUP_META, - Collections.singletonMap(jobName, job))))); + MappingMetadata mappingMeta = new MappingMetadata( + RollupField.TYPE_NAME, + Collections.singletonMap( + RollupField.TYPE_NAME, + Collections.singletonMap("_meta", Collections.singletonMap(RollupField.ROLLUP_META, Collections.singletonMap(jobName, job))) + ) + ); IndexMetadata indexMeta = Mockito.mock(IndexMetadata.class); when(indexMeta.mapping()).thenReturn(mappingMeta); @@ -728,9 +830,10 @@ public void testBoth() throws Exception { ImmutableOpenMap.Builder metaMap = ImmutableOpenMap.builder(2); metaMap.put("foo", indexMeta); metaMap.put("bar", liveIndexMeta); - TransportRollupSearchAction.RollupSearchContext separateIndices - = TransportRollupSearchAction.separateIndices(indices, metaMap.build()); - + TransportRollupSearchAction.RollupSearchContext separateIndices = TransportRollupSearchAction.separateIndices( + indices, + metaMap.build() + ); SearchResponse protoResponse = mock(SearchResponse.class); when(protoResponse.getTook()).thenReturn(new TimeValue(100)); @@ -773,11 +876,16 @@ public void testBoth() throws Exception { when(responseWithout.getAggregations()).thenReturn(mockAggsWithout); MultiSearchResponse.Item rolledResponse = new MultiSearchResponse.Item(responseWithout, null); - 
MultiSearchResponse msearchResponse - = new MultiSearchResponse(new MultiSearchResponse.Item[]{unrolledResponse, rolledResponse}, 123); + MultiSearchResponse msearchResponse = new MultiSearchResponse( + new MultiSearchResponse.Item[] { unrolledResponse, rolledResponse }, + 123 + ); - SearchResponse response = TransportRollupSearchAction.processResponses(separateIndices, msearchResponse, - mock(InternalAggregation.ReduceContext.class)); + SearchResponse response = TransportRollupSearchAction.processResponses( + separateIndices, + msearchResponse, + mock(InternalAggregation.ReduceContext.class) + ); assertNotNull(response); Aggregations responseAggs = response.getAggregations(); diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/StartJobActionRequestTests.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/StartJobActionRequestTests.java index 237bffde04a04..578b14905752e 100644 --- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/StartJobActionRequestTests.java +++ b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/StartJobActionRequestTests.java @@ -22,4 +22,3 @@ protected Writeable.Reader instanceReader() { return Request::new; } } - diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/TransportTaskHelperTests.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/TransportTaskHelperTests.java index 03202cbe9fc54..2ca199bf87942 100644 --- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/TransportTaskHelperTests.java +++ b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/TransportTaskHelperTests.java @@ -48,9 +48,7 @@ public void testProcessRequestNoMatching() { Map tasks = getRandomTasks(); when(taskManager.getTasks()).thenReturn(tasks); - Consumer consumer = rollupJobTask -> { - fail("Should not have reached consumer"); - }; + Consumer consumer = rollupJobTask -> { fail("Should not have reached consumer"); }; TransportTaskHelper.doProcessTasks("foo", consumer, taskManager); } @@ -71,22 +69,30 @@ public void testProcessRequestMultipleMatching() { when(task2.getConfig()).thenReturn(job2); tasks.put(2L, task2); - Consumer consumer = rollupJobTask -> { - fail("Should not have reached consumer"); - }; - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, - () -> TransportTaskHelper.doProcessTasks("foo", consumer, taskManager)); - assertThat(e.getMessage(), equalTo("Found more than one matching task for rollup job [foo] when " + - "there should only be one.")); + Consumer consumer = rollupJobTask -> { fail("Should not have reached consumer"); }; + IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, + () -> TransportTaskHelper.doProcessTasks("foo", consumer, taskManager) + ); + assertThat(e.getMessage(), equalTo("Found more than one matching task for rollup job [foo] when " + "there should only be one.")); } private Map getRandomTasks() { - int num = randomIntBetween(1,10); + int num = randomIntBetween(1, 10); Map tasks = new HashMap<>(num); for (int i = 0; i < num; i++) { Long taskId = randomLongBetween(10, Long.MAX_VALUE); - tasks.put(taskId, new TestTask(taskId, randomAlphaOfLength(10), "test_action", "test_description", - new TaskId("node:123"), Collections.emptyMap())); + tasks.put( + taskId, + new TestTask( + taskId, + randomAlphaOfLength(10), + "test_action", + "test_description", + new TaskId("node:123"), + 
Collections.emptyMap() + ) + ); } return tasks; } diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/job/RollupIndexTests.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/job/RollupIndexTests.java index 628714bbc1e0a..368395ac3b031 100644 --- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/job/RollupIndexTests.java +++ b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/job/RollupIndexTests.java @@ -59,7 +59,7 @@ public void testValidateFieldMatchingNotAggregatable() { } private String getRandomType() { - int n = randomIntBetween(0,8); + int n = randomIntBetween(0, 8); if (n == 0) { return "keyword"; } else if (n == 1) { diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/config/ConfigTests.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/config/ConfigTests.java index f7afde7c2f53c..e7d5d7c365f8f 100644 --- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/config/ConfigTests.java +++ b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/config/ConfigTests.java @@ -27,6 +27,7 @@ import static org.elasticsearch.xpack.core.rollup.ConfigTestHelpers.randomRollupJobConfig; import static org.elasticsearch.xpack.core.rollup.ConfigTestHelpers.randomTermsGroupConfig; import static org.hamcrest.Matchers.equalTo; + //TODO split this into dedicated unit test classes (one for each config object) public class ConfigTests extends ESTestCase { @@ -52,14 +53,15 @@ public void testEmptyGroup() { } public void testNoDateHisto() { - Exception e = expectThrows(IllegalArgumentException.class, - () -> new GroupConfig(null, randomHistogramGroupConfig(random()), randomTermsGroupConfig(random()))); + Exception e = expectThrows( + IllegalArgumentException.class, + () -> new GroupConfig(null, randomHistogramGroupConfig(random()), randomTermsGroupConfig(random())) + ); assertThat(e.getMessage(), equalTo("Date histogram must not be null")); } public void testEmptyDateHistoField() { - Exception e = expectThrows(IllegalArgumentException.class, - () -> new CalendarInterval(null, DateHistogramInterval.HOUR)); + Exception e = expectThrows(IllegalArgumentException.class, () -> new CalendarInterval(null, DateHistogramInterval.HOUR)); assertThat(e.getMessage(), equalTo("Field must be a non-null, non-empty string")); e = expectThrows(IllegalArgumentException.class, () -> new CalendarInterval("", DateHistogramInterval.HOUR)); @@ -87,8 +89,7 @@ public void testDefaultTimeZone() { } public void testUnkownTimeZone() { - Exception e = expectThrows(ZoneRulesException.class, - () -> new CalendarInterval("foo", DateHistogramInterval.HOUR, null, "FOO")); + Exception e = expectThrows(ZoneRulesException.class, () -> new CalendarInterval("foo", DateHistogramInterval.HOUR, null, "FOO")); assertThat(e.getMessage(), equalTo("Unknown time-zone ID: FOO")); } diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/IndexerUtilsTests.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/IndexerUtilsTests.java index 7ab4f72721d7b..f8c0be9e586c2 100644 --- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/IndexerUtilsTests.java +++ b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/IndexerUtilsTests.java @@ -77,13 +77,13 @@ public void testMissingFields() throws IOException { Directory directory = newDirectory(); RandomIndexWriter indexWriter = new 
RandomIndexWriter(random(), directory); - int numDocs = randomIntBetween(1,10); + int numDocs = randomIntBetween(1, 10); for (int i = 0; i < numDocs; i++) { Document document = new Document(); long timestamp = new DateTime().minusDays(i).getMillis(); document.add(new SortedNumericDocValuesField(timestampField, timestamp)); document.add(new LongPoint(timestampField, timestamp)); - document.add(new SortedNumericDocValuesField(valueField, randomIntBetween(1,100))); + document.add(new SortedNumericDocValuesField(valueField, randomIntBetween(1, 100))); indexWriter.addDocument(document); } @@ -96,11 +96,14 @@ public void testMissingFields() throws IOException { MappedFieldType valueFieldType = new NumberFieldMapper.NumberFieldType(valueField, NumberFieldMapper.NumberType.LONG); // Setup the composite agg - DateHistogramGroupConfig dateHistoGroupConfig - = new DateHistogramGroupConfig.CalendarInterval(timestampField, DateHistogramInterval.DAY); - CompositeAggregationBuilder compositeBuilder = - new CompositeAggregationBuilder(RollupIndexer.AGGREGATION_NAME, - RollupIndexer.createValueSourceBuilders(dateHistoGroupConfig)); + DateHistogramGroupConfig dateHistoGroupConfig = new DateHistogramGroupConfig.CalendarInterval( + timestampField, + DateHistogramInterval.DAY + ); + CompositeAggregationBuilder compositeBuilder = new CompositeAggregationBuilder( + RollupIndexer.AGGREGATION_NAME, + RollupIndexer.createValueSourceBuilders(dateHistoGroupConfig) + ); MetricConfig metricConfig = new MetricConfig("does_not_exist", singletonList("max")); List metricAgg = createAggregationBuilders(singletonList(metricConfig)); metricAgg.forEach(compositeBuilder::subAggregation); @@ -134,13 +137,13 @@ public void testCorrectFields() throws IOException { Directory directory = newDirectory(); RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory); - int numDocs = randomIntBetween(1,10); + int numDocs = randomIntBetween(1, 10); for (int i = 0; i < numDocs; i++) { Document document = new Document(); long timestamp = new DateTime().minusDays(i).getMillis(); document.add(new SortedNumericDocValuesField(timestampField, timestamp)); document.add(new LongPoint(timestampField, timestamp)); - document.add(new SortedNumericDocValuesField(valueField, randomIntBetween(1,100))); + document.add(new SortedNumericDocValuesField(valueField, randomIntBetween(1, 100))); indexWriter.addDocument(document); } @@ -153,14 +156,15 @@ public void testCorrectFields() throws IOException { MappedFieldType valueFieldType = new NumberFieldMapper.NumberFieldType(valueField, NumberFieldMapper.NumberType.LONG); // Setup the composite agg - //TODO swap this over to DateHistoConfig.Builder once DateInterval is in - DateHistogramValuesSourceBuilder dateHisto - = new DateHistogramValuesSourceBuilder("the_histo." + DateHistogramAggregationBuilder.NAME) - .field(timestampField) - .fixedInterval(new DateHistogramInterval("1ms")); + // TODO swap this over to DateHistoConfig.Builder once DateInterval is in + DateHistogramValuesSourceBuilder dateHisto = new DateHistogramValuesSourceBuilder( + "the_histo." 
+ DateHistogramAggregationBuilder.NAME + ).field(timestampField).fixedInterval(new DateHistogramInterval("1ms")); - CompositeAggregationBuilder compositeBuilder = new CompositeAggregationBuilder(RollupIndexer.AGGREGATION_NAME, - singletonList(dateHisto)); + CompositeAggregationBuilder compositeBuilder = new CompositeAggregationBuilder( + RollupIndexer.AGGREGATION_NAME, + singletonList(dateHisto) + ); MetricConfig metricConfig = new MetricConfig(valueField, singletonList("max")); List metricAgg = createAggregationBuilders(singletonList(metricConfig)); @@ -180,21 +184,21 @@ public void testCorrectFields() throws IOException { assertThat(docs.size(), equalTo(numDocs)); for (IndexRequest doc : docs) { Map map = doc.sourceAsMap(); - assertNotNull( map.get(valueField + "." + MaxAggregationBuilder.NAME + "." + RollupField.VALUE)); + assertNotNull(map.get(valueField + "." + MaxAggregationBuilder.NAME + "." + RollupField.VALUE)); assertThat(map.get("the_histo." + DateHistogramAggregationBuilder.NAME + "." + RollupField.COUNT_FIELD), equalTo(1)); } } public void testNumericTerms() throws IOException { String indexName = randomAlphaOfLengthBetween(1, 10); - RollupIndexerJobStats stats= new RollupIndexerJobStats(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0); + RollupIndexerJobStats stats = new RollupIndexerJobStats(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0); String valueField = "the_avg"; Directory directory = newDirectory(); RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory); - int numDocs = randomIntBetween(1,10); + int numDocs = randomIntBetween(1, 10); for (int i = 0; i < numDocs; i++) { Document document = new Document(); document.add(new SortedNumericDocValuesField(valueField, i)); @@ -210,10 +214,11 @@ public void testNumericTerms() throws IOException { MappedFieldType valueFieldType = new NumberFieldMapper.NumberFieldType(valueField, NumberFieldMapper.NumberType.LONG); // Setup the composite agg - TermsValuesSourceBuilder terms - = new TermsValuesSourceBuilder("the_terms." + TermsAggregationBuilder.NAME).field(valueField); - CompositeAggregationBuilder compositeBuilder = new CompositeAggregationBuilder(RollupIndexer.AGGREGATION_NAME, - singletonList(terms)); + TermsValuesSourceBuilder terms = new TermsValuesSourceBuilder("the_terms." + TermsAggregationBuilder.NAME).field(valueField); + CompositeAggregationBuilder compositeBuilder = new CompositeAggregationBuilder( + RollupIndexer.AGGREGATION_NAME, + singletonList(terms) + ); MetricConfig metricConfig = new MetricConfig(valueField, singletonList("max")); List metricAgg = createAggregationBuilders(singletonList(metricConfig)); @@ -233,7 +238,7 @@ public void testNumericTerms() throws IOException { assertThat(docs.size(), equalTo(numDocs)); for (IndexRequest doc : docs) { Map map = doc.sourceAsMap(); - assertNotNull( map.get(valueField + "." + MaxAggregationBuilder.NAME + "." + RollupField.VALUE)); + assertNotNull(map.get(valueField + "." + MaxAggregationBuilder.NAME + "." + RollupField.VALUE)); assertThat(map.get("the_terms." + TermsAggregationBuilder.NAME + "." 
+ RollupField.COUNT_FIELD), equalTo(1)); } } @@ -248,13 +253,13 @@ public void testEmptyCounts() throws IOException { Directory directory = newDirectory(); RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory); - int numDocs = randomIntBetween(1,10); + int numDocs = randomIntBetween(1, 10); for (int i = 0; i < numDocs; i++) { Document document = new Document(); long timestamp = new DateTime().minusDays(i).getMillis(); document.add(new SortedNumericDocValuesField(timestampField, timestamp)); document.add(new LongPoint(timestampField, timestamp)); - document.add(new SortedNumericDocValuesField(valueField, randomIntBetween(1,100))); + document.add(new SortedNumericDocValuesField(valueField, randomIntBetween(1, 100))); indexWriter.addDocument(document); } @@ -267,13 +272,14 @@ public void testEmptyCounts() throws IOException { MappedFieldType valueFieldType = new NumberFieldMapper.NumberFieldType(valueField, NumberFieldMapper.NumberType.LONG); // Setup the composite agg - DateHistogramValuesSourceBuilder dateHisto - = new DateHistogramValuesSourceBuilder("the_histo." + DateHistogramAggregationBuilder.NAME) - .field(timestampField) - .calendarInterval(new DateHistogramInterval("1d")); + DateHistogramValuesSourceBuilder dateHisto = new DateHistogramValuesSourceBuilder( + "the_histo." + DateHistogramAggregationBuilder.NAME + ).field(timestampField).calendarInterval(new DateHistogramInterval("1d")); - CompositeAggregationBuilder compositeBuilder = new CompositeAggregationBuilder(RollupIndexer.AGGREGATION_NAME, - singletonList(dateHisto)); + CompositeAggregationBuilder compositeBuilder = new CompositeAggregationBuilder( + RollupIndexer.AGGREGATION_NAME, + singletonList(dateHisto) + ); MetricConfig metricConfig = new MetricConfig("another_field", Arrays.asList("avg", "sum")); List metricAgg = createAggregationBuilders(singletonList(metricConfig)); @@ -458,9 +464,10 @@ public void testMissingBuckets() throws IOException { // Setup the composite agg TermsGroupConfig termsGroupConfig = new TermsGroupConfig(valueField); - CompositeAggregationBuilder compositeBuilder = - new CompositeAggregationBuilder(RollupIndexer.AGGREGATION_NAME, RollupIndexer.createValueSourceBuilders(termsGroupConfig)) - .size(numDocs*2); + CompositeAggregationBuilder compositeBuilder = new CompositeAggregationBuilder( + RollupIndexer.AGGREGATION_NAME, + RollupIndexer.createValueSourceBuilders(termsGroupConfig) + ).size(numDocs * 2); MetricConfig metricConfig = new MetricConfig(metricField, singletonList("max")); List metricAgg = createAggregationBuilders(singletonList(metricConfig)); @@ -524,15 +531,17 @@ public void testTimezone() throws IOException { MappedFieldType valueFieldType = new NumberFieldMapper.NumberFieldType(valueField, NumberFieldMapper.NumberType.LONG); // Setup the composite agg - DateHistogramValuesSourceBuilder dateHisto - = new DateHistogramValuesSourceBuilder("the_histo." + DateHistogramAggregationBuilder.NAME) - .field(timestampField) + DateHistogramValuesSourceBuilder dateHisto = new DateHistogramValuesSourceBuilder( + "the_histo." 
+ DateHistogramAggregationBuilder.NAME + ).field(timestampField) .calendarInterval(new DateHistogramInterval("1d")) // adds a timezone so that we aren't on default UTC .timeZone(ZoneId.of("-01:00", ZoneId.SHORT_IDS)); - CompositeAggregationBuilder compositeBuilder = new CompositeAggregationBuilder(RollupIndexer.AGGREGATION_NAME, - singletonList(dateHisto)); + CompositeAggregationBuilder compositeBuilder = new CompositeAggregationBuilder( + RollupIndexer.AGGREGATION_NAME, + singletonList(dateHisto) + ); MetricConfig metricConfig = new MetricConfig(valueField, singletonList("max")); List metricAgg = createAggregationBuilders(singletonList(metricConfig)); @@ -555,15 +564,13 @@ public void testTimezone() throws IOException { assertNotNull(map.get(valueField + "." + MaxAggregationBuilder.NAME + "." + RollupField.VALUE)); assertThat(map.get("the_histo." + DateHistogramAggregationBuilder.NAME + "." + RollupField.COUNT_FIELD), equalTo(1)); // 2015-09-30T00:00:00.000-01:00 - assertThat(map.get("the_histo." + DateHistogramAggregationBuilder.NAME + "." + RollupField.TIMESTAMP), - equalTo(1443574800000L)); + assertThat(map.get("the_histo." + DateHistogramAggregationBuilder.NAME + "." + RollupField.TIMESTAMP), equalTo(1443574800000L)); map = docs.get(1).sourceAsMap(); assertNotNull(map.get(valueField + "." + MaxAggregationBuilder.NAME + "." + RollupField.VALUE)); assertThat(map.get("the_histo." + DateHistogramAggregationBuilder.NAME + "." + RollupField.COUNT_FIELD), equalTo(1)); // 2015-10-01T00:00:00.000-01:00 - assertThat(map.get("the_histo." + DateHistogramAggregationBuilder.NAME + "." + RollupField.TIMESTAMP), - equalTo(1443661200000L)); + assertThat(map.get("the_histo." + DateHistogramAggregationBuilder.NAME + "." + RollupField.TIMESTAMP), equalTo(1443661200000L)); } interface Mock { diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerIndexingTests.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerIndexingTests.java index 5d4be0b747544..a2cbd85a4d6da 100644 --- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerIndexingTests.java +++ b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerIndexingTests.java @@ -90,9 +90,27 @@ public class RollupIndexerIndexingTests extends AggregatorTestCase { @Before private void setup() { settings = createIndexSettings(); - searchExecutionContext = new SearchExecutionContext(0, 0, settings, - null, null, null, null, null, null, - null, null, null, null, () -> 0L, null, null, () -> true, null, emptyMap()); + searchExecutionContext = new SearchExecutionContext( + 0, + 0, + settings, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + () -> 0L, + null, + null, + () -> true, + null, + emptyMap() + ); } public void testSimpleDateHisto() throws Exception { @@ -101,39 +119,51 @@ public void testSimpleDateHisto() throws Exception { DateHistogramGroupConfig dateHistoConfig = new FixedInterval(field, new DateHistogramInterval("1ms")); RollupJobConfig job = createJob(rollupIndex, new GroupConfig(dateHistoConfig), Collections.emptyList()); final List> dataset = new ArrayList<>(); - dataset.addAll( - Arrays.asList( - asMap("the_histo", 7L), - asMap("the_histo", 3L), - asMap("the_histo", 3L) - ) - ); + dataset.addAll(Arrays.asList(asMap("the_histo", 7L), asMap("the_histo", 3L), asMap("the_histo", 3L))); executeTestCase(dataset, job, System.currentTimeMillis(), (resp) -> { assertThat(resp.size(), 
equalTo(2)); IndexRequest request = resp.get(0); assertThat(request.index(), equalTo(rollupIndex)); - assertThat(request.sourceAsMap(), equalTo( + assertThat( + request.sourceAsMap(), + equalTo( asMap( - "_rollup.version", 2, - "the_histo.date_histogram.timestamp", 3, - "the_histo.date_histogram.interval", "1ms", - "the_histo.date_histogram._count", 2, - "the_histo.date_histogram.time_zone", DateTimeZone.UTC.toString(), - "_rollup.id", job.getId() + "_rollup.version", + 2, + "the_histo.date_histogram.timestamp", + 3, + "the_histo.date_histogram.interval", + "1ms", + "the_histo.date_histogram._count", + 2, + "the_histo.date_histogram.time_zone", + DateTimeZone.UTC.toString(), + "_rollup.id", + job.getId() ) - )); + ) + ); request = resp.get(1); assertThat(request.index(), equalTo(rollupIndex)); - assertThat(request.sourceAsMap(), equalTo( + assertThat( + request.sourceAsMap(), + equalTo( asMap( - "_rollup.version", 2, - "the_histo.date_histogram.timestamp", 7, - "the_histo.date_histogram.interval", "1ms", - "the_histo.date_histogram._count", 1, - "the_histo.date_histogram.time_zone", DateTimeZone.UTC.toString(), - "_rollup.id", job.getId() + "_rollup.version", + 2, + "the_histo.date_histogram.timestamp", + 7, + "the_histo.date_histogram.interval", + "1ms", + "the_histo.date_histogram._count", + 1, + "the_histo.date_histogram.time_zone", + DateTimeZone.UTC.toString(), + "_rollup.id", + job.getId() ) - )); + ) + ); }); } @@ -145,184 +175,289 @@ public void testDateHistoAndMetrics() throws Exception { RollupJobConfig job = createJob(rollupIndex, new GroupConfig(dateHistoConfig), Collections.singletonList(config)); final List> dataset = new ArrayList<>(); dataset.addAll( - Arrays.asList( - asMap("the_histo", asLong("2015-03-31T03:00:00.000Z"), "counter", 10), - asMap("the_histo", asLong("2015-03-31T03:20:00.000Z"), "counter", 20), - asMap("the_histo", asLong("2015-03-31T03:40:00.000Z"), "counter", 20), - asMap("the_histo", asLong("2015-03-31T04:00:00.000Z"), "counter", 32), - asMap("the_histo", asLong("2015-03-31T04:20:00.000Z"), "counter", 54), - asMap("the_histo", asLong("2015-03-31T04:40:00.000Z"), "counter", 55), - asMap("the_histo", asLong("2015-03-31T05:00:00.000Z"), "counter", 55), - asMap("the_histo", asLong("2015-03-31T05:00:00.000Z"), "counter", 70), - asMap("the_histo", asLong("2015-03-31T05:20:00.000Z"), "counter", 70), - asMap("the_histo", asLong("2015-03-31T05:40:00.000Z"), "counter", 80), - asMap("the_histo", asLong("2015-03-31T06:00:00.000Z"), "counter", 80), - asMap("the_histo", asLong("2015-03-31T06:20:00.000Z"), "counter", 90), - asMap("the_histo", asLong("2015-03-31T06:40:00.000Z"), "counter", 100), - asMap("the_histo", asLong("2015-03-31T07:00:00.000Z"), "counter", 120), - asMap("the_histo", asLong("2015-03-31T07:20:00.000Z"), "counter", 120), - asMap("the_histo", asLong("2015-03-31T07:40:00.000Z"), "counter", 200) - ) + Arrays.asList( + asMap("the_histo", asLong("2015-03-31T03:00:00.000Z"), "counter", 10), + asMap("the_histo", asLong("2015-03-31T03:20:00.000Z"), "counter", 20), + asMap("the_histo", asLong("2015-03-31T03:40:00.000Z"), "counter", 20), + asMap("the_histo", asLong("2015-03-31T04:00:00.000Z"), "counter", 32), + asMap("the_histo", asLong("2015-03-31T04:20:00.000Z"), "counter", 54), + asMap("the_histo", asLong("2015-03-31T04:40:00.000Z"), "counter", 55), + asMap("the_histo", asLong("2015-03-31T05:00:00.000Z"), "counter", 55), + asMap("the_histo", asLong("2015-03-31T05:00:00.000Z"), "counter", 70), + asMap("the_histo", asLong("2015-03-31T05:20:00.000Z"), 
"counter", 70), + asMap("the_histo", asLong("2015-03-31T05:40:00.000Z"), "counter", 80), + asMap("the_histo", asLong("2015-03-31T06:00:00.000Z"), "counter", 80), + asMap("the_histo", asLong("2015-03-31T06:20:00.000Z"), "counter", 90), + asMap("the_histo", asLong("2015-03-31T06:40:00.000Z"), "counter", 100), + asMap("the_histo", asLong("2015-03-31T07:00:00.000Z"), "counter", 120), + asMap("the_histo", asLong("2015-03-31T07:20:00.000Z"), "counter", 120), + asMap("the_histo", asLong("2015-03-31T07:40:00.000Z"), "counter", 200) + ) ); executeTestCase(dataset, job, System.currentTimeMillis(), (resp) -> { assertThat(resp.size(), equalTo(5)); IndexRequest request = resp.get(0); assertThat(request.index(), equalTo(rollupIndex)); - assertThat(request.sourceAsMap(), equalTo( + assertThat( + request.sourceAsMap(), + equalTo( asMap( - "_rollup.version", 2, - "the_histo.date_histogram.timestamp", asLong("2015-03-31T03:00:00.000Z"), - "the_histo.date_histogram.interval", "1h", - "the_histo.date_histogram._count", 3, - "counter.avg._count", 3.0, - "counter.avg.value", 50.0, - "counter.min.value", 10.0, - "counter.max.value", 20.0, - "counter.sum.value", 50.0, - "the_histo.date_histogram.time_zone", DateTimeZone.UTC.toString(), - "_rollup.id", job.getId() + "_rollup.version", + 2, + "the_histo.date_histogram.timestamp", + asLong("2015-03-31T03:00:00.000Z"), + "the_histo.date_histogram.interval", + "1h", + "the_histo.date_histogram._count", + 3, + "counter.avg._count", + 3.0, + "counter.avg.value", + 50.0, + "counter.min.value", + 10.0, + "counter.max.value", + 20.0, + "counter.sum.value", + 50.0, + "the_histo.date_histogram.time_zone", + DateTimeZone.UTC.toString(), + "_rollup.id", + job.getId() ) - )); + ) + ); request = resp.get(1); assertThat(request.index(), equalTo(rollupIndex)); - assertThat(request.sourceAsMap(), equalTo( + assertThat( + request.sourceAsMap(), + equalTo( asMap( - "_rollup.version", 2, - "the_histo.date_histogram.timestamp", asLong("2015-03-31T04:00:00.000Z"), - "the_histo.date_histogram.interval", "1h", - "the_histo.date_histogram._count", 3, - "counter.avg._count", 3.0, - "counter.avg.value", 141.0, - "counter.min.value", 32.0, - "counter.max.value", 55.0, - "counter.sum.value", 141.0, - "the_histo.date_histogram.time_zone", DateTimeZone.UTC.toString(), - "_rollup.id", job.getId() + "_rollup.version", + 2, + "the_histo.date_histogram.timestamp", + asLong("2015-03-31T04:00:00.000Z"), + "the_histo.date_histogram.interval", + "1h", + "the_histo.date_histogram._count", + 3, + "counter.avg._count", + 3.0, + "counter.avg.value", + 141.0, + "counter.min.value", + 32.0, + "counter.max.value", + 55.0, + "counter.sum.value", + 141.0, + "the_histo.date_histogram.time_zone", + DateTimeZone.UTC.toString(), + "_rollup.id", + job.getId() ) - )); + ) + ); request = resp.get(2); assertThat(request.index(), equalTo(rollupIndex)); - assertThat(request.sourceAsMap(), equalTo( + assertThat( + request.sourceAsMap(), + equalTo( asMap( - "_rollup.version", 2, - "the_histo.date_histogram.timestamp", asLong("2015-03-31T05:00:00.000Z"), - "the_histo.date_histogram.interval", "1h", - "the_histo.date_histogram._count", 4, - "counter.avg._count", 4.0, - "counter.avg.value", 275.0, - "counter.min.value", 55.0, - "counter.max.value", 80.0, - "counter.sum.value", 275.0, - "the_histo.date_histogram.time_zone", DateTimeZone.UTC.toString(), - "_rollup.id", job.getId() + "_rollup.version", + 2, + "the_histo.date_histogram.timestamp", + asLong("2015-03-31T05:00:00.000Z"), + "the_histo.date_histogram.interval", + "1h", 
+ "the_histo.date_histogram._count", + 4, + "counter.avg._count", + 4.0, + "counter.avg.value", + 275.0, + "counter.min.value", + 55.0, + "counter.max.value", + 80.0, + "counter.sum.value", + 275.0, + "the_histo.date_histogram.time_zone", + DateTimeZone.UTC.toString(), + "_rollup.id", + job.getId() ) - )); + ) + ); request = resp.get(3); assertThat(request.index(), equalTo(rollupIndex)); - assertThat(request.sourceAsMap(), equalTo( + assertThat( + request.sourceAsMap(), + equalTo( asMap( - "_rollup.version", 2, - "the_histo.date_histogram.timestamp", asLong("2015-03-31T06:00:00.000Z"), - "the_histo.date_histogram.interval", "1h", - "the_histo.date_histogram._count", 3, - "counter.avg._count", 3.0, - "counter.avg.value", 270.0, - "counter.min.value", 80.0, - "counter.max.value", 100.0, - "counter.sum.value", 270.0, - "the_histo.date_histogram.time_zone", DateTimeZone.UTC.toString(), - "_rollup.id", job.getId() + "_rollup.version", + 2, + "the_histo.date_histogram.timestamp", + asLong("2015-03-31T06:00:00.000Z"), + "the_histo.date_histogram.interval", + "1h", + "the_histo.date_histogram._count", + 3, + "counter.avg._count", + 3.0, + "counter.avg.value", + 270.0, + "counter.min.value", + 80.0, + "counter.max.value", + 100.0, + "counter.sum.value", + 270.0, + "the_histo.date_histogram.time_zone", + DateTimeZone.UTC.toString(), + "_rollup.id", + job.getId() ) - )); + ) + ); request = resp.get(4); assertThat(request.index(), equalTo(rollupIndex)); - assertThat(request.sourceAsMap(), equalTo( + assertThat( + request.sourceAsMap(), + equalTo( asMap( - "_rollup.version", 2, - "the_histo.date_histogram.timestamp", asLong("2015-03-31T07:00:00.000Z"), - "the_histo.date_histogram.interval", "1h", - "the_histo.date_histogram._count", 3, - "counter.avg._count", 3.0, - "counter.avg.value", 440.0, - "counter.min.value", 120.0, - "counter.max.value", 200.0, - "counter.sum.value", 440.0, - "the_histo.date_histogram.time_zone", DateTimeZone.UTC.toString(), - "_rollup.id", job.getId() + "_rollup.version", + 2, + "the_histo.date_histogram.timestamp", + asLong("2015-03-31T07:00:00.000Z"), + "the_histo.date_histogram.interval", + "1h", + "the_histo.date_histogram._count", + 3, + "counter.avg._count", + 3.0, + "counter.avg.value", + 440.0, + "counter.min.value", + 120.0, + "counter.max.value", + 200.0, + "counter.sum.value", + 440.0, + "the_histo.date_histogram.time_zone", + DateTimeZone.UTC.toString(), + "_rollup.id", + job.getId() ) - )); + ) + ); }); } public void testSimpleDateHistoWithDelay() throws Exception { String rollupIndex = randomAlphaOfLengthBetween(5, 10); String field = "the_histo"; - DateHistogramGroupConfig dateHistoConfig = - new FixedInterval(field, new DateHistogramInterval("1m"), new DateHistogramInterval("1h"), null); + DateHistogramGroupConfig dateHistoConfig = new FixedInterval( + field, + new DateHistogramInterval("1m"), + new DateHistogramInterval("1h"), + null + ); RollupJobConfig job = createJob(rollupIndex, new GroupConfig(dateHistoConfig), Collections.emptyList()); final List> dataset = new ArrayList<>(); long now = System.currentTimeMillis(); dataset.addAll( - Arrays.asList( - asMap("the_histo", now - TimeValue.timeValueHours(5).getMillis()), - asMap("the_histo", now - TimeValue.timeValueHours(5).getMillis()), - asMap("the_histo", now - TimeValue.timeValueMinutes(75).getMillis()), - asMap("the_histo", now - TimeValue.timeValueMinutes(75).getMillis()), - asMap("the_histo", now - TimeValue.timeValueMinutes(61).getMillis()), - asMap("the_histo", now - 
TimeValue.timeValueHours(1).getMillis()), - asMap("the_histo", now - TimeValue.timeValueMinutes(10).getMillis()), - asMap("the_histo", now - TimeValue.timeValueMinutes(5).getMillis()), - asMap("the_histo", now - TimeValue.timeValueSeconds(1).getMillis()), - asMap("the_histo", now) - ) + Arrays.asList( + asMap("the_histo", now - TimeValue.timeValueHours(5).getMillis()), + asMap("the_histo", now - TimeValue.timeValueHours(5).getMillis()), + asMap("the_histo", now - TimeValue.timeValueMinutes(75).getMillis()), + asMap("the_histo", now - TimeValue.timeValueMinutes(75).getMillis()), + asMap("the_histo", now - TimeValue.timeValueMinutes(61).getMillis()), + asMap("the_histo", now - TimeValue.timeValueHours(1).getMillis()), + asMap("the_histo", now - TimeValue.timeValueMinutes(10).getMillis()), + asMap("the_histo", now - TimeValue.timeValueMinutes(5).getMillis()), + asMap("the_histo", now - TimeValue.timeValueSeconds(1).getMillis()), + asMap("the_histo", now) + ) ); final Rounding.Prepared rounding = dateHistoConfig.createRounding(); executeTestCase(dataset, job, now, (resp) -> { assertThat(resp.size(), equalTo(3)); IndexRequest request = resp.get(0); assertThat(request.index(), equalTo(rollupIndex)); - assertThat(request.sourceAsMap(), equalTo( + assertThat( + request.sourceAsMap(), + equalTo( asMap( - "_rollup.version", 2, - "the_histo.date_histogram.timestamp", rounding.round(now - TimeValue.timeValueHours(5).getMillis()), - "the_histo.date_histogram.interval", "1m", - "the_histo.date_histogram._count", 2, - "the_histo.date_histogram.time_zone", DateTimeZone.UTC.toString(), - "_rollup.id", job.getId() + "_rollup.version", + 2, + "the_histo.date_histogram.timestamp", + rounding.round(now - TimeValue.timeValueHours(5).getMillis()), + "the_histo.date_histogram.interval", + "1m", + "the_histo.date_histogram._count", + 2, + "the_histo.date_histogram.time_zone", + DateTimeZone.UTC.toString(), + "_rollup.id", + job.getId() ) - )); + ) + ); request = resp.get(1); assertThat(request.index(), equalTo(rollupIndex)); - assertThat(request.sourceAsMap(), equalTo( + assertThat( + request.sourceAsMap(), + equalTo( asMap( - "_rollup.version", 2, - "the_histo.date_histogram.timestamp", rounding.round(now - TimeValue.timeValueMinutes(75).getMillis()), - "the_histo.date_histogram.interval", "1m", - "the_histo.date_histogram._count", 2, - "the_histo.date_histogram.time_zone", DateTimeZone.UTC.toString(), - "_rollup.id", job.getId() + "_rollup.version", + 2, + "the_histo.date_histogram.timestamp", + rounding.round(now - TimeValue.timeValueMinutes(75).getMillis()), + "the_histo.date_histogram.interval", + "1m", + "the_histo.date_histogram._count", + 2, + "the_histo.date_histogram.time_zone", + DateTimeZone.UTC.toString(), + "_rollup.id", + job.getId() ) - )); + ) + ); request = resp.get(2); assertThat(request.index(), equalTo(rollupIndex)); - assertThat(request.sourceAsMap(), equalTo( + assertThat( + request.sourceAsMap(), + equalTo( asMap( - "_rollup.version", 2, - "the_histo.date_histogram.timestamp", rounding.round(now - TimeValue.timeValueMinutes(61).getMillis()), - "the_histo.date_histogram.interval", "1m", - "the_histo.date_histogram._count", 1, - "the_histo.date_histogram.time_zone", DateTimeZone.UTC.toString(), - "_rollup.id", job.getId() + "_rollup.version", + 2, + "the_histo.date_histogram.timestamp", + rounding.round(now - TimeValue.timeValueMinutes(61).getMillis()), + "the_histo.date_histogram.interval", + "1m", + "the_histo.date_histogram._count", + 1, + "the_histo.date_histogram.time_zone", + 
DateTimeZone.UTC.toString(), + "_rollup.id", + job.getId() ) - )); + ) + ); }); } public void testSimpleDateHistoWithOverlappingDelay() throws Exception { String rollupIndex = randomAlphaOfLengthBetween(5, 10); String field = "the_histo"; - DateHistogramGroupConfig dateHistoConfig = - new FixedInterval(field, new DateHistogramInterval("1h"), new DateHistogramInterval("15m"), null); + DateHistogramGroupConfig dateHistoConfig = new FixedInterval( + field, + new DateHistogramInterval("1h"), + new DateHistogramInterval("15m"), + null + ); RollupJobConfig job = createJob(rollupIndex, new GroupConfig(dateHistoConfig), Collections.emptyList()); final List> dataset = new ArrayList<>(); long now = asLong("2015-04-01T10:30:00.000Z"); @@ -345,28 +480,46 @@ public void testSimpleDateHistoWithOverlappingDelay() throws Exception { assertThat(resp.size(), equalTo(2)); IndexRequest request = resp.get(0); assertThat(request.index(), equalTo(rollupIndex)); - assertThat(request.sourceAsMap(), equalTo( - asMap( - "_rollup.version", 2, - "the_histo.date_histogram.timestamp", rounding.round(now - TimeValue.timeValueHours(2).getMillis()), - "the_histo.date_histogram.interval", "1h", - "the_histo.date_histogram._count", 3, - "the_histo.date_histogram.time_zone", DateTimeZone.UTC.toString(), - "_rollup.id", job.getId() + assertThat( + request.sourceAsMap(), + equalTo( + asMap( + "_rollup.version", + 2, + "the_histo.date_histogram.timestamp", + rounding.round(now - TimeValue.timeValueHours(2).getMillis()), + "the_histo.date_histogram.interval", + "1h", + "the_histo.date_histogram._count", + 3, + "the_histo.date_histogram.time_zone", + DateTimeZone.UTC.toString(), + "_rollup.id", + job.getId() + ) ) - )); + ); request = resp.get(1); assertThat(request.index(), equalTo(rollupIndex)); - assertThat(request.sourceAsMap(), equalTo( - asMap( - "_rollup.version", 2, - "the_histo.date_histogram.timestamp", rounding.round(now - TimeValue.timeValueHours(1).getMillis()), - "the_histo.date_histogram.interval", "1h", - "the_histo.date_histogram._count", 4, - "the_histo.date_histogram.time_zone", DateTimeZone.UTC.toString(), - "_rollup.id", job.getId() + assertThat( + request.sourceAsMap(), + equalTo( + asMap( + "_rollup.version", + 2, + "the_histo.date_histogram.timestamp", + rounding.round(now - TimeValue.timeValueHours(1).getMillis()), + "the_histo.date_histogram.interval", + "1h", + "the_histo.date_histogram._count", + 4, + "the_histo.date_histogram.time_zone", + DateTimeZone.UTC.toString(), + "_rollup.id", + job.getId() + ) ) - )); + ); }); } @@ -374,67 +527,93 @@ public void testSimpleDateHistoWithTimeZone() throws Exception { final List> dataset = new ArrayList<>(); long now = asLong("2015-04-01T10:00:00.000Z"); dataset.addAll( - Arrays.asList( - asMap("the_histo", now - TimeValue.timeValueHours(10).getMillis()), - asMap("the_histo", now - TimeValue.timeValueHours(8).getMillis()), - asMap("the_histo", now - TimeValue.timeValueHours(6).getMillis()), - asMap("the_histo", now - TimeValue.timeValueMinutes(310).getMillis()), - asMap("the_histo", now - TimeValue.timeValueMinutes(305).getMillis()), - asMap("the_histo", now - TimeValue.timeValueMinutes(225).getMillis()), - asMap("the_histo", now) - ) + Arrays.asList( + asMap("the_histo", now - TimeValue.timeValueHours(10).getMillis()), + asMap("the_histo", now - TimeValue.timeValueHours(8).getMillis()), + asMap("the_histo", now - TimeValue.timeValueHours(6).getMillis()), + asMap("the_histo", now - TimeValue.timeValueMinutes(310).getMillis()), + asMap("the_histo", now - 
TimeValue.timeValueMinutes(305).getMillis()), + asMap("the_histo", now - TimeValue.timeValueMinutes(225).getMillis()), + asMap("the_histo", now) + ) ); String timeZone = DateTimeZone.forOffsetHours(-3).getID(); String rollupIndex = randomAlphaOfLengthBetween(5, 10); String field = "the_histo"; - DateHistogramGroupConfig dateHistoConfig = new CalendarInterval(field, new DateHistogramInterval("1d"), null, timeZone); + DateHistogramGroupConfig dateHistoConfig = new CalendarInterval(field, new DateHistogramInterval("1d"), null, timeZone); RollupJobConfig job = createJob(rollupIndex, new GroupConfig(dateHistoConfig), Collections.emptyList()); executeTestCase(dataset, job, now, (resp) -> { - assertThat(resp.size(), equalTo(1)); - IndexRequest request = resp.get(0); - assertThat(request.index(), equalTo(rollupIndex)); - assertThat(request.sourceAsMap(), equalTo( - asMap( - "_rollup.version", 2, - "the_histo.date_histogram.timestamp", asLong("2015-03-31T03:00:00.000Z"), - "the_histo.date_histogram.interval", "1d", - "the_histo.date_histogram._count", 2, - "the_histo.date_histogram.time_zone", timeZone.toString(), - "_rollup.id", job.getId() - ) - )); - } - ); + assertThat(resp.size(), equalTo(1)); + IndexRequest request = resp.get(0); + assertThat(request.index(), equalTo(rollupIndex)); + assertThat( + request.sourceAsMap(), + equalTo( + asMap( + "_rollup.version", + 2, + "the_histo.date_histogram.timestamp", + asLong("2015-03-31T03:00:00.000Z"), + "the_histo.date_histogram.interval", + "1d", + "the_histo.date_histogram._count", + 2, + "the_histo.date_histogram.time_zone", + timeZone.toString(), + "_rollup.id", + job.getId() + ) + ) + ); + }); long nowPlusOneDay = now + TimeValue.timeValueHours(24).millis(); executeTestCase(dataset, job, nowPlusOneDay, (resp) -> { assertThat(resp.size(), equalTo(2)); IndexRequest request = resp.get(0); assertThat(request.index(), equalTo(rollupIndex)); - assertThat(request.sourceAsMap(), equalTo( + assertThat( + request.sourceAsMap(), + equalTo( asMap( - "_rollup.version", 2, - "the_histo.date_histogram.timestamp", asLong("2015-03-31T03:00:00.000Z"), - "the_histo.date_histogram.interval", "1d", - "the_histo.date_histogram._count", 2, - "the_histo.date_histogram.time_zone", timeZone.toString(), - "_rollup.id", job.getId() + "_rollup.version", + 2, + "the_histo.date_histogram.timestamp", + asLong("2015-03-31T03:00:00.000Z"), + "the_histo.date_histogram.interval", + "1d", + "the_histo.date_histogram._count", + 2, + "the_histo.date_histogram.time_zone", + timeZone.toString(), + "_rollup.id", + job.getId() ) - )); + ) + ); request = resp.get(1); assertThat(request.index(), equalTo(rollupIndex)); - assertThat(request.sourceAsMap(), equalTo( + assertThat( + request.sourceAsMap(), + equalTo( asMap( - "_rollup.version", 2, - "the_histo.date_histogram.timestamp", asLong("2015-04-01T03:00:00.000Z"), - "the_histo.date_histogram.interval", "1d", - "the_histo.date_histogram._count", 5, - "the_histo.date_histogram.time_zone", timeZone.toString(), - "_rollup.id", job.getId() + "_rollup.version", + 2, + "the_histo.date_histogram.timestamp", + asLong("2015-04-01T03:00:00.000Z"), + "the_histo.date_histogram.interval", + "1d", + "the_histo.date_histogram._count", + 5, + "the_histo.date_histogram.time_zone", + timeZone.toString(), + "_rollup.id", + job.getId() ) - )); + ) + ); }); } @@ -445,17 +624,16 @@ public void testRandomizedDateHisto() throws Exception { String valueField = "the_avg"; String timeInterval = randomIntBetween(2, 10) + randomFrom("h", "m"); - 
DateHistogramGroupConfig dateHistoConfig = - new FixedInterval(timestampField, new DateHistogramInterval(timeInterval)); + DateHistogramGroupConfig dateHistoConfig = new FixedInterval(timestampField, new DateHistogramInterval(timeInterval)); MetricConfig metricConfig = new MetricConfig(valueField, Collections.singletonList("avg")); RollupJobConfig job = createJob(rollupIndex, new GroupConfig(dateHistoConfig), Collections.singletonList(metricConfig)); final List> dataset = new ArrayList<>(); - int numDocs = randomIntBetween(1,100); + int numDocs = randomIntBetween(1, 100); for (int i = 0; i < numDocs; i++) { // Make sure the timestamp is sufficiently in the past that we don't get bitten // by internal rounding, causing no docs to match - long timestamp = new DateTime().minusDays(2).minusHours(randomIntBetween(11,100)).getMillis(); + long timestamp = new DateTime().minusDays(2).minusHours(randomIntBetween(11, 100)).getMillis(); dataset.add(asMap(timestampField, timestamp, valueField, randomLongBetween(1, 100))); } executeTestCase(dataset, job, System.currentTimeMillis(), (resp) -> { @@ -477,16 +655,24 @@ public void testRandomizedDateHisto() throws Exception { } private RollupJobConfig createJob(String rollupIndex, GroupConfig groupConfig, List metricConfigs) { - return new RollupJobConfig(randomAlphaOfLength(10), randomAlphaOfLength(10), rollupIndex, ConfigTestHelpers.randomCron(), - randomIntBetween(1, 100), groupConfig, metricConfigs, ConfigTestHelpers.randomTimeout(random())); + return new RollupJobConfig( + randomAlphaOfLength(10), + randomAlphaOfLength(10), + rollupIndex, + ConfigTestHelpers.randomCron(), + randomIntBetween(1, 100), + groupConfig, + metricConfigs, + ConfigTestHelpers.randomTimeout(random()) + ); } static Map asMap(Object... fields) { assert fields.length % 2 == 0; final Map map = new HashMap<>(); - for (int i = 0; i < fields.length; i+=2) { + for (int i = 0; i < fields.length; i += 2) { String field = (String) fields[i]; - map.put(field, fields[i+1]); + map.put(field, fields[i + 1]); } return map; } @@ -502,8 +688,12 @@ private static long asLong(String dateTime) { * @param now The current time in milliseconds * @param rollupConsumer The consumer that checks the created rollup documents for the job */ - private void executeTestCase(List> docs, RollupJobConfig config, long now, - Consumer> rollupConsumer) throws Exception { + private void executeTestCase( + List> docs, + RollupJobConfig config, + long now, + Consumer> rollupConsumer + ) throws Exception { Map fieldTypeLookup = createFieldTypes(config); Directory dir = index(docs, fieldTypeLookup); IndexReader reader = DirectoryReader.open(dir); @@ -513,8 +703,13 @@ private void executeTestCase(List> docs, RollupJobConfig con try { RollupJob job = new RollupJob(config, Collections.emptyMap()); - final SyncRollupIndexer action = new SyncRollupIndexer(threadPool, job, searcher, - fieldTypeLookup.values().toArray(new MappedFieldType[0]), fieldTypeLookup.get(dateHistoField)); + final SyncRollupIndexer action = new SyncRollupIndexer( + threadPool, + job, + searcher, + fieldTypeLookup.values().toArray(new MappedFieldType[0]), + fieldTypeLookup.get(dateHistoField) + ); rollupConsumer.accept(action.triggerAndWaitForCompletion(now)); } finally { ThreadPool.terminate(threadPool, 30, TimeUnit.SECONDS); @@ -537,18 +732,16 @@ private Map createFieldTypes(RollupJobConfig job) { if (job.getGroupConfig().getHistogram() != null) { for (String field : job.getGroupConfig().getHistogram().getFields()) { - MappedFieldType ft = new 
NumberFieldMapper.Builder(field, NumberType.LONG, ScriptCompiler.NONE, false, false) - .build(new ContentPath(0)) - .fieldType(); + MappedFieldType ft = new NumberFieldMapper.Builder(field, NumberType.LONG, ScriptCompiler.NONE, false, false).build( + new ContentPath(0) + ).fieldType(); fieldTypes.put(ft.name(), ft); } } if (job.getGroupConfig().getTerms() != null) { for (String field : job.getGroupConfig().getTerms().getFields()) { - MappedFieldType ft = new KeywordFieldMapper.Builder(field) - .build(new ContentPath(0)) - .fieldType(); + MappedFieldType ft = new KeywordFieldMapper.Builder(field).build(new ContentPath(0)).fieldType(); fieldTypes.put(ft.name(), ft); } } @@ -556,8 +749,8 @@ private Map createFieldTypes(RollupJobConfig job) { if (job.getMetricsConfig() != null) { for (MetricConfig metric : job.getMetricsConfig()) { MappedFieldType ft = new NumberFieldMapper.Builder(metric.getField(), NumberType.LONG, ScriptCompiler.NONE, false, false) - .build(new ContentPath(0)) - .fieldType(); + .build(new ContentPath(0)) + .fieldType(); fieldTypes.put(ft.name(), ft); } } @@ -610,8 +803,13 @@ class SyncRollupIndexer extends RollupIndexer { private final CountDownLatch latch = new CountDownLatch(1); private Exception exc; - SyncRollupIndexer(ThreadPool threadPool, RollupJob job, IndexSearcher searcher, - MappedFieldType[] fieldTypes, MappedFieldType timestampField) { + SyncRollupIndexer( + ThreadPool threadPool, + RollupJob job, + IndexSearcher searcher, + MappedFieldType[] fieldTypes, + MappedFieldType timestampField + ) { super(threadPool, job, new AtomicReference<>(IndexerState.STARTED), null); this.searcher = searcher; this.fieldTypes = fieldTypes; @@ -644,15 +842,28 @@ protected void doNextSearch(long waitTimeInNanos, ActionListener assertThat(request.source().query(), instanceOf(RangeQueryBuilder.class)); RangeQueryBuilder range = (RangeQueryBuilder) request.source().query(); final ZoneId timeZone = range.timeZone() != null ? 
ZoneId.of(range.timeZone()) : null; - Query query = timestampField.rangeQuery(range.from(), range.to(), range.includeLower(), range.includeUpper(), - null, timeZone, DateFormatter.forPattern(range.format()).toDateMathParser(), searchExecutionContext); + Query query = timestampField.rangeQuery( + range.from(), + range.to(), + range.includeLower(), + range.includeUpper(), + null, + timeZone, + DateFormatter.forPattern(range.format()).toDateMathParser(), + searchExecutionContext + ); // extract composite agg assertThat(request.source().aggregations().getAggregatorFactories().size(), equalTo(1)); - assertThat(request.source().aggregations().getAggregatorFactories().iterator().next(), - instanceOf(CompositeAggregationBuilder.class)); - CompositeAggregationBuilder aggBuilder = - (CompositeAggregationBuilder) request.source().aggregations().getAggregatorFactories().iterator().next(); + assertThat( + request.source().aggregations().getAggregatorFactories().iterator().next(), + instanceOf(CompositeAggregationBuilder.class) + ); + CompositeAggregationBuilder aggBuilder = (CompositeAggregationBuilder) request.source() + .aggregations() + .getAggregatorFactories() + .iterator() + .next(); CompositeAggregation result = null; try { @@ -660,10 +871,16 @@ protected void doNextSearch(long waitTimeInNanos, ActionListener } catch (IOException e) { listener.onFailure(e); } - SearchResponseSections sections = new SearchResponseSections(null, new Aggregations(Collections.singletonList(result)), - null, false, null, null, 1); - SearchResponse response = new SearchResponse(sections, null, 1, 1, - 0, 0, ShardSearchFailure.EMPTY_ARRAY, null); + SearchResponseSections sections = new SearchResponseSections( + null, + new Aggregations(Collections.singletonList(result)), + null, + false, + null, + null, + 1 + ); + SearchResponse response = new SearchResponse(sections, null, 1, 1, 0, 0, ShardSearchFailure.EMPTY_ARRAY, null); listener.onResponse(response); } diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerStateTests.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerStateTests.java index aa038b7b3050d..fa36733d37a96 100644 --- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerStateTests.java +++ b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerStateTests.java @@ -50,16 +50,24 @@ import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.spy; - public class RollupIndexerStateTests extends ESTestCase { private static class EmptyRollupIndexer extends RollupIndexer { - EmptyRollupIndexer(ThreadPool threadPool, RollupJob job, AtomicReference initialState, - Map initialPosition, RollupIndexerJobStats stats) { + EmptyRollupIndexer( + ThreadPool threadPool, + RollupJob job, + AtomicReference initialState, + Map initialPosition, + RollupIndexerJobStats stats + ) { super(threadPool, job, initialState, initialPosition, stats); } - EmptyRollupIndexer(ThreadPool threadPool, RollupJob job, AtomicReference initialState, - Map initialPosition) { + EmptyRollupIndexer( + ThreadPool threadPool, + RollupJob job, + AtomicReference initialState, + Map initialPosition + ) { super(threadPool, job, initialState, initialPosition); } @@ -99,9 +107,14 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws })); final SearchResponseSections sections = new SearchResponseSections( new SearchHits(new SearchHit[0], new TotalHits(0, 
TotalHits.Relation.EQUAL_TO), 0), - aggs, null, false, null, null, 1); - final SearchResponse response = new SearchResponse(sections, null, 1, 1, 0, 0, - new ShardSearchFailure[0], null); + aggs, + null, + false, + null, + null, + 1 + ); + final SearchResponse response = new SearchResponse(sections, null, 1, 1, 0, 0, new ShardSearchFailure[0], null); nextPhase.onResponse(response); } @@ -135,13 +148,22 @@ protected void onFinish(ActionListener listener) { private static class DelayedEmptyRollupIndexer extends EmptyRollupIndexer { protected CountDownLatch latch; - DelayedEmptyRollupIndexer(ThreadPool threadPool, RollupJob job, AtomicReference initialState, - Map initialPosition) { + DelayedEmptyRollupIndexer( + ThreadPool threadPool, + RollupJob job, + AtomicReference initialState, + Map initialPosition + ) { super(threadPool, job, initialState, initialPosition); } - DelayedEmptyRollupIndexer(ThreadPool threadPool, RollupJob job, AtomicReference initialState, - Map initialPosition, RollupIndexerJobStats stats) { + DelayedEmptyRollupIndexer( + ThreadPool threadPool, + RollupJob job, + AtomicReference initialState, + Map initialPosition, + RollupIndexerJobStats stats + ) { super(threadPool, job, initialState, initialPosition, stats); } @@ -168,16 +190,28 @@ private static class NonEmptyRollupIndexer extends RollupIndexer { final BiConsumer> saveStateCheck; private CountDownLatch latch; - NonEmptyRollupIndexer(ThreadPool threadPool, RollupJob job, AtomicReference initialState, - Map initialPosition, Function searchFunction, - Function bulkFunction, Consumer failureConsumer) { + NonEmptyRollupIndexer( + ThreadPool threadPool, + RollupJob job, + AtomicReference initialState, + Map initialPosition, + Function searchFunction, + Function bulkFunction, + Consumer failureConsumer + ) { this(threadPool, job, initialState, initialPosition, searchFunction, bulkFunction, failureConsumer, (i, m) -> {}); } - NonEmptyRollupIndexer(ThreadPool threadPool, RollupJob job, AtomicReference initialState, - Map initialPosition, Function searchFunction, - Function bulkFunction, Consumer failureConsumer, - BiConsumer> saveStateCheck) { + NonEmptyRollupIndexer( + ThreadPool threadPool, + RollupJob job, + AtomicReference initialState, + Map initialPosition, + Function searchFunction, + Function bulkFunction, + Consumer failureConsumer, + BiConsumer> saveStateCheck + ) { super(threadPool, job, initialState, initialPosition); this.searchFunction = searchFunction; this.bulkFunction = bulkFunction; @@ -271,9 +305,7 @@ public void testIndexing() throws Exception { DelayedEmptyRollupIndexer indexer = new DelayedEmptyRollupIndexer(threadPool, job, state, null) { @Override protected void onFinish(ActionListener listener) { - super.onFinish(ActionListener.wrap(r -> { - listener.onResponse(r); - }, listener::onFailure)); + super.onFinish(ActionListener.wrap(r -> { listener.onResponse(r); }, listener::onFailure)); } @Override @@ -326,8 +358,7 @@ public void testStateChangeMidTrigger() { final ThreadPool threadPool = new TestThreadPool(getTestName()); try { AtomicBoolean isFinished = new AtomicBoolean(false); - DelayedEmptyRollupIndexer indexer = - new DelayedEmptyRollupIndexer(threadPool, job, state, null, spyStats) { + DelayedEmptyRollupIndexer indexer = new DelayedEmptyRollupIndexer(threadPool, job, state, null, spyStats) { @Override protected void onFinish(ActionListener listener) { super.onFinish(ActionListener.wrap(r -> { @@ -456,9 +487,14 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws 
})); final SearchResponseSections sections = new SearchResponseSections( new SearchHits(new SearchHit[0], new TotalHits(0, TotalHits.Relation.EQUAL_TO), 0), - aggs, null, false, null, null, 1); - final SearchResponse response = new SearchResponse(sections, null, 1, 1, 0, 0, - ShardSearchFailure.EMPTY_ARRAY, null); + aggs, + null, + false, + null, + null, + 1 + ); + final SearchResponse response = new SearchResponse(sections, null, 1, 1, 0, 0, ShardSearchFailure.EMPTY_ARRAY, null); nextPhase.onResponse(response); } @@ -662,16 +698,19 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws })); final SearchResponseSections sections = new SearchResponseSections( new SearchHits(new SearchHit[0], new TotalHits(0, TotalHits.Relation.EQUAL_TO), 0), - aggs, null, false, null, null, 1); - return new SearchResponse(sections, null, 1, 1, 0, 0, - ShardSearchFailure.EMPTY_ARRAY, null); + aggs, + null, + false, + null, + null, + 1 + ); + return new SearchResponse(sections, null, 1, 1, 0, 0, ShardSearchFailure.EMPTY_ARRAY, null); }; Function bulkFunction = bulkRequest -> new BulkResponse(new BulkItemResponse[0], 100); - Consumer failureConsumer = e -> { - assertThat(e.getMessage(), equalTo("Could not identify key in agg [foo]")); - }; + Consumer failureConsumer = e -> { assertThat(e.getMessage(), equalTo("Could not identify key in agg [foo]")); }; BiConsumer> stateCheck = (i, p) -> { if (i == IndexerState.STARTED) { isFinished.set(true); @@ -681,8 +720,16 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws final ThreadPool threadPool = new TestThreadPool(getTestName()); try { - NonEmptyRollupIndexer indexer = new NonEmptyRollupIndexer(threadPool, job, state, null, - searchFunction, bulkFunction, failureConsumer, stateCheck); + NonEmptyRollupIndexer indexer = new NonEmptyRollupIndexer( + threadPool, + job, + state, + null, + searchFunction, + bulkFunction, + failureConsumer, + stateCheck + ); final CountDownLatch latch = indexer.newLatch(1); indexer.start(); assertThat(indexer.getState(), equalTo(IndexerState.STARTED)); @@ -775,26 +822,35 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws })); final SearchResponseSections sections = new SearchResponseSections( new SearchHits(new SearchHit[0], new TotalHits(0, TotalHits.Relation.EQUAL_TO), 0), - aggs, null, false, null, null, 1); - return new SearchResponse(sections, null, 1, 1, 0, 0, - ShardSearchFailure.EMPTY_ARRAY, null); + aggs, + null, + false, + null, + null, + 1 + ); + return new SearchResponse(sections, null, 1, 1, 0, 0, ShardSearchFailure.EMPTY_ARRAY, null); }; Function bulkFunction = bulkRequest -> new BulkResponse(new BulkItemResponse[0], 100); - Consumer failureConsumer = e -> { - assertThat(e.getMessage(), equalTo("Could not identify key in agg [foo]")); - }; + Consumer failureConsumer = e -> { assertThat(e.getMessage(), equalTo("Could not identify key in agg [foo]")); }; - BiConsumer> doSaveStateCheck = (indexerState, position) -> { - isFinished.set(true); - }; + BiConsumer> doSaveStateCheck = (indexerState, position) -> { isFinished.set(true); }; final ThreadPool threadPool = new TestThreadPool(getTestName()); try { - NonEmptyRollupIndexer indexer = new NonEmptyRollupIndexer(threadPool, job, state, null, - searchFunction, bulkFunction, failureConsumer, doSaveStateCheck); + NonEmptyRollupIndexer indexer = new NonEmptyRollupIndexer( + threadPool, + job, + state, + null, + searchFunction, + bulkFunction, + failureConsumer, + doSaveStateCheck + ); final 
CountDownLatch latch = indexer.newLatch(1); indexer.start(); assertThat(indexer.getState(), equalTo(IndexerState.STARTED)); @@ -823,15 +879,16 @@ public void testSearchShardFailure() throws Exception { RollupJob job = new RollupJob(ConfigTestHelpers.randomRollupJobConfig(random()), Collections.emptyMap()); AtomicReference state = new AtomicReference<>(IndexerState.STOPPED); Function searchFunction = searchRequest -> { - throw new SearchPhaseExecutionException("query", "Partial shards failure", - new ShardSearchFailure[] { new ShardSearchFailure(new RuntimeException("failed")) }); + throw new SearchPhaseExecutionException( + "query", + "Partial shards failure", + new ShardSearchFailure[] { new ShardSearchFailure(new RuntimeException("failed")) } + ); }; Function bulkFunction = bulkRequest -> new BulkResponse(new BulkItemResponse[0], 100); - Consumer failureConsumer = e -> { - assertThat(e.getMessage(), startsWith("Partial shards failure")); - }; + Consumer failureConsumer = e -> { assertThat(e.getMessage(), startsWith("Partial shards failure")); }; BiConsumer> stateCheck = (i, p) -> { if (i == IndexerState.STARTED) { isFinished.set(true); @@ -841,8 +898,16 @@ public void testSearchShardFailure() throws Exception { final ThreadPool threadPool = new TestThreadPool(getTestName()); try { - NonEmptyRollupIndexer indexer = new NonEmptyRollupIndexer(threadPool, job, state, null, - searchFunction, bulkFunction, failureConsumer, stateCheck); + NonEmptyRollupIndexer indexer = new NonEmptyRollupIndexer( + threadPool, + job, + state, + null, + searchFunction, + bulkFunction, + failureConsumer, + stateCheck + ); final CountDownLatch latch = indexer.newLatch(1); indexer.start(); assertThat(indexer.getState(), equalTo(IndexerState.STARTED)); @@ -932,9 +997,14 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws })); final SearchResponseSections sections = new SearchResponseSections( new SearchHits(new SearchHit[0], new TotalHits(0, TotalHits.Relation.EQUAL_TO), 0), - aggs, null, false, null, null, 1); - return new SearchResponse(sections, null, 1, 1, 0, 0, - ShardSearchFailure.EMPTY_ARRAY, null); + aggs, + null, + false, + null, + null, + 1 + ); + return new SearchResponse(sections, null, 1, 1, 0, 0, ShardSearchFailure.EMPTY_ARRAY, null); }; Function bulkFunction = bulkRequest -> { @@ -942,9 +1012,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws return null; }; - Consumer failureConsumer = e -> { - assertThat(e.getMessage(), equalTo("failed")); - }; + Consumer failureConsumer = e -> { assertThat(e.getMessage(), equalTo("failed")); }; BiConsumer> stateCheck = (i, p) -> { if (i == IndexerState.STARTED) { isFinished.set(true); @@ -954,8 +1022,16 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws final ThreadPool threadPool = new TestThreadPool(getTestName()); try { - NonEmptyRollupIndexer indexer = new NonEmptyRollupIndexer(threadPool, job, state, null, - searchFunction, bulkFunction, failureConsumer, stateCheck) { + NonEmptyRollupIndexer indexer = new NonEmptyRollupIndexer( + threadPool, + job, + state, + null, + searchFunction, + bulkFunction, + failureConsumer, + stateCheck + ) { @Override protected void doNextBulk(BulkRequest request, ActionListener nextPhase) { nextPhase.onFailure(new RuntimeException("failed")); diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerTests.java 
b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerTests.java index 447702f6b7479..62ad949b7e762 100644 --- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerTests.java +++ b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerTests.java @@ -47,4 +47,3 @@ public void testCreateMetadata() { assertThat(value, equalTo(histogram.getInterval())); } } - diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupJobTaskTests.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupJobTaskTests.java index 5e4401a8f8494..927f9079513d9 100644 --- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupJobTaskTests.java +++ b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupJobTaskTests.java @@ -55,9 +55,7 @@ public class RollupJobTaskTests extends ESTestCase { - private static final Settings SETTINGS = Settings.builder() - .put(Node.NODE_NAME_SETTING.getKey(), "test") - .build(); + private static final Settings SETTINGS = Settings.builder().put(Node.NODE_NAME_SETTING.getKey(), "test").build(); private ThreadPool pool; @@ -78,12 +76,22 @@ public void testInitialStatusStopped() { when(client.settings()).thenReturn(Settings.EMPTY); SchedulerEngine schedulerEngine = new SchedulerEngine(SETTINGS, Clock.systemUTC()); TaskId taskId = new TaskId("node", 123); - RollupJobTask task = new RollupJobTask(1, "type", "action", taskId, job, - status, client, schedulerEngine, pool, Collections.emptyMap()); + RollupJobTask task = new RollupJobTask( + 1, + "type", + "action", + taskId, + job, + status, + client, + schedulerEngine, + pool, + Collections.emptyMap() + ); task.init(null, mock(TaskManager.class), taskId.toString(), 123); - assertThat(((RollupJobStatus)task.getStatus()).getIndexerState(), equalTo(IndexerState.STOPPED)); - assertThat(((RollupJobStatus)task.getStatus()).getPosition().size(), equalTo(1)); - assertTrue(((RollupJobStatus)task.getStatus()).getPosition().containsKey("foo")); + assertThat(((RollupJobStatus) task.getStatus()).getIndexerState(), equalTo(IndexerState.STOPPED)); + assertThat(((RollupJobStatus) task.getStatus()).getPosition().size(), equalTo(1)); + assertTrue(((RollupJobStatus) task.getStatus()).getPosition().containsKey("foo")); } public void testInitialStatusAborting() { @@ -93,12 +101,22 @@ public void testInitialStatusAborting() { when(client.settings()).thenReturn(Settings.EMPTY); SchedulerEngine schedulerEngine = new SchedulerEngine(SETTINGS, Clock.systemUTC()); TaskId taskId = new TaskId("node", 123); - RollupJobTask task = new RollupJobTask(1, "type", "action", taskId, job, - status, client, schedulerEngine, pool, Collections.emptyMap()); + RollupJobTask task = new RollupJobTask( + 1, + "type", + "action", + taskId, + job, + status, + client, + schedulerEngine, + pool, + Collections.emptyMap() + ); task.init(null, mock(TaskManager.class), taskId.toString(), 123); - assertThat(((RollupJobStatus)task.getStatus()).getIndexerState(), equalTo(IndexerState.STOPPED)); - assertThat(((RollupJobStatus)task.getStatus()).getPosition().size(), equalTo(1)); - assertTrue(((RollupJobStatus)task.getStatus()).getPosition().containsKey("foo")); + assertThat(((RollupJobStatus) task.getStatus()).getIndexerState(), equalTo(IndexerState.STOPPED)); + assertThat(((RollupJobStatus) task.getStatus()).getPosition().size(), equalTo(1)); + assertTrue(((RollupJobStatus) 
task.getStatus()).getPosition().containsKey("foo")); } public void testInitialStatusStopping() { @@ -108,12 +126,22 @@ public void testInitialStatusStopping() { when(client.settings()).thenReturn(Settings.EMPTY); SchedulerEngine schedulerEngine = new SchedulerEngine(SETTINGS, Clock.systemUTC()); TaskId taskId = new TaskId("node", 123); - RollupJobTask task = new RollupJobTask(1, "type", "action", taskId, job, - status, client, schedulerEngine, pool, Collections.emptyMap()); + RollupJobTask task = new RollupJobTask( + 1, + "type", + "action", + taskId, + job, + status, + client, + schedulerEngine, + pool, + Collections.emptyMap() + ); task.init(null, mock(TaskManager.class), taskId.toString(), 123); - assertThat(((RollupJobStatus)task.getStatus()).getIndexerState(), equalTo(IndexerState.STOPPED)); - assertThat(((RollupJobStatus)task.getStatus()).getPosition().size(), equalTo(1)); - assertTrue(((RollupJobStatus)task.getStatus()).getPosition().containsKey("foo")); + assertThat(((RollupJobStatus) task.getStatus()).getIndexerState(), equalTo(IndexerState.STOPPED)); + assertThat(((RollupJobStatus) task.getStatus()).getPosition().size(), equalTo(1)); + assertTrue(((RollupJobStatus) task.getStatus()).getPosition().containsKey("foo")); } public void testInitialStatusStarted() { @@ -123,12 +151,22 @@ public void testInitialStatusStarted() { when(client.settings()).thenReturn(Settings.EMPTY); SchedulerEngine schedulerEngine = new SchedulerEngine(SETTINGS, Clock.systemUTC()); TaskId taskId = new TaskId("node", 123); - RollupJobTask task = new RollupJobTask(1, "type", "action", taskId, job, - status, client, schedulerEngine, pool, Collections.emptyMap()); + RollupJobTask task = new RollupJobTask( + 1, + "type", + "action", + taskId, + job, + status, + client, + schedulerEngine, + pool, + Collections.emptyMap() + ); task.init(null, mock(TaskManager.class), taskId.toString(), 123); - assertThat(((RollupJobStatus)task.getStatus()).getIndexerState(), equalTo(IndexerState.STARTED)); - assertThat(((RollupJobStatus)task.getStatus()).getPosition().size(), equalTo(1)); - assertTrue(((RollupJobStatus)task.getStatus()).getPosition().containsKey("foo")); + assertThat(((RollupJobStatus) task.getStatus()).getIndexerState(), equalTo(IndexerState.STARTED)); + assertThat(((RollupJobStatus) task.getStatus()).getPosition().size(), equalTo(1)); + assertTrue(((RollupJobStatus) task.getStatus()).getPosition().containsKey("foo")); } public void testInitialStatusIndexingOldID() { @@ -138,12 +176,22 @@ public void testInitialStatusIndexingOldID() { when(client.settings()).thenReturn(Settings.EMPTY); SchedulerEngine schedulerEngine = new SchedulerEngine(SETTINGS, Clock.systemUTC()); TaskId taskId = new TaskId("node", 123); - RollupJobTask task = new RollupJobTask(1, "type", "action", taskId, job, - status, client, schedulerEngine, pool, Collections.emptyMap()); + RollupJobTask task = new RollupJobTask( + 1, + "type", + "action", + taskId, + job, + status, + client, + schedulerEngine, + pool, + Collections.emptyMap() + ); task.init(null, mock(TaskManager.class), taskId.toString(), 123); - assertThat(((RollupJobStatus)task.getStatus()).getIndexerState(), equalTo(IndexerState.STARTED)); - assertThat(((RollupJobStatus)task.getStatus()).getPosition().size(), equalTo(1)); - assertTrue(((RollupJobStatus)task.getStatus()).getPosition().containsKey("foo")); + assertThat(((RollupJobStatus) task.getStatus()).getIndexerState(), equalTo(IndexerState.STARTED)); + assertThat(((RollupJobStatus) task.getStatus()).getPosition().size(), 
equalTo(1)); + assertTrue(((RollupJobStatus) task.getStatus()).getPosition().containsKey("foo")); } public void testInitialStatusIndexingNewID() { @@ -153,12 +201,22 @@ public void testInitialStatusIndexingNewID() { when(client.settings()).thenReturn(Settings.EMPTY); SchedulerEngine schedulerEngine = new SchedulerEngine(SETTINGS, Clock.systemUTC()); TaskId taskId = new TaskId("node", 123); - RollupJobTask task = new RollupJobTask(1, "type", "action", taskId, job, - status, client, schedulerEngine, pool, Collections.emptyMap()); + RollupJobTask task = new RollupJobTask( + 1, + "type", + "action", + taskId, + job, + status, + client, + schedulerEngine, + pool, + Collections.emptyMap() + ); task.init(null, mock(TaskManager.class), taskId.toString(), 123); - assertThat(((RollupJobStatus)task.getStatus()).getIndexerState(), equalTo(IndexerState.STARTED)); - assertThat(((RollupJobStatus)task.getStatus()).getPosition().size(), equalTo(1)); - assertTrue(((RollupJobStatus)task.getStatus()).getPosition().containsKey("foo")); + assertThat(((RollupJobStatus) task.getStatus()).getIndexerState(), equalTo(IndexerState.STARTED)); + assertThat(((RollupJobStatus) task.getStatus()).getPosition().size(), equalTo(1)); + assertTrue(((RollupJobStatus) task.getStatus()).getPosition().containsKey("foo")); } public void testNoInitialStatus() { @@ -167,11 +225,21 @@ public void testNoInitialStatus() { when(client.settings()).thenReturn(Settings.EMPTY); SchedulerEngine schedulerEngine = new SchedulerEngine(SETTINGS, Clock.systemUTC()); TaskId taskId = new TaskId("node", 123); - RollupJobTask task = new RollupJobTask(1, "type", "action", taskId, job, - null, client, schedulerEngine, pool, Collections.emptyMap()); + RollupJobTask task = new RollupJobTask( + 1, + "type", + "action", + taskId, + job, + null, + client, + schedulerEngine, + pool, + Collections.emptyMap() + ); task.init(null, mock(TaskManager.class), taskId.toString(), 123); - assertThat(((RollupJobStatus)task.getStatus()).getIndexerState(), equalTo(IndexerState.STOPPED)); - assertNull(((RollupJobStatus)task.getStatus()).getPosition()); + assertThat(((RollupJobStatus) task.getStatus()).getIndexerState(), equalTo(IndexerState.STOPPED)); + assertNull(((RollupJobStatus) task.getStatus()).getPosition()); } public void testStartWhenStarted() throws InterruptedException { @@ -181,12 +249,22 @@ public void testStartWhenStarted() throws InterruptedException { when(client.settings()).thenReturn(Settings.EMPTY); SchedulerEngine schedulerEngine = new SchedulerEngine(SETTINGS, Clock.systemUTC()); TaskId taskId = new TaskId("node", 123); - RollupJobTask task = new RollupJobTask(1, "type", "action", taskId, job, - status, client, schedulerEngine, pool, Collections.emptyMap()); + RollupJobTask task = new RollupJobTask( + 1, + "type", + "action", + taskId, + job, + status, + client, + schedulerEngine, + pool, + Collections.emptyMap() + ); task.init(null, mock(TaskManager.class), taskId.toString(), 123); - assertThat(((RollupJobStatus)task.getStatus()).getIndexerState(), equalTo(IndexerState.STARTED)); - assertThat(((RollupJobStatus)task.getStatus()).getPosition().size(), equalTo(1)); - assertTrue(((RollupJobStatus)task.getStatus()).getPosition().containsKey("foo")); + assertThat(((RollupJobStatus) task.getStatus()).getIndexerState(), equalTo(IndexerState.STARTED)); + assertThat(((RollupJobStatus) task.getStatus()).getPosition().size(), equalTo(1)); + assertTrue(((RollupJobStatus) task.getStatus()).getPosition().containsKey("foo")); CountDownLatch latch = new 
CountDownLatch(1); task.start(new ActionListener<>() { @@ -214,11 +292,23 @@ public void testStartWhenStopping() throws InterruptedException { AtomicInteger counter = new AtomicInteger(0); TaskId taskId = new TaskId("node", 123); - RollupJobTask task = new RollupJobTask(1, "type", "action", taskId, job, - null, client, schedulerEngine, pool, Collections.emptyMap()) { + RollupJobTask task = new RollupJobTask( + 1, + "type", + "action", + taskId, + job, + null, + client, + schedulerEngine, + pool, + Collections.emptyMap() + ) { @Override - public void updatePersistentTaskState(PersistentTaskState taskState, - ActionListener> listener) { + public void updatePersistentTaskState( + PersistentTaskState taskState, + ActionListener> listener + ) { assertThat(taskState, instanceOf(RollupJobStatus.class)); int c = counter.get(); if (c == 0) { @@ -230,21 +320,28 @@ public void updatePersistentTaskState(PersistentTaskState taskState, } else { fail("Should not have updated persistent statuses > 3 times"); } - listener.onResponse(new PersistentTasksCustomMetadata.PersistentTask<>("foo", RollupField.TASK_NAME, job, 1, - new PersistentTasksCustomMetadata.Assignment("foo", "foo"))); + listener.onResponse( + new PersistentTasksCustomMetadata.PersistentTask<>( + "foo", + RollupField.TASK_NAME, + job, + 1, + new PersistentTasksCustomMetadata.Assignment("foo", "foo") + ) + ); counter.incrementAndGet(); } }; task.init(null, mock(TaskManager.class), taskId.toString(), 123); - assertThat(((RollupJobStatus)task.getStatus()).getIndexerState(), equalTo(IndexerState.STOPPED)); - assertNull(((RollupJobStatus)task.getStatus()).getPosition()); + assertThat(((RollupJobStatus) task.getStatus()).getIndexerState(), equalTo(IndexerState.STOPPED)); + assertNull(((RollupJobStatus) task.getStatus()).getPosition()); CountDownLatch latch = new CountDownLatch(1); task.start(new ActionListener() { @Override public void onResponse(StartRollupJobAction.Response response) { assertTrue(response.isStarted()); - assertThat(((RollupJobStatus)task.getStatus()).getIndexerState(), equalTo(IndexerState.STARTED)); + assertThat(((RollupJobStatus) task.getStatus()).getIndexerState(), equalTo(IndexerState.STARTED)); latch.countDown(); } @@ -256,7 +353,7 @@ public void onFailure(Exception e) { assertUnblockIn10s(latch); task.triggered(new SchedulerEngine.Event(RollupJobTask.SCHEDULE_NAME + "_" + job.getConfig().getId(), 123, 123)); - assertThat(((RollupJobStatus)task.getStatus()).getIndexerState(), equalTo(IndexerState.INDEXING)); + assertThat(((RollupJobStatus) task.getStatus()).getIndexerState(), equalTo(IndexerState.INDEXING)); assertThat(task.getStats().getNumInvocations(), equalTo(1L)); // wait until the search request is send, this is unblocked in the client @@ -285,8 +382,10 @@ public void onResponse(StartRollupJobAction.Response response) { @Override public void onFailure(Exception e) { - assertThat(e.getMessage(), equalTo("Cannot start task for Rollup Job [" - + job.getConfig().getId() + "] because state was [STOPPING]")); + assertThat( + e.getMessage(), + equalTo("Cannot start task for Rollup Job [" + job.getConfig().getId() + "] because state was [STOPPING]") + ); latch2.countDown(); } }); @@ -304,28 +403,47 @@ public void testStartWhenStopped() throws InterruptedException { when(client.settings()).thenReturn(Settings.EMPTY); SchedulerEngine schedulerEngine = mock(SchedulerEngine.class); TaskId taskId = new TaskId("node", 123); - RollupJobTask task = new RollupJobTask(1, "type", "action", taskId, job, - status, client, schedulerEngine, 
pool, Collections.emptyMap()) { + RollupJobTask task = new RollupJobTask( + 1, + "type", + "action", + taskId, + job, + status, + client, + schedulerEngine, + pool, + Collections.emptyMap() + ) { @Override - public void updatePersistentTaskState(PersistentTaskState taskState, - ActionListener> listener) { + public void updatePersistentTaskState( + PersistentTaskState taskState, + ActionListener> listener + ) { assertThat(taskState, instanceOf(RollupJobStatus.class)); assertThat(((RollupJobStatus) taskState).getIndexerState(), equalTo(IndexerState.STARTED)); - listener.onResponse(new PersistentTasksCustomMetadata.PersistentTask<>("foo", RollupField.TASK_NAME, job, 1, - new PersistentTasksCustomMetadata.Assignment("foo", "foo"))); + listener.onResponse( + new PersistentTasksCustomMetadata.PersistentTask<>( + "foo", + RollupField.TASK_NAME, + job, + 1, + new PersistentTasksCustomMetadata.Assignment("foo", "foo") + ) + ); } }; task.init(null, mock(TaskManager.class), taskId.toString(), 123); - assertThat(((RollupJobStatus)task.getStatus()).getIndexerState(), equalTo(IndexerState.STOPPED)); - assertThat(((RollupJobStatus)task.getStatus()).getPosition().size(), equalTo(1)); - assertTrue(((RollupJobStatus)task.getStatus()).getPosition().containsKey("foo")); + assertThat(((RollupJobStatus) task.getStatus()).getIndexerState(), equalTo(IndexerState.STOPPED)); + assertThat(((RollupJobStatus) task.getStatus()).getPosition().size(), equalTo(1)); + assertTrue(((RollupJobStatus) task.getStatus()).getPosition().containsKey("foo")); CountDownLatch latch = new CountDownLatch(1); task.start(new ActionListener() { @Override public void onResponse(StartRollupJobAction.Response response) { assertTrue(response.isStarted()); - assertThat(((RollupJobStatus)task.getStatus()).getIndexerState(), equalTo(IndexerState.STARTED)); + assertThat(((RollupJobStatus) task.getStatus()).getIndexerState(), equalTo(IndexerState.STARTED)); latch.countDown(); } @@ -344,28 +462,47 @@ public void testTriggerUnrelated() throws InterruptedException { when(client.settings()).thenReturn(Settings.EMPTY); SchedulerEngine schedulerEngine = mock(SchedulerEngine.class); TaskId taskId = new TaskId("node", 123); - RollupJobTask task = new RollupJobTask(1, "type", "action", taskId, job, - status, client, schedulerEngine, pool, Collections.emptyMap()) { + RollupJobTask task = new RollupJobTask( + 1, + "type", + "action", + taskId, + job, + status, + client, + schedulerEngine, + pool, + Collections.emptyMap() + ) { @Override - public void updatePersistentTaskState(PersistentTaskState taskState, - ActionListener> listener) { + public void updatePersistentTaskState( + PersistentTaskState taskState, + ActionListener> listener + ) { assertThat(taskState, instanceOf(RollupJobStatus.class)); assertThat(((RollupJobStatus) taskState).getIndexerState(), equalTo(IndexerState.STARTED)); - listener.onResponse(new PersistentTasksCustomMetadata.PersistentTask<>("foo", RollupField.TASK_NAME, job, 1, - new PersistentTasksCustomMetadata.Assignment("foo", "foo"))); + listener.onResponse( + new PersistentTasksCustomMetadata.PersistentTask<>( + "foo", + RollupField.TASK_NAME, + job, + 1, + new PersistentTasksCustomMetadata.Assignment("foo", "foo") + ) + ); } }; task.init(null, mock(TaskManager.class), taskId.toString(), 123); - assertThat(((RollupJobStatus)task.getStatus()).getIndexerState(), equalTo(IndexerState.STOPPED)); - assertThat(((RollupJobStatus)task.getStatus()).getPosition().size(), equalTo(1)); - 
assertTrue(((RollupJobStatus)task.getStatus()).getPosition().containsKey("foo")); + assertThat(((RollupJobStatus) task.getStatus()).getIndexerState(), equalTo(IndexerState.STOPPED)); + assertThat(((RollupJobStatus) task.getStatus()).getPosition().size(), equalTo(1)); + assertTrue(((RollupJobStatus) task.getStatus()).getPosition().containsKey("foo")); CountDownLatch latch = new CountDownLatch(1); task.start(new ActionListener() { @Override public void onResponse(StartRollupJobAction.Response response) { assertTrue(response.isStarted()); - assertThat(((RollupJobStatus)task.getStatus()).getIndexerState(), equalTo(IndexerState.STARTED)); + assertThat(((RollupJobStatus) task.getStatus()).getIndexerState(), equalTo(IndexerState.STARTED)); latch.countDown(); } @@ -377,7 +514,7 @@ public void onFailure(Exception e) { latch.await(3, TimeUnit.SECONDS); task.triggered(new SchedulerEngine.Event("unrelated", 123, 123)); - assertThat(((RollupJobStatus)task.getStatus()).getIndexerState(), equalTo(IndexerState.STARTED)); + assertThat(((RollupJobStatus) task.getStatus()).getIndexerState(), equalTo(IndexerState.STARTED)); } public void testTrigger() throws InterruptedException { @@ -387,27 +524,46 @@ public void testTrigger() throws InterruptedException { when(client.threadPool()).thenReturn(pool); SchedulerEngine schedulerEngine = mock(SchedulerEngine.class); TaskId taskId = new TaskId("node", 123); - RollupJobTask task = new RollupJobTask(1, "type", "action", taskId, job, - null, client, schedulerEngine, pool, Collections.emptyMap()) { + RollupJobTask task = new RollupJobTask( + 1, + "type", + "action", + taskId, + job, + null, + client, + schedulerEngine, + pool, + Collections.emptyMap() + ) { @Override - public void updatePersistentTaskState(PersistentTaskState taskState, - ActionListener> listener) { + public void updatePersistentTaskState( + PersistentTaskState taskState, + ActionListener> listener + ) { assertThat(taskState, instanceOf(RollupJobStatus.class)); assertThat(((RollupJobStatus) taskState).getIndexerState(), equalTo(IndexerState.STARTED)); - listener.onResponse(new PersistentTasksCustomMetadata.PersistentTask<>("foo", RollupField.TASK_NAME, job, 1, - new PersistentTasksCustomMetadata.Assignment("foo", "foo"))); + listener.onResponse( + new PersistentTasksCustomMetadata.PersistentTask<>( + "foo", + RollupField.TASK_NAME, + job, + 1, + new PersistentTasksCustomMetadata.Assignment("foo", "foo") + ) + ); } }; task.init(null, mock(TaskManager.class), taskId.toString(), 123); - assertThat(((RollupJobStatus)task.getStatus()).getIndexerState(), equalTo(IndexerState.STOPPED)); - assertNull(((RollupJobStatus)task.getStatus()).getPosition()); + assertThat(((RollupJobStatus) task.getStatus()).getIndexerState(), equalTo(IndexerState.STOPPED)); + assertNull(((RollupJobStatus) task.getStatus()).getPosition()); CountDownLatch latch = new CountDownLatch(1); task.start(new ActionListener() { @Override public void onResponse(StartRollupJobAction.Response response) { assertTrue(response.isStarted()); - assertThat(((RollupJobStatus)task.getStatus()).getIndexerState(), equalTo(IndexerState.STARTED)); + assertThat(((RollupJobStatus) task.getStatus()).getIndexerState(), equalTo(IndexerState.STARTED)); latch.countDown(); } @@ -419,7 +575,7 @@ public void onFailure(Exception e) { latch.await(3, TimeUnit.SECONDS); task.triggered(new SchedulerEngine.Event(RollupJobTask.SCHEDULE_NAME + "_" + job.getConfig().getId(), 123, 123)); - assertThat(((RollupJobStatus)task.getStatus()).getIndexerState(), 
equalTo(IndexerState.INDEXING)); + assertThat(((RollupJobStatus) task.getStatus()).getIndexerState(), equalTo(IndexerState.INDEXING)); assertThat(task.getStats().getNumInvocations(), equalTo(1L)); } @@ -451,23 +607,42 @@ public void testTriggerWithoutHeaders() throws Exception { // Wait before progressing latch.await(); - ((ActionListener)invocationOnMock.getArguments()[2]).onResponse(r); + ((ActionListener) invocationOnMock.getArguments()[2]).onResponse(r); return null; }).when(client).execute(anyObject(), anyObject(), anyObject()); SchedulerEngine schedulerEngine = mock(SchedulerEngine.class); TaskId taskId = new TaskId("node", 123); - RollupJobTask task = new RollupJobTask(1, "type", "action", taskId, job, - null, client, schedulerEngine, pool, Collections.emptyMap()) { + RollupJobTask task = new RollupJobTask( + 1, + "type", + "action", + taskId, + job, + null, + client, + schedulerEngine, + pool, + Collections.emptyMap() + ) { @Override - public void updatePersistentTaskState(PersistentTaskState taskState, - ActionListener> listener) { + public void updatePersistentTaskState( + PersistentTaskState taskState, + ActionListener> listener + ) { Integer counterValue = counter.getAndIncrement(); if (counterValue == 0) { assertThat(taskState, instanceOf(RollupJobStatus.class)); assertThat(((RollupJobStatus) taskState).getIndexerState(), equalTo(IndexerState.STARTED)); - listener.onResponse(new PersistentTasksCustomMetadata.PersistentTask<>("foo", RollupField.TASK_NAME, job, 1, - new PersistentTasksCustomMetadata.Assignment("foo", "foo"))); + listener.onResponse( + new PersistentTasksCustomMetadata.PersistentTask<>( + "foo", + RollupField.TASK_NAME, + job, + 1, + new PersistentTasksCustomMetadata.Assignment("foo", "foo") + ) + ); } else if (counterValue == 1) { finished.set(true); } @@ -475,8 +650,8 @@ public void updatePersistentTaskState(PersistentTaskState taskState, } }; task.init(null, mock(TaskManager.class), taskId.toString(), 123); - assertThat(((RollupJobStatus)task.getStatus()).getIndexerState(), equalTo(IndexerState.STOPPED)); - assertNull(((RollupJobStatus)task.getStatus()).getPosition()); + assertThat(((RollupJobStatus) task.getStatus()).getIndexerState(), equalTo(IndexerState.STOPPED)); + assertNull(((RollupJobStatus) task.getStatus()).getPosition()); task.start(new ActionListener<>() { @Override @@ -494,7 +669,7 @@ public void onFailure(Exception e) { assertBusy(() -> assertTrue(started.get())); task.triggered(new SchedulerEngine.Event(RollupJobTask.SCHEDULE_NAME + "_" + job.getConfig().getId(), 123, 123)); - assertThat(((RollupJobStatus)task.getStatus()).getIndexerState(), equalTo(IndexerState.INDEXING)); + assertThat(((RollupJobStatus) task.getStatus()).getIndexerState(), equalTo(IndexerState.INDEXING)); assertThat(task.getStats().getNumInvocations(), equalTo(1L)); // Allow search response to return now latch.countDown(); @@ -537,23 +712,42 @@ public void testTriggerWithHeaders() throws Exception { // Wait before progressing latch.await(); - ((ActionListener)invocationOnMock.getArguments()[2]).onResponse(r); + ((ActionListener) invocationOnMock.getArguments()[2]).onResponse(r); return null; }).when(client).execute(anyObject(), anyObject(), anyObject()); SchedulerEngine schedulerEngine = mock(SchedulerEngine.class); TaskId taskId = new TaskId("node", 123); - RollupJobTask task = new RollupJobTask(1, "type", "action", taskId, job, - null, client, schedulerEngine, pool, Collections.emptyMap()) { + RollupJobTask task = new RollupJobTask( + 1, + "type", + "action", + taskId, + 
job, + null, + client, + schedulerEngine, + pool, + Collections.emptyMap() + ) { @Override - public void updatePersistentTaskState(PersistentTaskState taskState, - ActionListener> listener) { + public void updatePersistentTaskState( + PersistentTaskState taskState, + ActionListener> listener + ) { Integer counterValue = counter.getAndIncrement(); if (counterValue == 0) { assertThat(taskState, instanceOf(RollupJobStatus.class)); assertThat(((RollupJobStatus) taskState).getIndexerState(), equalTo(IndexerState.STARTED)); - listener.onResponse(new PersistentTasksCustomMetadata.PersistentTask<>("foo", RollupField.TASK_NAME, job, 1, - new PersistentTasksCustomMetadata.Assignment("foo", "foo"))); + listener.onResponse( + new PersistentTasksCustomMetadata.PersistentTask<>( + "foo", + RollupField.TASK_NAME, + job, + 1, + new PersistentTasksCustomMetadata.Assignment("foo", "foo") + ) + ); } else if (counterValue == 1) { finished.set(true); } @@ -561,8 +755,8 @@ public void updatePersistentTaskState(PersistentTaskState taskState, } }; task.init(null, mock(TaskManager.class), taskId.toString(), 123); - assertThat(((RollupJobStatus)task.getStatus()).getIndexerState(), equalTo(IndexerState.STOPPED)); - assertNull(((RollupJobStatus)task.getStatus()).getPosition()); + assertThat(((RollupJobStatus) task.getStatus()).getIndexerState(), equalTo(IndexerState.STOPPED)); + assertNull(((RollupJobStatus) task.getStatus()).getPosition()); task.start(new ActionListener<>() { @Override @@ -580,7 +774,7 @@ public void onFailure(Exception e) { assertBusy(() -> assertTrue(started.get())); task.triggered(new SchedulerEngine.Event(RollupJobTask.SCHEDULE_NAME + "_" + job.getConfig().getId(), 123, 123)); - assertThat(((RollupJobStatus)task.getStatus()).getIndexerState(), equalTo(IndexerState.INDEXING)); + assertThat(((RollupJobStatus) task.getStatus()).getIndexerState(), equalTo(IndexerState.INDEXING)); assertThat(task.getStats().getNumInvocations(), equalTo(1L)); // Allow search response to return now latch.countDown(); @@ -623,24 +817,43 @@ public void testSaveStateChangesIDScheme() throws Exception { // Wait before progressing latch.await(); - ((ActionListener)invocationOnMock.getArguments()[2]).onResponse(r); + ((ActionListener) invocationOnMock.getArguments()[2]).onResponse(r); return null; }).when(client).execute(anyObject(), anyObject(), anyObject()); SchedulerEngine schedulerEngine = mock(SchedulerEngine.class); RollupJobStatus status = new RollupJobStatus(IndexerState.STOPPED, null); TaskId taskId = new TaskId("node", 123); - RollupJobTask task = new RollupJobTask(1, "type", "action", taskId, job, - status, client, schedulerEngine, pool, Collections.emptyMap()) { + RollupJobTask task = new RollupJobTask( + 1, + "type", + "action", + taskId, + job, + status, + client, + schedulerEngine, + pool, + Collections.emptyMap() + ) { @Override - public void updatePersistentTaskState(PersistentTaskState taskState, - ActionListener> listener) { + public void updatePersistentTaskState( + PersistentTaskState taskState, + ActionListener> listener + ) { Integer counterValue = counter.getAndIncrement(); if (counterValue == 0) { assertThat(taskState, instanceOf(RollupJobStatus.class)); assertThat(((RollupJobStatus) taskState).getIndexerState(), equalTo(IndexerState.STARTED)); - listener.onResponse(new PersistentTasksCustomMetadata.PersistentTask<>("foo", RollupField.TASK_NAME, job, 1, - new PersistentTasksCustomMetadata.Assignment("foo", "foo"))); + listener.onResponse( + new PersistentTasksCustomMetadata.PersistentTask<>( + "foo", 
+ RollupField.TASK_NAME, + job, + 1, + new PersistentTasksCustomMetadata.Assignment("foo", "foo") + ) + ); } else if (counterValue == 1) { finished.set(true); } @@ -648,8 +861,8 @@ public void updatePersistentTaskState(PersistentTaskState taskState, } }; task.init(null, mock(TaskManager.class), taskId.toString(), 123); - assertThat(((RollupJobStatus)task.getStatus()).getIndexerState(), equalTo(IndexerState.STOPPED)); - assertNull(((RollupJobStatus)task.getStatus()).getPosition()); + assertThat(((RollupJobStatus) task.getStatus()).getIndexerState(), equalTo(IndexerState.STOPPED)); + assertNull(((RollupJobStatus) task.getStatus()).getPosition()); task.start(new ActionListener<>() { @Override @@ -667,7 +880,7 @@ public void onFailure(Exception e) { assertBusy(() -> assertTrue(started.get())); task.triggered(new SchedulerEngine.Event(RollupJobTask.SCHEDULE_NAME + "_" + job.getConfig().getId(), 123, 123)); - assertThat(((RollupJobStatus)task.getStatus()).getIndexerState(), equalTo(IndexerState.INDEXING)); + assertThat(((RollupJobStatus) task.getStatus()).getIndexerState(), equalTo(IndexerState.INDEXING)); assertThat(task.getStats().getNumInvocations(), equalTo(1L)); // Allow search response to return now latch.countDown(); @@ -683,10 +896,20 @@ public void testStopWhenStopped() throws InterruptedException { when(client.settings()).thenReturn(Settings.EMPTY); SchedulerEngine schedulerEngine = new SchedulerEngine(SETTINGS, Clock.systemUTC()); TaskId taskId = new TaskId("node", 123); - RollupJobTask task = new RollupJobTask(1, "type", "action", taskId, job, - status, client, schedulerEngine, pool, Collections.emptyMap()); + RollupJobTask task = new RollupJobTask( + 1, + "type", + "action", + taskId, + job, + status, + client, + schedulerEngine, + pool, + Collections.emptyMap() + ); task.init(null, mock(TaskManager.class), taskId.toString(), 123); - assertThat(((RollupJobStatus)task.getStatus()).getIndexerState(), equalTo(IndexerState.STOPPED)); + assertThat(((RollupJobStatus) task.getStatus()).getIndexerState(), equalTo(IndexerState.STOPPED)); CountDownLatch latch = new CountDownLatch(1); task.stop(new ActionListener() { @@ -713,11 +936,23 @@ public void testStopWhenStopping() throws InterruptedException { AtomicInteger counter = new AtomicInteger(0); TaskId taskId = new TaskId("node", 123); - RollupJobTask task = new RollupJobTask(1, "type", "action", taskId, job, - null, client, schedulerEngine, pool, Collections.emptyMap()) { + RollupJobTask task = new RollupJobTask( + 1, + "type", + "action", + taskId, + job, + null, + client, + schedulerEngine, + pool, + Collections.emptyMap() + ) { @Override - public void updatePersistentTaskState(PersistentTaskState taskState, - ActionListener> listener) { + public void updatePersistentTaskState( + PersistentTaskState taskState, + ActionListener> listener + ) { assertThat(taskState, instanceOf(RollupJobStatus.class)); int c = counter.get(); if (c == 0) { @@ -731,21 +966,28 @@ public void updatePersistentTaskState(PersistentTaskState taskState, } else { fail("Should not have updated persistent statuses > 4 times"); } - listener.onResponse(new PersistentTasksCustomMetadata.PersistentTask<>("foo", RollupField.TASK_NAME, job, 1, - new PersistentTasksCustomMetadata.Assignment("foo", "foo"))); + listener.onResponse( + new PersistentTasksCustomMetadata.PersistentTask<>( + "foo", + RollupField.TASK_NAME, + job, + 1, + new PersistentTasksCustomMetadata.Assignment("foo", "foo") + ) + ); counter.incrementAndGet(); } }; task.init(null, mock(TaskManager.class), 
taskId.toString(), 123); - assertThat(((RollupJobStatus)task.getStatus()).getIndexerState(), equalTo(IndexerState.STOPPED)); - assertNull(((RollupJobStatus)task.getStatus()).getPosition()); + assertThat(((RollupJobStatus) task.getStatus()).getIndexerState(), equalTo(IndexerState.STOPPED)); + assertNull(((RollupJobStatus) task.getStatus()).getPosition()); CountDownLatch latch = new CountDownLatch(1); task.start(new ActionListener() { @Override public void onResponse(StartRollupJobAction.Response response) { assertTrue(response.isStarted()); - assertThat(((RollupJobStatus)task.getStatus()).getIndexerState(), equalTo(IndexerState.STARTED)); + assertThat(((RollupJobStatus) task.getStatus()).getIndexerState(), equalTo(IndexerState.STARTED)); latch.countDown(); } @@ -757,7 +999,7 @@ public void onFailure(Exception e) { assertUnblockIn10s(latch); task.triggered(new SchedulerEngine.Event(RollupJobTask.SCHEDULE_NAME + "_" + job.getConfig().getId(), 123, 123)); - assertThat(((RollupJobStatus)task.getStatus()).getIndexerState(), equalTo(IndexerState.INDEXING)); + assertThat(((RollupJobStatus) task.getStatus()).getIndexerState(), equalTo(IndexerState.INDEXING)); assertThat(task.getStats().getNumInvocations(), equalTo(1L)); // wait until the search request is send, this is unblocked in the client @@ -806,19 +1048,29 @@ public void testStopWhenAborting() throws InterruptedException { CountDownLatch latch = new CountDownLatch(2); // This isn't really realistic, since start/stop/cancelled are all synchronized... - // the task would end before stop could be called. But to help test out all pathways, + // the task would end before stop could be called. But to help test out all pathways, // just in case, we can override markAsCompleted so it's a no-op and test how stop // handles the situation TaskId taskId = new TaskId("node", 123); - RollupJobTask task = new RollupJobTask(1, "type", "action", taskId, job, - status, client, schedulerEngine, pool, Collections.emptyMap()) { + RollupJobTask task = new RollupJobTask( + 1, + "type", + "action", + taskId, + job, + status, + client, + schedulerEngine, + pool, + Collections.emptyMap() + ) { @Override public void markAsCompleted() { latch.countDown(); } }; task.init(null, mock(TaskManager.class), taskId.toString(), 123); - assertThat(((RollupJobStatus)task.getStatus()).getIndexerState(), equalTo(IndexerState.STOPPED)); + assertThat(((RollupJobStatus) task.getStatus()).getIndexerState(), equalTo(IndexerState.STOPPED)); task.onCancelled(); task.stop(new ActionListener() { @@ -830,8 +1082,10 @@ public void onResponse(StopRollupJobAction.Response response) { @Override public void onFailure(Exception e) { - assertThat(e.getMessage(), equalTo("Cannot stop task for Rollup Job [" - + job.getConfig().getId() + "] because state was [ABORTING]")); + assertThat( + e.getMessage(), + equalTo("Cannot stop task for Rollup Job [" + job.getConfig().getId() + "] because state was [ABORTING]") + ); latch.countDown(); } }); @@ -850,8 +1104,11 @@ private NoOpClient getEmptySearchResponseClient(CountDownLatch unblock, CountDow return new NoOpClient(getTestName()) { @SuppressWarnings("unchecked") @Override - protected - void doExecute(ActionType action, Request request, ActionListener listener) { + protected void doExecute( + ActionType action, + Request request, + ActionListener listener + ) { unblock.countDown(); assertUnblockIn10s(block); listener.onResponse((Response) mock(SearchResponse.class)); diff --git 
a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/v2/RollupActionSingleNodeTests.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/v2/RollupActionSingleNodeTests.java index bbfeea1b196f8..0d2c22f3a9ab6 100644 --- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/v2/RollupActionSingleNodeTests.java +++ b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/v2/RollupActionSingleNodeTests.java @@ -83,7 +83,7 @@ import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; -@LuceneTestCase.AwaitsFix(bugUrl="https://github.com/elastic/elasticsearch/issues/69799") +@LuceneTestCase.AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/69799") public class RollupActionSingleNodeTests extends ESSingleNodeTestCase { private static final DateFormatter DATE_FORMATTER = DateFormatter.forPattern("yyyy-MM-dd'T'HH:mm:ss.SSSXXX"); @@ -97,8 +97,13 @@ public class RollupActionSingleNodeTests extends ESSingleNodeTestCase { @Override protected Collection> getPlugins() { - return List.of(LocalStateCompositeXPackPlugin.class, Rollup.class, AnalyticsPlugin.class, - AggregateMetricMapperPlugin.class, DataStreamsPlugin.class); + return List.of( + LocalStateCompositeXPackPlugin.class, + Rollup.class, + AnalyticsPlugin.class, + AggregateMetricMapperPlugin.class, + DataStreamsPlugin.class + ); } @Before @@ -108,14 +113,23 @@ public void setup() { startTime = randomLongBetween(946769284000L, 1607470084000L); // random date between 2000-2020 docCount = randomIntBetween(10, 1000); - client().admin().indices().prepareCreate(index) + client().admin() + .indices() + .prepareCreate(index) .setSettings(Settings.builder().put("index.number_of_shards", 1).build()) .setMapping( - "date_1", "type=date", - "numeric_1", "type=double", - "numeric_2", "type=float", - "numeric_nonaggregatable", "type=double,doc_values=false", - "categorical_1", "type=keyword").get(); + "date_1", + "type=date", + "numeric_1", + "type=double", + "numeric_2", + "type=float", + "numeric_nonaggregatable", + "type=double,doc_values=false", + "categorical_1", + "type=keyword" + ) + .get(); } @Override @@ -133,14 +147,16 @@ public void tearDown() throws Exception { public void testRollupShardIndexerCleansTempFiles() throws IOException { // create rollup config and index documents into source index RollupActionDateHistogramGroupConfig dateHistogramGroupConfig = randomRollupActionDateHistogramGroupConfig("date_1"); - SourceSupplier sourceSupplier = () -> XContentFactory.jsonBuilder().startObject() + SourceSupplier sourceSupplier = () -> XContentFactory.jsonBuilder() + .startObject() .field("date_1", randomDateForInterval(dateHistogramGroupConfig.getInterval())) .field("categorical_1", randomAlphaOfLength(1)) .field("numeric_1", randomDouble()) .endObject(); RollupActionConfig config = new RollupActionConfig( new RollupActionGroupConfig(dateHistogramGroupConfig, null, new TermsGroupConfig("categorical_1")), - Collections.singletonList(new MetricConfig("numeric_1", Collections.singletonList("max")))); + Collections.singletonList(new MetricConfig("numeric_1", Collections.singletonList("max"))) + ); bulkIndex(sourceSupplier); IndicesService indexServices = getInstanceFromNode(IndicesService.class); @@ -157,14 +173,16 @@ public void testRollupShardIndexerCleansTempFiles() throws IOException { public void testCannotRollupToExistingIndex() throws Exception { RollupActionDateHistogramGroupConfig dateHistogramGroupConfig = 
randomRollupActionDateHistogramGroupConfig("date_1"); - SourceSupplier sourceSupplier = () -> XContentFactory.jsonBuilder().startObject() + SourceSupplier sourceSupplier = () -> XContentFactory.jsonBuilder() + .startObject() .field("date_1", randomDateForInterval(dateHistogramGroupConfig.getInterval())) .field("categorical_1", randomAlphaOfLength(1)) .field("numeric_1", randomDouble()) .endObject(); RollupActionConfig config = new RollupActionConfig( new RollupActionGroupConfig(dateHistogramGroupConfig, null, new TermsGroupConfig("categorical_1")), - Collections.singletonList(new MetricConfig("numeric_1", Collections.singletonList("max")))); + Collections.singletonList(new MetricConfig("numeric_1", Collections.singletonList("max"))) + ); bulkIndex(sourceSupplier); rollup(index, rollupIndex, config); assertRollupIndex(config, index, rollupIndex); @@ -176,39 +194,46 @@ public void testTemporaryIndexCannotBeCreatedAlreadyExists() { RollupActionDateHistogramGroupConfig dateHistogramGroupConfig = randomRollupActionDateHistogramGroupConfig("date_1"); RollupActionConfig config = new RollupActionConfig( new RollupActionGroupConfig(dateHistogramGroupConfig, null, new TermsGroupConfig("categorical_1")), - Collections.singletonList(new MetricConfig("numeric_1", Collections.singletonList("max")))); + Collections.singletonList(new MetricConfig("numeric_1", Collections.singletonList("max"))) + ); assertTrue(client().admin().indices().prepareCreate(".rolluptmp-" + rollupIndex).get().isAcknowledged()); - Exception exception = expectThrows(ElasticsearchException.class, () -> rollup(index, rollupIndex, config)); + Exception exception = expectThrows(ElasticsearchException.class, () -> rollup(index, rollupIndex, config)); assertThat(exception.getMessage(), containsString("already exists")); } public void testCannotRollupWhileOtherRollupInProgress() throws Exception { RollupActionDateHistogramGroupConfig dateHistogramGroupConfig = randomRollupActionDateHistogramGroupConfig("date_1"); - SourceSupplier sourceSupplier = () -> XContentFactory.jsonBuilder().startObject() + SourceSupplier sourceSupplier = () -> XContentFactory.jsonBuilder() + .startObject() .field("date_1", randomDateForInterval(dateHistogramGroupConfig.getInterval())) .field("categorical_1", randomAlphaOfLength(1)) .field("numeric_1", randomDouble()) .endObject(); RollupActionConfig config = new RollupActionConfig( new RollupActionGroupConfig(dateHistogramGroupConfig, null, new TermsGroupConfig("categorical_1")), - Collections.singletonList(new MetricConfig("numeric_1", Collections.singletonList("max")))); + Collections.singletonList(new MetricConfig("numeric_1", Collections.singletonList("max"))) + ); bulkIndex(sourceSupplier); client().execute(RollupAction.INSTANCE, new RollupAction.Request(index, rollupIndex, config), ActionListener.wrap(() -> {})); - ResourceAlreadyExistsException exception = expectThrows(ResourceAlreadyExistsException.class, - () -> rollup(index, rollupIndex, config)); + ResourceAlreadyExistsException exception = expectThrows( + ResourceAlreadyExistsException.class, + () -> rollup(index, rollupIndex, config) + ); assertThat(exception.getMessage(), containsString(".rolluptmp-" + rollupIndex)); } public void testTermsGrouping() throws IOException { RollupActionDateHistogramGroupConfig dateHistogramGroupConfig = randomRollupActionDateHistogramGroupConfig("date_1"); - SourceSupplier sourceSupplier = () -> XContentFactory.jsonBuilder().startObject() + SourceSupplier sourceSupplier = () -> XContentFactory.jsonBuilder() + 
.startObject() .field("date_1", randomDateForInterval(dateHistogramGroupConfig.getInterval())) .field("categorical_1", randomAlphaOfLength(1)) .field("numeric_1", randomDouble()) .endObject(); RollupActionConfig config = new RollupActionConfig( new RollupActionGroupConfig(dateHistogramGroupConfig, null, new TermsGroupConfig("categorical_1")), - Collections.singletonList(new MetricConfig("numeric_1", Collections.singletonList("max")))); + Collections.singletonList(new MetricConfig("numeric_1", Collections.singletonList("max"))) + ); bulkIndex(sourceSupplier); rollup(index, rollupIndex, config); assertRollupIndex(config, index, rollupIndex); @@ -217,14 +242,16 @@ public void testTermsGrouping() throws IOException { public void testHistogramGrouping() throws IOException { long interval = randomLongBetween(1, 1000); RollupActionDateHistogramGroupConfig dateHistogramGroupConfig = randomRollupActionDateHistogramGroupConfig("date_1"); - SourceSupplier sourceSupplier = () -> XContentFactory.jsonBuilder().startObject() + SourceSupplier sourceSupplier = () -> XContentFactory.jsonBuilder() + .startObject() .field("date_1", randomDateForInterval(dateHistogramGroupConfig.getInterval())) .field("numeric_1", randomDoubleBetween(0.0, 10000.0, true)) .field("numeric_2", randomDouble()) .endObject(); RollupActionConfig config = new RollupActionConfig( new RollupActionGroupConfig(dateHistogramGroupConfig, new HistogramGroupConfig(interval, "numeric_1"), null), - Collections.singletonList(new MetricConfig("numeric_2", Collections.singletonList("max")))); + Collections.singletonList(new MetricConfig("numeric_2", Collections.singletonList("max"))) + ); bulkIndex(sourceSupplier); rollup(index, rollupIndex, config); assertRollupIndex(config, index, rollupIndex); @@ -232,13 +259,15 @@ public void testHistogramGrouping() throws IOException { public void testMaxMetric() throws IOException { RollupActionDateHistogramGroupConfig dateHistogramGroupConfig = randomRollupActionDateHistogramGroupConfig("date_1"); - SourceSupplier sourceSupplier = () -> XContentFactory.jsonBuilder().startObject() + SourceSupplier sourceSupplier = () -> XContentFactory.jsonBuilder() + .startObject() .field("date_1", randomDateForInterval(dateHistogramGroupConfig.getInterval())) .field("numeric_1", randomDouble()) .endObject(); RollupActionConfig config = new RollupActionConfig( new RollupActionGroupConfig(dateHistogramGroupConfig, null, null), - Collections.singletonList(new MetricConfig("numeric_1", Collections.singletonList("max")))); + Collections.singletonList(new MetricConfig("numeric_1", Collections.singletonList("max"))) + ); bulkIndex(sourceSupplier); rollup(index, rollupIndex, config); assertRollupIndex(config, index, rollupIndex); @@ -246,13 +275,15 @@ public void testMaxMetric() throws IOException { public void testMinMetric() throws IOException { RollupActionDateHistogramGroupConfig dateHistogramGroupConfig = randomRollupActionDateHistogramGroupConfig("date_1"); - SourceSupplier sourceSupplier = () -> XContentFactory.jsonBuilder().startObject() + SourceSupplier sourceSupplier = () -> XContentFactory.jsonBuilder() + .startObject() .field("date_1", randomDateForInterval(dateHistogramGroupConfig.getInterval())) .field("numeric_1", randomDouble()) .endObject(); RollupActionConfig config = new RollupActionConfig( new RollupActionGroupConfig(dateHistogramGroupConfig, null, null), - Collections.singletonList(new MetricConfig("numeric_1", Collections.singletonList("min")))); + Collections.singletonList(new MetricConfig("numeric_1", 
Collections.singletonList("min"))) + ); bulkIndex(sourceSupplier); rollup(index, rollupIndex, config); assertRollupIndex(config, index, rollupIndex); @@ -260,13 +291,15 @@ public void testMinMetric() throws IOException { public void testValueCountMetric() throws IOException { RollupActionDateHistogramGroupConfig dateHistogramGroupConfig = randomRollupActionDateHistogramGroupConfig("date_1"); - SourceSupplier sourceSupplier = () -> XContentFactory.jsonBuilder().startObject() + SourceSupplier sourceSupplier = () -> XContentFactory.jsonBuilder() + .startObject() .field("date_1", randomDateForInterval(dateHistogramGroupConfig.getInterval())) .field("numeric_1", randomDouble()) .endObject(); RollupActionConfig config = new RollupActionConfig( new RollupActionGroupConfig(dateHistogramGroupConfig, null, null), - Collections.singletonList(new MetricConfig("numeric_1", Collections.singletonList("value_count")))); + Collections.singletonList(new MetricConfig("numeric_1", Collections.singletonList("value_count"))) + ); bulkIndex(sourceSupplier); rollup(index, rollupIndex, config); assertRollupIndex(config, index, rollupIndex); @@ -274,14 +307,16 @@ public void testValueCountMetric() throws IOException { public void testAvgMetric() throws IOException { RollupActionDateHistogramGroupConfig dateHistogramGroupConfig = randomRollupActionDateHistogramGroupConfig("date_1"); - SourceSupplier sourceSupplier = () -> XContentFactory.jsonBuilder().startObject() + SourceSupplier sourceSupplier = () -> XContentFactory.jsonBuilder() + .startObject() .field("date_1", randomDateForInterval(dateHistogramGroupConfig.getInterval())) // Use integers to ensure that avg is comparable between rollup and original .field("numeric_1", randomInt()) .endObject(); RollupActionConfig config = new RollupActionConfig( new RollupActionGroupConfig(dateHistogramGroupConfig, null, null), - Collections.singletonList(new MetricConfig("numeric_1", Collections.singletonList("avg")))); + Collections.singletonList(new MetricConfig("numeric_1", Collections.singletonList("avg"))) + ); bulkIndex(sourceSupplier); rollup(index, rollupIndex, config); assertRollupIndex(config, index, rollupIndex); @@ -289,14 +324,16 @@ public void testAvgMetric() throws IOException { public void testValidationCheck() throws IOException { RollupActionDateHistogramGroupConfig dateHistogramGroupConfig = randomRollupActionDateHistogramGroupConfig("date_1"); - SourceSupplier sourceSupplier = () -> XContentFactory.jsonBuilder().startObject() + SourceSupplier sourceSupplier = () -> XContentFactory.jsonBuilder() + .startObject() .field("date_1", randomDateForInterval(dateHistogramGroupConfig.getInterval())) // use integers to ensure that avg is comparable between rollup and original .field("numeric_nonaggregatable", randomInt()) .endObject(); RollupActionConfig config = new RollupActionConfig( new RollupActionGroupConfig(dateHistogramGroupConfig, null, null), - Collections.singletonList(new MetricConfig("numeric_nonaggregatable", Collections.singletonList("avg")))); + Collections.singletonList(new MetricConfig("numeric_nonaggregatable", Collections.singletonList("avg"))) + ); bulkIndex(sourceSupplier); Exception e = expectThrows(Exception.class, () -> rollup(index, rollupIndex, config)); assertThat(e.getMessage(), containsString("The field [numeric_nonaggregatable] must be aggregatable")); @@ -306,13 +343,15 @@ public void testRollupDatastream() throws Exception { RollupActionDateHistogramGroupConfig dateHistogramGroupConfig = 
randomRollupActionDateHistogramGroupConfig(timestampFieldName); String dataStreamName = createDataStream(); - SourceSupplier sourceSupplier = () -> XContentFactory.jsonBuilder().startObject() + SourceSupplier sourceSupplier = () -> XContentFactory.jsonBuilder() + .startObject() .field(timestampFieldName, randomDateForInterval(dateHistogramGroupConfig.getInterval())) .field("numeric_1", randomDouble()) .endObject(); RollupActionConfig config = new RollupActionConfig( new RollupActionGroupConfig(dateHistogramGroupConfig, null, null), - Collections.singletonList(new MetricConfig("numeric_1", Collections.singletonList("value_count")))); + Collections.singletonList(new MetricConfig("numeric_1", Collections.singletonList("value_count"))) + ); bulkIndex(dataStreamName, sourceSupplier); String oldIndexName = rollover(dataStreamName).getOldIndex(); @@ -361,8 +400,10 @@ private void bulkIndex(String indexName, SourceSupplier sourceSupplier) throws I } private void rollup(String sourceIndex, String rollupIndex, RollupActionConfig config) { - AcknowledgedResponse rollupResponse = client().execute(RollupAction.INSTANCE, - new RollupAction.Request(sourceIndex, rollupIndex, config)).actionGet(); + AcknowledgedResponse rollupResponse = client().execute( + RollupAction.INSTANCE, + new RollupAction.Request(sourceIndex, rollupIndex, config) + ).actionGet(); assertTrue(rollupResponse.isAcknowledged()); } @@ -392,14 +433,20 @@ private void assertRollupIndex(RollupActionConfig config, String sourceIndex, St GetIndexResponse indexSettingsResp = client().admin().indices().prepareGetIndex().addIndices(sourceIndex, rollupIndex).get(); // Assert rollup metadata are set in index settings - assertEquals(indexSettingsResp.getSetting(sourceIndex, "index.uuid"), - indexSettingsResp.getSetting(rollupIndex, "index.rollup.source.uuid")); - assertEquals(indexSettingsResp.getSetting(sourceIndex, "index.provided_name"), - indexSettingsResp.getSetting(rollupIndex, "index.rollup.source.name")); + assertEquals( + indexSettingsResp.getSetting(sourceIndex, "index.uuid"), + indexSettingsResp.getSetting(rollupIndex, "index.rollup.source.uuid") + ); + assertEquals( + indexSettingsResp.getSetting(sourceIndex, "index.provided_name"), + indexSettingsResp.getSetting(rollupIndex, "index.rollup.source.name") + ); // Assert field mappings - Map> mappings = (Map>) indexSettingsResp.getMappings().get(rollupIndex) - .getSourceAsMap().get("properties"); + Map> mappings = (Map>) indexSettingsResp.getMappings() + .get(rollupIndex) + .getSourceAsMap() + .get("properties"); RollupActionDateHistogramGroupConfig dateHistoConfig = config.getGroupConfig().getDateHistogram(); assertEquals(DateFieldMapper.CONTENT_TYPE, mappings.get(dateHistoConfig.getField()).get("type")); @@ -421,7 +468,7 @@ private void assertRollupIndex(RollupActionConfig config, String sourceIndex, St HistogramGroupConfig histoConfig = config.getGroupConfig().getHistogram(); if (histoConfig != null) { - for (String field: histoConfig.getFields()) { + for (String field : histoConfig.getFields()) { assertTrue((mappings.containsKey(field))); Map meta = (Map) mappings.get(field).get("meta"); assertEquals(String.valueOf(histoConfig.getInterval()), meta.get("interval")); @@ -436,8 +483,10 @@ private void assertRollupIndex(RollupActionConfig config, String sourceIndex, St } // Assert that temporary index was removed - expectThrows(IndexNotFoundException.class, - () -> client().admin().indices().prepareGetIndex().addIndices(".rolluptmp-" + rollupIndex).get()); + expectThrows( + 
IndexNotFoundException.class, + () -> client().admin().indices().prepareGetIndex().addIndices(".rolluptmp-" + rollupIndex).get() + ); } private CompositeAggregationBuilder buildCompositeAggs(String name, RollupActionConfig config) { @@ -461,8 +510,7 @@ private CompositeAggregationBuilder buildCompositeAggs(String name, RollupAction if (config.getGroupConfig().getHistogram() != null) { HistogramGroupConfig histoConfig = config.getGroupConfig().getHistogram(); for (String field : histoConfig.getFields()) { - HistogramValuesSourceBuilder source = new HistogramValuesSourceBuilder(field) - .field(field) + HistogramValuesSourceBuilder source = new HistogramValuesSourceBuilder(field).field(field) .interval(histoConfig.getInterval()); sources.add(source); } @@ -554,4 +602,3 @@ private void deleteDataStream(String dataStreamName) throws InterruptedException ); } } - From 35ec6f348c396cec742c987716c30d79f973c2e8 Mon Sep 17 00:00:00 2001 From: Rene Groeschke Date: Tue, 31 Aug 2021 08:45:52 +0200 Subject: [PATCH 035/128] Introduce simple public yaml-rest-test plugin (#76554) This introduces a basic public yaml rest test plugin that is supposed to be used by external elasticsearch plugin authors. This is driven by #76215 - Rename yaml-rest-test to intern-yaml-rest-test - Use public yaml plugin in example plugins Co-authored-by: Mark Vieira --- build-tools-internal/build.gradle | 4 +- ...InternalYamlRestTestPluginFuncTest.groovy} | 6 +- .../rest/compat/YamlRestCompatTestPlugin.java | 10 +- ...n.java => InternalYamlRestTestPlugin.java} | 2 +- build-tools/build.gradle | 4 + .../test/YamlRestTestPluginFuncTest.groovy | 64 +++++++++ .../gradle/plugin/PluginBuildPlugin.java | 5 +- .../gradle/test/YamlRestTestPlugin.java | 124 ++++++++++++++++++ modules/aggs-matrix-stats/build.gradle | 2 +- modules/analysis-common/build.gradle | 2 +- modules/geo/build.gradle | 2 +- modules/ingest-common/build.gradle | 2 +- modules/ingest-geoip/build.gradle | 2 +- modules/ingest-user-agent/build.gradle | 2 +- modules/lang-expression/build.gradle | 2 +- modules/lang-mustache/build.gradle | 2 +- modules/lang-painless/build.gradle | 2 +- modules/mapper-extras/build.gradle | 2 +- modules/parent-join/build.gradle | 2 +- modules/percolator/build.gradle | 2 +- modules/rank-eval/build.gradle | 2 +- modules/reindex/build.gradle | 2 +- modules/repository-url/build.gradle | 2 +- modules/runtime-fields-common/build.gradle | 2 +- modules/transport-netty4/build.gradle | 2 +- plugins/analysis-icu/build.gradle | 2 +- plugins/analysis-kuromoji/build.gradle | 2 +- plugins/analysis-nori/build.gradle | 2 +- plugins/analysis-phonetic/build.gradle | 2 +- plugins/analysis-smartcn/build.gradle | 2 +- plugins/analysis-stempel/build.gradle | 2 +- plugins/analysis-ukrainian/build.gradle | 2 +- plugins/discovery-azure-classic/build.gradle | 2 +- plugins/discovery-ec2/build.gradle | 2 +- .../discovery-ec2/qa/amazon-ec2/build.gradle | 6 +- plugins/discovery-gce/build.gradle | 2 +- plugins/discovery-gce/qa/gce/build.gradle | 2 +- plugins/examples/build.gradle | 16 ++- plugins/ingest-attachment/build.gradle | 2 +- plugins/mapper-annotated-text/build.gradle | 2 +- plugins/mapper-murmur3/build.gradle | 2 +- plugins/mapper-size/build.gradle | 2 +- plugins/repository-azure/build.gradle | 2 +- plugins/repository-gcs/build.gradle | 8 +- plugins/repository-s3/build.gradle | 8 +- plugins/store-smb/build.gradle | 2 +- rest-api-spec/build.gradle | 13 +- test/external-modules/build.gradle | 2 +- .../plugin/async-search/qa/rest/build.gradle | 2 +- 
.../plugin/autoscaling/qa/rest/build.gradle | 2 +- x-pack/plugin/build.gradle | 2 +- x-pack/plugin/ccr/qa/rest/build.gradle | 2 +- x-pack/plugin/core/build.gradle | 2 +- .../plugin/data-streams/qa/rest/build.gradle | 2 +- x-pack/plugin/enrich/qa/rest/build.gradle | 2 +- x-pack/plugin/eql/qa/rest/build.gradle | 2 +- x-pack/plugin/fleet/qa/rest/build.gradle | 2 +- .../graph/qa/with-security/build.gradle | 2 +- x-pack/plugin/ilm/qa/rest/build.gradle | 2 +- .../ml/qa/ml-with-security/build.gradle | 2 +- x-pack/plugin/rollup/qa/rest/build.gradle | 2 +- .../searchable-snapshots/qa/rest/build.gradle | 2 +- x-pack/plugin/spatial/build.gradle | 2 +- x-pack/plugin/stack/qa/rest/build.gradle | 2 +- .../text-structure-with-security/build.gradle | 2 +- x-pack/plugin/watcher/qa/rest/build.gradle | 2 +- .../watcher/qa/with-security/build.gradle | 2 +- x-pack/qa/runtime-fields/build.gradle | 2 +- 68 files changed, 298 insertions(+), 82 deletions(-) rename build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/test/rest/{YamlRestTestPluginFuncTest.groovy => InternalYamlRestTestPluginFuncTest.groovy} (93%) rename build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/{YamlRestTestPlugin.java => InternalYamlRestTestPlugin.java} (97%) create mode 100644 build-tools/src/integTest/groovy/org/elasticsearch/gradle/test/YamlRestTestPluginFuncTest.groovy create mode 100644 build-tools/src/main/java/org/elasticsearch/gradle/test/YamlRestTestPlugin.java diff --git a/build-tools-internal/build.gradle b/build-tools-internal/build.gradle index 362e5f2d4bc23..b0363dc795eb0 100644 --- a/build-tools-internal/build.gradle +++ b/build-tools-internal/build.gradle @@ -159,8 +159,8 @@ gradlePlugin { implementationClass = 'org.elasticsearch.gradle.internal.rest.compat.YamlRestCompatTestPlugin' } yamlRestTest { - id = 'elasticsearch.yaml-rest-test' - implementationClass = 'org.elasticsearch.gradle.internal.test.rest.YamlRestTestPlugin' + id = 'elasticsearch.internal-yaml-rest-test' + implementationClass = 'org.elasticsearch.gradle.internal.test.rest.InternalYamlRestTestPlugin' } } } diff --git a/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/test/rest/YamlRestTestPluginFuncTest.groovy b/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/test/rest/InternalYamlRestTestPluginFuncTest.groovy similarity index 93% rename from build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/test/rest/YamlRestTestPluginFuncTest.groovy rename to build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/test/rest/InternalYamlRestTestPluginFuncTest.groovy index 9cfd51d074bee..23554785dde1a 100644 --- a/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/test/rest/YamlRestTestPluginFuncTest.groovy +++ b/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/test/rest/InternalYamlRestTestPluginFuncTest.groovy @@ -11,13 +11,13 @@ package org.elasticsearch.gradle.internal.test.rest import org.elasticsearch.gradle.fixtures.AbstractRestResourcesFuncTest import org.gradle.testkit.runner.TaskOutcome -class YamlRestTestPluginFuncTest extends AbstractRestResourcesFuncTest { +class InternalYamlRestTestPluginFuncTest extends AbstractRestResourcesFuncTest { def "yamlRestTest does nothing when there are no tests"() { given: buildFile << """ plugins { - id 'elasticsearch.yaml-rest-test' + id 'elasticsearch.internal-yaml-rest-test' } """ @@ -34,7 +34,7 @@ class 
YamlRestTestPluginFuncTest extends AbstractRestResourcesFuncTest { given: internalBuild() buildFile << """ - apply plugin: 'elasticsearch.yaml-rest-test' + apply plugin: 'elasticsearch.internal-yaml-rest-test' dependencies { yamlRestTestImplementation "junit:junit:4.12" diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/rest/compat/YamlRestCompatTestPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/rest/compat/YamlRestCompatTestPlugin.java index b104d1aa3df77..9a74a3a55e926 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/rest/compat/YamlRestCompatTestPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/rest/compat/YamlRestCompatTestPlugin.java @@ -18,7 +18,7 @@ import org.elasticsearch.gradle.internal.test.rest.RestResourcesExtension; import org.elasticsearch.gradle.internal.test.rest.RestResourcesPlugin; import org.elasticsearch.gradle.internal.test.rest.RestTestUtil; -import org.elasticsearch.gradle.internal.test.rest.YamlRestTestPlugin; +import org.elasticsearch.gradle.internal.test.rest.InternalYamlRestTestPlugin; import org.elasticsearch.gradle.testclusters.TestClustersPlugin; import org.elasticsearch.gradle.util.GradleUtils; import org.gradle.api.Plugin; @@ -63,15 +63,15 @@ public void apply(Project project) { project.getPluginManager().apply(TestClustersPlugin.class); project.getPluginManager().apply(RestTestBasePlugin.class); project.getPluginManager().apply(RestResourcesPlugin.class); - project.getPluginManager().apply(YamlRestTestPlugin.class); + project.getPluginManager().apply(InternalYamlRestTestPlugin.class); RestResourcesExtension extension = project.getExtensions().getByType(RestResourcesExtension.class); // create source set SourceSetContainer sourceSets = project.getExtensions().getByType(SourceSetContainer.class); SourceSet yamlCompatTestSourceSet = sourceSets.create(SOURCE_SET_NAME); - SourceSet yamlTestSourceSet = sourceSets.getByName(YamlRestTestPlugin.SOURCE_SET_NAME); - GradleUtils.extendSourceSet(project, YamlRestTestPlugin.SOURCE_SET_NAME, SOURCE_SET_NAME); + SourceSet yamlTestSourceSet = sourceSets.getByName(InternalYamlRestTestPlugin.SOURCE_SET_NAME); + GradleUtils.extendSourceSet(project, InternalYamlRestTestPlugin.SOURCE_SET_NAME, SOURCE_SET_NAME); // copy compatible rest specs Configuration bwcMinorConfig = project.getConfigurations().create(BWC_MINOR_CONFIG_NAME); @@ -176,7 +176,7 @@ public void apply(Project project) { .minus(project.files(originalYamlTestsDir)) ); // run compatibility tests after "normal" tests - testTask.mustRunAfter(project.getTasks().named(YamlRestTestPlugin.SOURCE_SET_NAME)); + testTask.mustRunAfter(project.getTasks().named(InternalYamlRestTestPlugin.SOURCE_SET_NAME)); testTask.onlyIf(t -> isEnabled(project)); }); diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/YamlRestTestPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/InternalYamlRestTestPlugin.java similarity index 97% rename from build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/YamlRestTestPlugin.java rename to build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/InternalYamlRestTestPlugin.java index 5a76543bebca1..25ee8a24b9791 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/YamlRestTestPlugin.java +++ 
b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/InternalYamlRestTestPlugin.java @@ -22,7 +22,7 @@ /** * Apply this plugin to run the YAML based REST tests. */ -public class YamlRestTestPlugin implements Plugin { +public class InternalYamlRestTestPlugin implements Plugin { public static final String SOURCE_SET_NAME = "yamlRestTest"; diff --git a/build-tools/build.gradle b/build-tools/build.gradle index 6743306dafe22..1da2a31390d40 100644 --- a/build-tools/build.gradle +++ b/build-tools/build.gradle @@ -58,6 +58,10 @@ gradlePlugin { id = 'elasticsearch.test-gradle-policy' implementationClass = 'org.elasticsearch.gradle.test.GradleTestPolicySetupPlugin' } + yamlTests { + id = 'elasticsearch.yaml-rest-test' + implementationClass = 'org.elasticsearch.gradle.test.YamlRestTestPlugin' + } } } diff --git a/build-tools/src/integTest/groovy/org/elasticsearch/gradle/test/YamlRestTestPluginFuncTest.groovy b/build-tools/src/integTest/groovy/org/elasticsearch/gradle/test/YamlRestTestPluginFuncTest.groovy new file mode 100644 index 0000000000000..af3d7f4e71c92 --- /dev/null +++ b/build-tools/src/integTest/groovy/org/elasticsearch/gradle/test/YamlRestTestPluginFuncTest.groovy @@ -0,0 +1,64 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.gradle.test + +import org.elasticsearch.gradle.VersionProperties +import org.elasticsearch.gradle.fixtures.AbstractGradleFuncTest +import org.gradle.testkit.runner.TaskOutcome + +class YamlRestTestPluginFuncTest extends AbstractGradleFuncTest { + + def "declares default dependencies"() { + given: + buildFile << """ + plugins { + id 'elasticsearch.yaml-rest-test' + } + """ + + when: + def result = gradleRunner("dependencies").build() + + then: + result.output.contains(""" +restTestSpecs +\\--- org.elasticsearch:rest-api-spec:${VersionProperties.elasticsearch} FAILED +""") + result.output.contains(""" +yamlRestTestImplementation - Implementation only dependencies for source set 'yaml rest test'. (n) +\\--- org.elasticsearch.test:framework:8.0.0-SNAPSHOT (n) +""") + } + + def "yamlRestTest does nothing when there are no tests"() { + given: + buildFile << """ + plugins { + id 'elasticsearch.yaml-rest-test' + } + + repositories { + mavenCentral() + } + + dependencies { + yamlRestTestImplementation "org.elasticsearch.test:framework:7.14.0" + restTestSpecs "org.elasticsearch:rest-api-spec:7.14.0" + } + """ + + when: + def result = gradleRunner("yamlRestTest").build() + then: + result.task(':compileYamlRestTestJava').outcome == TaskOutcome.NO_SOURCE + result.task(':processYamlRestTestResources').outcome == TaskOutcome.NO_SOURCE + result.task(':yamlRestTest').outcome == TaskOutcome.NO_SOURCE + } + +} \ No newline at end of file diff --git a/build-tools/src/main/java/org/elasticsearch/gradle/plugin/PluginBuildPlugin.java b/build-tools/src/main/java/org/elasticsearch/gradle/plugin/PluginBuildPlugin.java index 54674d5476f95..fa7cc4d5beb25 100644 --- a/build-tools/src/main/java/org/elasticsearch/gradle/plugin/PluginBuildPlugin.java +++ b/build-tools/src/main/java/org/elasticsearch/gradle/plugin/PluginBuildPlugin.java @@ -54,6 +54,9 @@ * Encapsulates build configuration for an Elasticsearch plugin. 
*/ public class PluginBuildPlugin implements Plugin { + + public static final String BUNDLE_PLUGIN_TASK_NAME = "bundlePlugin"; + @Override public void apply(final Project project) { project.getPluginManager().apply(JavaPlugin.class); @@ -124,7 +127,7 @@ public void apply(final Project project) { project.getTasks().register("run", RunTask.class, runTask -> { runTask.useCluster(runCluster); - runTask.dependsOn(project.getTasks().named("bundlePlugin")); + runTask.dependsOn(project.getTasks().named(BUNDLE_PLUGIN_TASK_NAME)); }); } diff --git a/build-tools/src/main/java/org/elasticsearch/gradle/test/YamlRestTestPlugin.java b/build-tools/src/main/java/org/elasticsearch/gradle/test/YamlRestTestPlugin.java new file mode 100644 index 0000000000000..1ce03787a0756 --- /dev/null +++ b/build-tools/src/main/java/org/elasticsearch/gradle/test/YamlRestTestPlugin.java @@ -0,0 +1,124 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.gradle.test; + +import org.elasticsearch.gradle.VersionProperties; +import org.elasticsearch.gradle.plugin.PluginBuildPlugin; +import org.elasticsearch.gradle.testclusters.ElasticsearchCluster; +import org.elasticsearch.gradle.testclusters.StandaloneRestIntegTestTask; +import org.elasticsearch.gradle.testclusters.TestClustersPlugin; +import org.elasticsearch.gradle.transform.UnzipTransform; +import org.gradle.api.Action; +import org.gradle.api.NamedDomainObjectContainer; +import org.gradle.api.Plugin; +import org.gradle.api.Project; +import org.gradle.api.Task; +import org.gradle.api.artifacts.Configuration; +import org.gradle.api.artifacts.ConfigurationContainer; +import org.gradle.api.artifacts.dsl.DependencyHandler; +import org.gradle.api.artifacts.type.ArtifactTypeDefinition; +import org.gradle.api.attributes.Attribute; +import org.gradle.api.internal.artifacts.ArtifactAttributes; +import org.gradle.api.plugins.JavaBasePlugin; +import org.gradle.api.tasks.Copy; +import org.gradle.api.tasks.SourceSet; +import org.gradle.api.tasks.SourceSetContainer; +import org.gradle.api.tasks.TaskProvider; +import org.gradle.api.tasks.bundling.Zip; + +import java.io.File; + +import static org.elasticsearch.gradle.plugin.PluginBuildPlugin.BUNDLE_PLUGIN_TASK_NAME; + +public class YamlRestTestPlugin implements Plugin { + + public static final String REST_TEST_SPECS_CONFIGURATION_NAME = "restTestSpecs"; + public static final String YAML_REST_TEST = "yamlRestTest"; + + @Override + public void apply(Project project) { + project.getPluginManager().apply(GradleTestPolicySetupPlugin.class); + project.getPluginManager().apply(TestClustersPlugin.class); + project.getPluginManager().apply(JavaBasePlugin.class); + + Attribute restAttribute = Attribute.of("restSpecs", Boolean.class); + project.getDependencies().getAttributesSchema().attribute(restAttribute); + project.getDependencies().getArtifactTypes().maybeCreate(ArtifactTypeDefinition.JAR_TYPE); + project.getDependencies().registerTransform(UnzipTransform.class, transformSpec -> { + transformSpec.getFrom() + .attribute(ArtifactAttributes.ARTIFACT_FORMAT, ArtifactTypeDefinition.JAR_TYPE) + .attribute(restAttribute, true); + transformSpec.getTo() + .attribute(ArtifactAttributes.ARTIFACT_FORMAT, 
ArtifactTypeDefinition.DIRECTORY_TYPE) + .attribute(restAttribute, true); + }); + + ConfigurationContainer configurations = project.getConfigurations(); + Configuration restTestSpecs = configurations.create(REST_TEST_SPECS_CONFIGURATION_NAME); + restTestSpecs.getAttributes().attribute(ArtifactAttributes.ARTIFACT_FORMAT, ArtifactTypeDefinition.DIRECTORY_TYPE); + restTestSpecs.getAttributes().attribute(restAttribute, true); + + TaskProvider copyRestTestSpecs = project.getTasks().register("copyRestTestSpecs", Copy.class, t -> { + t.from(restTestSpecs); + t.into(new File(project.getBuildDir(), "restResources/restspec")); + }); + + var sourceSets = project.getExtensions().getByType(SourceSetContainer.class); + var testSourceSet = sourceSets.maybeCreate(YAML_REST_TEST); + NamedDomainObjectContainer testClusters = (NamedDomainObjectContainer) project + .getExtensions() + .getByName(TestClustersPlugin.EXTENSION_NAME); + + testSourceSet.getOutput().dir(copyRestTestSpecs.map(Task::getOutputs)); + Configuration yamlRestTestImplementation = configurations.getByName(testSourceSet.getImplementationConfigurationName()); + setupDefaultDependencies(project.getDependencies(), restTestSpecs, yamlRestTestImplementation); + var cluster = testClusters.maybeCreate(YAML_REST_TEST); + TaskProvider yamlRestTestTask = setupTestTask(project, testSourceSet, cluster); + project.getPlugins().withType(PluginBuildPlugin.class, p -> { + TaskProvider bundle = project.getTasks().withType(Zip.class).named(BUNDLE_PLUGIN_TASK_NAME); + cluster.plugin(bundle.flatMap(Zip::getArchiveFile)); + yamlRestTestTask.configure(t -> t.dependsOn(bundle)); + }); + } + + private static void setupDefaultDependencies( + DependencyHandler dependencyHandler, + Configuration restTestSpecs, + Configuration yamlRestTestImplementation + ) { + String elasticsearchVersion = VersionProperties.getElasticsearch(); + yamlRestTestImplementation.defaultDependencies( + deps -> deps.add(dependencyHandler.create("org.elasticsearch.test:framework:" + elasticsearchVersion)) + ); + + restTestSpecs.defaultDependencies( + deps -> deps.add(dependencyHandler.create("org.elasticsearch:rest-api-spec:" + elasticsearchVersion)) + ); + } + + private TaskProvider setupTestTask( + Project project, + SourceSet testSourceSet, + ElasticsearchCluster cluster + ) { + return project.getTasks().register("yamlRestTest", StandaloneRestIntegTestTask.class, task -> { + task.useCluster(cluster); + task.setTestClassesDirs(testSourceSet.getOutput().getClassesDirs()); + task.setClasspath(testSourceSet.getRuntimeClasspath()); + + var nonInputProperties = new SystemPropertyCommandLineArgumentProvider(); + nonInputProperties.systemProperty("tests.rest.cluster", () -> String.join(",", cluster.getAllHttpSocketURI())); + nonInputProperties.systemProperty("tests.cluster", () -> String.join(",", cluster.getAllTransportPortURI())); + nonInputProperties.systemProperty("tests.clustername", () -> cluster.getName()); + task.getJvmArgumentProviders().add(nonInputProperties); + task.systemProperty("tests.rest.load_packaged", Boolean.FALSE.toString()); + }); + } + +} diff --git a/modules/aggs-matrix-stats/build.gradle b/modules/aggs-matrix-stats/build.gradle index 080c9af25194d..fcab80d9891ef 100644 --- a/modules/aggs-matrix-stats/build.gradle +++ b/modules/aggs-matrix-stats/build.gradle @@ -5,7 +5,7 @@ * in compliance with, at your election, the Elastic License 2.0 or the Server * Side Public License, v 1. 
*/ -apply plugin: 'elasticsearch.yaml-rest-test' +apply plugin: 'elasticsearch.internal-yaml-rest-test' apply plugin: 'elasticsearch.yaml-rest-compat-test' esplugin { diff --git a/modules/analysis-common/build.gradle b/modules/analysis-common/build.gradle index 02e715c03bca9..4c39e668c172d 100644 --- a/modules/analysis-common/build.gradle +++ b/modules/analysis-common/build.gradle @@ -5,7 +5,7 @@ * in compliance with, at your election, the Elastic License 2.0 or the Server * Side Public License, v 1. */ -apply plugin: 'elasticsearch.yaml-rest-test' +apply plugin: 'elasticsearch.internal-yaml-rest-test' apply plugin: 'elasticsearch.yaml-rest-compat-test' apply plugin: 'elasticsearch.internal-cluster-test' diff --git a/modules/geo/build.gradle b/modules/geo/build.gradle index 1c8537df6fc25..66b203084e495 100644 --- a/modules/geo/build.gradle +++ b/modules/geo/build.gradle @@ -5,7 +5,7 @@ * in compliance with, at your election, the Elastic License 2.0 or the Server * Side Public License, v 1. */ -apply plugin: 'elasticsearch.yaml-rest-test' +apply plugin: 'elasticsearch.internal-yaml-rest-test' apply plugin: 'elasticsearch.yaml-rest-compat-test' import org.elasticsearch.gradle.internal.info.BuildParams diff --git a/modules/ingest-common/build.gradle b/modules/ingest-common/build.gradle index af28ddec0cd79..036cde58f9dee 100644 --- a/modules/ingest-common/build.gradle +++ b/modules/ingest-common/build.gradle @@ -5,7 +5,7 @@ * in compliance with, at your election, the Elastic License 2.0 or the Server * Side Public License, v 1. */ -apply plugin: 'elasticsearch.yaml-rest-test' +apply plugin: 'elasticsearch.internal-yaml-rest-test' apply plugin: 'elasticsearch.yaml-rest-compat-test' apply plugin: 'elasticsearch.internal-cluster-test' diff --git a/modules/ingest-geoip/build.gradle b/modules/ingest-geoip/build.gradle index cf59994ab8d2d..b2d8689e5c2e6 100644 --- a/modules/ingest-geoip/build.gradle +++ b/modules/ingest-geoip/build.gradle @@ -8,7 +8,7 @@ import org.apache.tools.ant.taskdefs.condition.Os -apply plugin: 'elasticsearch.yaml-rest-test' +apply plugin: 'elasticsearch.internal-yaml-rest-test' apply plugin: 'elasticsearch.yaml-rest-compat-test' apply plugin: 'elasticsearch.internal-cluster-test' diff --git a/modules/ingest-user-agent/build.gradle b/modules/ingest-user-agent/build.gradle index 1518b1d64a08c..c14f92bfff052 100644 --- a/modules/ingest-user-agent/build.gradle +++ b/modules/ingest-user-agent/build.gradle @@ -5,7 +5,7 @@ * in compliance with, at your election, the Elastic License 2.0 or the Server * Side Public License, v 1. */ -apply plugin: 'elasticsearch.yaml-rest-test' +apply plugin: 'elasticsearch.internal-yaml-rest-test' apply plugin: 'elasticsearch.yaml-rest-compat-test' esplugin { diff --git a/modules/lang-expression/build.gradle b/modules/lang-expression/build.gradle index a73c929be0d48..b177abfc6e638 100644 --- a/modules/lang-expression/build.gradle +++ b/modules/lang-expression/build.gradle @@ -5,7 +5,7 @@ * in compliance with, at your election, the Elastic License 2.0 or the Server * Side Public License, v 1. 
*/ -apply plugin: 'elasticsearch.yaml-rest-test' +apply plugin: 'elasticsearch.internal-yaml-rest-test' apply plugin: 'elasticsearch.yaml-rest-compat-test' apply plugin: 'elasticsearch.internal-cluster-test' diff --git a/modules/lang-mustache/build.gradle b/modules/lang-mustache/build.gradle index a4e328ac46080..fd80aece89dfd 100644 --- a/modules/lang-mustache/build.gradle +++ b/modules/lang-mustache/build.gradle @@ -5,7 +5,7 @@ * in compliance with, at your election, the Elastic License 2.0 or the Server * Side Public License, v 1. */ -apply plugin: 'elasticsearch.yaml-rest-test' +apply plugin: 'elasticsearch.internal-yaml-rest-test' apply plugin: 'elasticsearch.yaml-rest-compat-test' apply plugin: 'elasticsearch.java-rest-test' apply plugin: 'elasticsearch.internal-cluster-test' diff --git a/modules/lang-painless/build.gradle b/modules/lang-painless/build.gradle index 278f95a020e9d..e7251dd7ea0c3 100644 --- a/modules/lang-painless/build.gradle +++ b/modules/lang-painless/build.gradle @@ -8,7 +8,7 @@ import org.elasticsearch.gradle.testclusters.DefaultTestClustersTask; apply plugin: 'elasticsearch.validate-rest-spec' -apply plugin: 'elasticsearch.yaml-rest-test' +apply plugin: 'elasticsearch.internal-yaml-rest-test' apply plugin: 'elasticsearch.yaml-rest-compat-test' esplugin { diff --git a/modules/mapper-extras/build.gradle b/modules/mapper-extras/build.gradle index f518204d2f056..d63df5a685931 100644 --- a/modules/mapper-extras/build.gradle +++ b/modules/mapper-extras/build.gradle @@ -5,7 +5,7 @@ * in compliance with, at your election, the Elastic License 2.0 or the Server * Side Public License, v 1. */ -apply plugin: 'elasticsearch.yaml-rest-test' +apply plugin: 'elasticsearch.internal-yaml-rest-test' apply plugin: 'elasticsearch.yaml-rest-compat-test' apply plugin: 'elasticsearch.internal-cluster-test' diff --git a/modules/parent-join/build.gradle b/modules/parent-join/build.gradle index 0b3099dc80a6e..088f0c068ef52 100644 --- a/modules/parent-join/build.gradle +++ b/modules/parent-join/build.gradle @@ -5,7 +5,7 @@ * in compliance with, at your election, the Elastic License 2.0 or the Server * Side Public License, v 1. */ -apply plugin: 'elasticsearch.yaml-rest-test' +apply plugin: 'elasticsearch.internal-yaml-rest-test' apply plugin: 'elasticsearch.yaml-rest-compat-test' apply plugin: 'elasticsearch.internal-cluster-test' diff --git a/modules/percolator/build.gradle b/modules/percolator/build.gradle index 81936f44a7ec6..69c30fadec50e 100644 --- a/modules/percolator/build.gradle +++ b/modules/percolator/build.gradle @@ -5,7 +5,7 @@ * in compliance with, at your election, the Elastic License 2.0 or the Server * Side Public License, v 1. */ -apply plugin: 'elasticsearch.yaml-rest-test' +apply plugin: 'elasticsearch.internal-yaml-rest-test' apply plugin: 'elasticsearch.yaml-rest-compat-test' apply plugin: 'elasticsearch.internal-cluster-test' diff --git a/modules/rank-eval/build.gradle b/modules/rank-eval/build.gradle index 308d859931dac..a0a51095c9c85 100644 --- a/modules/rank-eval/build.gradle +++ b/modules/rank-eval/build.gradle @@ -5,7 +5,7 @@ * in compliance with, at your election, the Elastic License 2.0 or the Server * Side Public License, v 1. 
*/ -apply plugin: 'elasticsearch.yaml-rest-test' +apply plugin: 'elasticsearch.internal-yaml-rest-test' apply plugin: 'elasticsearch.yaml-rest-compat-test' apply plugin: 'elasticsearch.internal-cluster-test' diff --git a/modules/reindex/build.gradle b/modules/reindex/build.gradle index 277fee4aa6580..37b70c340ab79 100644 --- a/modules/reindex/build.gradle +++ b/modules/reindex/build.gradle @@ -14,7 +14,7 @@ import org.elasticsearch.gradle.internal.test.AntFixture apply plugin: 'elasticsearch.test-with-dependencies' apply plugin: 'elasticsearch.jdk-download' -apply plugin: 'elasticsearch.yaml-rest-test' +apply plugin: 'elasticsearch.internal-yaml-rest-test' apply plugin: 'elasticsearch.java-rest-test' apply plugin: 'elasticsearch.yaml-rest-compat-test' apply plugin: 'elasticsearch.internal-cluster-test' diff --git a/modules/repository-url/build.gradle b/modules/repository-url/build.gradle index 0245a3ea69b92..a19cae51e8981 100644 --- a/modules/repository-url/build.gradle +++ b/modules/repository-url/build.gradle @@ -8,7 +8,7 @@ import org.elasticsearch.gradle.PropertyNormalization -apply plugin: 'elasticsearch.yaml-rest-test' +apply plugin: 'elasticsearch.internal-yaml-rest-test' apply plugin: 'elasticsearch.yaml-rest-compat-test' apply plugin: 'elasticsearch.internal-cluster-test' apply plugin: 'elasticsearch.test.fixtures' diff --git a/modules/runtime-fields-common/build.gradle b/modules/runtime-fields-common/build.gradle index 83c4e7410a4c1..54076aaecdf81 100644 --- a/modules/runtime-fields-common/build.gradle +++ b/modules/runtime-fields-common/build.gradle @@ -7,7 +7,7 @@ */ apply plugin: 'elasticsearch.validate-rest-spec' -apply plugin: 'elasticsearch.yaml-rest-test' +apply plugin: 'elasticsearch.internal-yaml-rest-test' apply plugin: 'elasticsearch.yaml-rest-compat-test' esplugin { diff --git a/modules/transport-netty4/build.gradle b/modules/transport-netty4/build.gradle index 9cd1db75e86d0..4eb68a6e4cf11 100644 --- a/modules/transport-netty4/build.gradle +++ b/modules/transport-netty4/build.gradle @@ -12,7 +12,7 @@ import org.elasticsearch.gradle.internal.test.RestIntegTestTask import org.elasticsearch.gradle.internal.test.rest.JavaRestTestPlugin import org.elasticsearch.gradle.internal.test.InternalClusterTestPlugin -apply plugin: 'elasticsearch.yaml-rest-test' +apply plugin: 'elasticsearch.internal-yaml-rest-test' apply plugin: 'elasticsearch.yaml-rest-compat-test' apply plugin: 'elasticsearch.java-rest-test' apply plugin: 'elasticsearch.internal-cluster-test' diff --git a/plugins/analysis-icu/build.gradle b/plugins/analysis-icu/build.gradle index e2926ae0cf7a4..babff8e86ee53 100644 --- a/plugins/analysis-icu/build.gradle +++ b/plugins/analysis-icu/build.gradle @@ -7,7 +7,7 @@ import de.thetaphi.forbiddenapis.gradle.CheckForbiddenApis * in compliance with, at your election, the Elastic License 2.0 or the Server * Side Public License, v 1. */ -apply plugin: 'elasticsearch.yaml-rest-test' +apply plugin: 'elasticsearch.internal-yaml-rest-test' apply plugin: 'elasticsearch.yaml-rest-compat-test' apply plugin: 'elasticsearch.internal-cluster-test' diff --git a/plugins/analysis-kuromoji/build.gradle b/plugins/analysis-kuromoji/build.gradle index 801d6e3238966..0b3beb8ab5dbe 100644 --- a/plugins/analysis-kuromoji/build.gradle +++ b/plugins/analysis-kuromoji/build.gradle @@ -5,7 +5,7 @@ * in compliance with, at your election, the Elastic License 2.0 or the Server * Side Public License, v 1. 
*/ -apply plugin: 'elasticsearch.yaml-rest-test' +apply plugin: 'elasticsearch.internal-yaml-rest-test' apply plugin: 'elasticsearch.yaml-rest-compat-test' esplugin { diff --git a/plugins/analysis-nori/build.gradle b/plugins/analysis-nori/build.gradle index 7c0604b56813f..099e620795148 100644 --- a/plugins/analysis-nori/build.gradle +++ b/plugins/analysis-nori/build.gradle @@ -5,7 +5,7 @@ * in compliance with, at your election, the Elastic License 2.0 or the Server * Side Public License, v 1. */ -apply plugin: 'elasticsearch.yaml-rest-test' +apply plugin: 'elasticsearch.internal-yaml-rest-test' apply plugin: 'elasticsearch.yaml-rest-compat-test' esplugin { diff --git a/plugins/analysis-phonetic/build.gradle b/plugins/analysis-phonetic/build.gradle index bd8af3ff93a12..89ff6f900c802 100644 --- a/plugins/analysis-phonetic/build.gradle +++ b/plugins/analysis-phonetic/build.gradle @@ -5,7 +5,7 @@ * in compliance with, at your election, the Elastic License 2.0 or the Server * Side Public License, v 1. */ -apply plugin: 'elasticsearch.yaml-rest-test' +apply plugin: 'elasticsearch.internal-yaml-rest-test' apply plugin: 'elasticsearch.yaml-rest-compat-test' esplugin { diff --git a/plugins/analysis-smartcn/build.gradle b/plugins/analysis-smartcn/build.gradle index e997c81b3fc82..3a3bd3012fd86 100644 --- a/plugins/analysis-smartcn/build.gradle +++ b/plugins/analysis-smartcn/build.gradle @@ -5,7 +5,7 @@ * in compliance with, at your election, the Elastic License 2.0 or the Server * Side Public License, v 1. */ -apply plugin: 'elasticsearch.yaml-rest-test' +apply plugin: 'elasticsearch.internal-yaml-rest-test' apply plugin: 'elasticsearch.yaml-rest-compat-test' esplugin { diff --git a/plugins/analysis-stempel/build.gradle b/plugins/analysis-stempel/build.gradle index bc5e973b9d980..3b1eb7623f947 100644 --- a/plugins/analysis-stempel/build.gradle +++ b/plugins/analysis-stempel/build.gradle @@ -5,7 +5,7 @@ * in compliance with, at your election, the Elastic License 2.0 or the Server * Side Public License, v 1. */ -apply plugin: 'elasticsearch.yaml-rest-test' +apply plugin: 'elasticsearch.internal-yaml-rest-test' apply plugin: 'elasticsearch.yaml-rest-compat-test' esplugin { diff --git a/plugins/analysis-ukrainian/build.gradle b/plugins/analysis-ukrainian/build.gradle index 3c869e9195b19..2a059bd746c93 100644 --- a/plugins/analysis-ukrainian/build.gradle +++ b/plugins/analysis-ukrainian/build.gradle @@ -5,7 +5,7 @@ * in compliance with, at your election, the Elastic License 2.0 or the Server * Side Public License, v 1. */ -apply plugin: 'elasticsearch.yaml-rest-test' +apply plugin: 'elasticsearch.internal-yaml-rest-test' apply plugin: 'elasticsearch.yaml-rest-compat-test' esplugin { diff --git a/plugins/discovery-azure-classic/build.gradle b/plugins/discovery-azure-classic/build.gradle index 3101b0b343c58..a97c9d8e5a11c 100644 --- a/plugins/discovery-azure-classic/build.gradle +++ b/plugins/discovery-azure-classic/build.gradle @@ -8,7 +8,7 @@ import org.elasticsearch.gradle.internal.info.BuildParams * in compliance with, at your election, the Elastic License 2.0 or the Server * Side Public License, v 1. 
*/ -apply plugin: 'elasticsearch.yaml-rest-test' +apply plugin: 'elasticsearch.internal-yaml-rest-test' apply plugin: 'elasticsearch.internal-cluster-test' esplugin { diff --git a/plugins/discovery-ec2/build.gradle b/plugins/discovery-ec2/build.gradle index 6eac2f760f655..0a8f299955850 100644 --- a/plugins/discovery-ec2/build.gradle +++ b/plugins/discovery-ec2/build.gradle @@ -7,7 +7,7 @@ import org.elasticsearch.gradle.internal.info.BuildParams * in compliance with, at your election, the Elastic License 2.0 or the Server * Side Public License, v 1. */ -apply plugin: 'elasticsearch.yaml-rest-test' +apply plugin: 'elasticsearch.internal-yaml-rest-test' apply plugin: 'elasticsearch.internal-cluster-test' esplugin { diff --git a/plugins/discovery-ec2/qa/amazon-ec2/build.gradle b/plugins/discovery-ec2/qa/amazon-ec2/build.gradle index 5d87ad59fef1f..e8afba63970c9 100644 --- a/plugins/discovery-ec2/qa/amazon-ec2/build.gradle +++ b/plugins/discovery-ec2/qa/amazon-ec2/build.gradle @@ -10,11 +10,11 @@ import org.apache.tools.ant.filters.ReplaceTokens import org.elasticsearch.gradle.internal.info.BuildParams import org.elasticsearch.gradle.internal.test.AntFixture import org.elasticsearch.gradle.internal.test.RestIntegTestTask -import org.elasticsearch.gradle.internal.test.rest.YamlRestTestPlugin +import org.elasticsearch.gradle.internal.test.rest.InternalYamlRestTestPlugin import static org.elasticsearch.gradle.PropertyNormalization.IGNORE_VALUE -apply plugin: 'elasticsearch.yaml-rest-test' +apply plugin: 'elasticsearch.internal-yaml-rest-test' dependencies { yamlRestTestImplementation project(':plugins:discovery-ec2') @@ -62,7 +62,7 @@ tasks.named("yamlRestTest").configure { enabled = false } def yamlRestTestTask = tasks.register("yamlRestTest${action}", RestIntegTestTask) { dependsOn fixture SourceSetContainer sourceSets = project.getExtensions().getByType(SourceSetContainer.class); - SourceSet yamlRestTestSourceSet = sourceSets.getByName(YamlRestTestPlugin.SOURCE_SET_NAME) + SourceSet yamlRestTestSourceSet = sourceSets.getByName(InternalYamlRestTestPlugin.SOURCE_SET_NAME) testClassesDirs = yamlRestTestSourceSet.getOutput().getClassesDirs() classpath = yamlRestTestSourceSet.getRuntimeClasspath() } diff --git a/plugins/discovery-gce/build.gradle b/plugins/discovery-gce/build.gradle index 738929e6f2b26..b936669d4ee26 100644 --- a/plugins/discovery-gce/build.gradle +++ b/plugins/discovery-gce/build.gradle @@ -1,4 +1,4 @@ -apply plugin: 'elasticsearch.yaml-rest-test' +apply plugin: 'elasticsearch.internal-yaml-rest-test' apply plugin: 'elasticsearch.internal-cluster-test' esplugin { diff --git a/plugins/discovery-gce/qa/gce/build.gradle b/plugins/discovery-gce/qa/gce/build.gradle index 86fbf30963461..c2cd26c2e928e 100644 --- a/plugins/discovery-gce/qa/gce/build.gradle +++ b/plugins/discovery-gce/qa/gce/build.gradle @@ -13,7 +13,7 @@ import org.elasticsearch.gradle.internal.test.AntFixture import static org.elasticsearch.gradle.PropertyNormalization.IGNORE_VALUE -apply plugin: 'elasticsearch.yaml-rest-test' +apply plugin: 'elasticsearch.internal-yaml-rest-test' final int gceNumberOfNodes = 3 diff --git a/plugins/examples/build.gradle b/plugins/examples/build.gradle index e3bbc6cb22b34..31bcccd9c5ece 100644 --- a/plugins/examples/build.gradle +++ b/plugins/examples/build.gradle @@ -1,14 +1,24 @@ import org.elasticsearch.gradle.internal.info.BuildParams // Subprojects aren't published so do not assemble -subprojects { - project.tasks.matching { it.name.equals('assemble') }.configureEach { +subprojects { 
p -> + p.tasks.matching { it.name.equals('assemble') }.configureEach { enabled = false } if (BuildParams.inFipsJvm) { - project.tasks.configureEach { + p.tasks.configureEach { enabled = false } } + + // configure project dependencies for yaml rest test plugin. + // plugin defaults to external available artifacts + p.getPluginManager().withPlugin("elasticsearch.yaml-rest-test", new Action() { + @Override + void execute(AppliedPlugin appliedPlugin) { + p.dependencies.add("yamlRestTestImplementation", project(":test:framework")) + p.dependencies.add("restTestSpecs", p.dependencies.project(path:':rest-api-spec', configuration:'basicRestSpecs')) + } + }) } configure(project('painless-whitelist')) { diff --git a/plugins/ingest-attachment/build.gradle b/plugins/ingest-attachment/build.gradle index 0f5f7a4031789..bd138a42df2f0 100644 --- a/plugins/ingest-attachment/build.gradle +++ b/plugins/ingest-attachment/build.gradle @@ -7,7 +7,7 @@ import org.elasticsearch.gradle.internal.info.BuildParams * in compliance with, at your election, the Elastic License 2.0 or the Server * Side Public License, v 1. */ -apply plugin: 'elasticsearch.yaml-rest-test' +apply plugin: 'elasticsearch.internal-yaml-rest-test' apply plugin: 'elasticsearch.yaml-rest-compat-test' esplugin { diff --git a/plugins/mapper-annotated-text/build.gradle b/plugins/mapper-annotated-text/build.gradle index 3bf05291ef2ce..de3c0b1c6fb37 100644 --- a/plugins/mapper-annotated-text/build.gradle +++ b/plugins/mapper-annotated-text/build.gradle @@ -5,7 +5,7 @@ * in compliance with, at your election, the Elastic License 2.0 or the Server * Side Public License, v 1. */ -apply plugin: 'elasticsearch.yaml-rest-test' +apply plugin: 'elasticsearch.internal-yaml-rest-test' apply plugin: 'elasticsearch.yaml-rest-compat-test' apply plugin: 'elasticsearch.internal-cluster-test' diff --git a/plugins/mapper-murmur3/build.gradle b/plugins/mapper-murmur3/build.gradle index 8bfdff8fd9130..81df70e3855bf 100644 --- a/plugins/mapper-murmur3/build.gradle +++ b/plugins/mapper-murmur3/build.gradle @@ -5,7 +5,7 @@ * in compliance with, at your election, the Elastic License 2.0 or the Server * Side Public License, v 1. */ -apply plugin: 'elasticsearch.yaml-rest-test' +apply plugin: 'elasticsearch.internal-yaml-rest-test' apply plugin: 'elasticsearch.yaml-rest-compat-test' esplugin { diff --git a/plugins/mapper-size/build.gradle b/plugins/mapper-size/build.gradle index 12176fff28c5a..ca4738335d0cc 100644 --- a/plugins/mapper-size/build.gradle +++ b/plugins/mapper-size/build.gradle @@ -5,7 +5,7 @@ * in compliance with, at your election, the Elastic License 2.0 or the Server * Side Public License, v 1. */ -apply plugin: 'elasticsearch.yaml-rest-test' +apply plugin: 'elasticsearch.internal-yaml-rest-test' apply plugin: 'elasticsearch.yaml-rest-compat-test' apply plugin: 'elasticsearch.internal-cluster-test' diff --git a/plugins/repository-azure/build.gradle b/plugins/repository-azure/build.gradle index 82459bd6f78ca..b5f7b4d498312 100644 --- a/plugins/repository-azure/build.gradle +++ b/plugins/repository-azure/build.gradle @@ -13,7 +13,7 @@ import static org.elasticsearch.gradle.PropertyNormalization.IGNORE_VALUE * in compliance with, at your election, the Elastic License 2.0 or the Server * Side Public License, v 1. 
*/ -apply plugin: 'elasticsearch.yaml-rest-test' +apply plugin: 'elasticsearch.internal-yaml-rest-test' apply plugin: 'elasticsearch.internal-cluster-test' apply plugin: 'elasticsearch.internal-test-artifact-base' diff --git a/plugins/repository-gcs/build.gradle b/plugins/repository-gcs/build.gradle index b64d911d0663a..6bd792096b022 100644 --- a/plugins/repository-gcs/build.gradle +++ b/plugins/repository-gcs/build.gradle @@ -1,7 +1,7 @@ import org.apache.tools.ant.filters.ReplaceTokens import org.elasticsearch.gradle.internal.info.BuildParams import org.elasticsearch.gradle.internal.test.RestIntegTestTask -import org.elasticsearch.gradle.internal.test.rest.YamlRestTestPlugin +import org.elasticsearch.gradle.internal.test.rest.InternalYamlRestTestPlugin import org.elasticsearch.gradle.internal.test.InternalClusterTestPlugin import java.nio.file.Files @@ -16,7 +16,7 @@ import static org.elasticsearch.gradle.PropertyNormalization.IGNORE_VALUE * in compliance with, at your election, the Elastic License 2.0 or the Server * Side Public License, v 1. */ -apply plugin: 'elasticsearch.yaml-rest-test' +apply plugin: 'elasticsearch.internal-yaml-rest-test' apply plugin: 'elasticsearch.internal-cluster-test' apply plugin: 'elasticsearch.internal-test-artifact-base' @@ -286,7 +286,7 @@ def largeBlobYamlRestTest = tasks.register("largeBlobYamlRestTest", RestIntegTes dependsOn "createServiceAccountFile" } SourceSetContainer sourceSets = project.getExtensions().getByType(SourceSetContainer.class); - SourceSet yamlRestTestSourceSet = sourceSets.getByName(YamlRestTestPlugin.SOURCE_SET_NAME) + SourceSet yamlRestTestSourceSet = sourceSets.getByName(InternalYamlRestTestPlugin.SOURCE_SET_NAME) setTestClassesDirs(yamlRestTestSourceSet.getOutput().getClassesDirs()) setClasspath(yamlRestTestSourceSet.getRuntimeClasspath()) @@ -339,7 +339,7 @@ if (useFixture) { tasks.register("yamlRestTestApplicationDefaultCredentials", RestIntegTestTask.class) { dependsOn('bundlePlugin') SourceSetContainer sourceSets = project.getExtensions().getByType(SourceSetContainer.class); - SourceSet yamlRestTestSourceSet = sourceSets.getByName(YamlRestTestPlugin.SOURCE_SET_NAME) + SourceSet yamlRestTestSourceSet = sourceSets.getByName(InternalYamlRestTestPlugin.SOURCE_SET_NAME) setTestClassesDirs(yamlRestTestSourceSet.getOutput().getClassesDirs()) setClasspath(yamlRestTestSourceSet.getRuntimeClasspath()) } diff --git a/plugins/repository-s3/build.gradle b/plugins/repository-s3/build.gradle index 0a83463d21dff..0c98d919f682b 100644 --- a/plugins/repository-s3/build.gradle +++ b/plugins/repository-s3/build.gradle @@ -1,7 +1,7 @@ import org.apache.tools.ant.filters.ReplaceTokens import org.elasticsearch.gradle.internal.info.BuildParams import org.elasticsearch.gradle.internal.test.RestIntegTestTask -import org.elasticsearch.gradle.internal.test.rest.YamlRestTestPlugin +import org.elasticsearch.gradle.internal.test.rest.InternalYamlRestTestPlugin import org.elasticsearch.gradle.internal.test.InternalClusterTestPlugin import static org.elasticsearch.gradle.PropertyNormalization.IGNORE_VALUE @@ -13,7 +13,7 @@ import static org.elasticsearch.gradle.PropertyNormalization.IGNORE_VALUE * in compliance with, at your election, the Elastic License 2.0 or the Server * Side Public License, v 1. 
*/ -apply plugin: 'elasticsearch.yaml-rest-test' +apply plugin: 'elasticsearch.internal-yaml-rest-test' apply plugin: 'elasticsearch.internal-cluster-test' apply plugin: 'elasticsearch.internal-test-artifact-base' @@ -218,7 +218,7 @@ if (useFixture) { description = "Runs REST tests using the Minio repository." dependsOn tasks.named("bundlePlugin") SourceSetContainer sourceSets = project.getExtensions().getByType(SourceSetContainer.class); - SourceSet yamlRestTestSourceSet = sourceSets.getByName(YamlRestTestPlugin.SOURCE_SET_NAME) + SourceSet yamlRestTestSourceSet = sourceSets.getByName(InternalYamlRestTestPlugin.SOURCE_SET_NAME) setTestClassesDirs(yamlRestTestSourceSet.getOutput().getClassesDirs()) setClasspath(yamlRestTestSourceSet.getRuntimeClasspath()) @@ -246,7 +246,7 @@ if (useFixture) { description = "Runs tests using the ECS repository." dependsOn('bundlePlugin') SourceSetContainer sourceSets = project.getExtensions().getByType(SourceSetContainer.class); - SourceSet yamlRestTestSourceSet = sourceSets.getByName(YamlRestTestPlugin.SOURCE_SET_NAME) + SourceSet yamlRestTestSourceSet = sourceSets.getByName(InternalYamlRestTestPlugin.SOURCE_SET_NAME) setTestClassesDirs(yamlRestTestSourceSet.getOutput().getClassesDirs()) setClasspath(yamlRestTestSourceSet.getRuntimeClasspath()) systemProperty 'tests.rest.blacklist', [ diff --git a/plugins/store-smb/build.gradle b/plugins/store-smb/build.gradle index 728e9642d9c29..91c7dcadf0226 100644 --- a/plugins/store-smb/build.gradle +++ b/plugins/store-smb/build.gradle @@ -5,7 +5,7 @@ * in compliance with, at your election, the Elastic License 2.0 or the Server * Side Public License, v 1. */ -apply plugin: 'elasticsearch.yaml-rest-test' +apply plugin: 'elasticsearch.internal-yaml-rest-test' apply plugin: 'elasticsearch.internal-cluster-test' esplugin { diff --git a/rest-api-spec/build.gradle b/rest-api-spec/build.gradle index a126408b5dbeb..7558b033b8137 100644 --- a/rest-api-spec/build.gradle +++ b/rest-api-spec/build.gradle @@ -4,7 +4,7 @@ apply plugin: 'elasticsearch.build' apply plugin: 'elasticsearch.publish' apply plugin: 'elasticsearch.rest-resources' apply plugin: 'elasticsearch.validate-rest-spec' -apply plugin: 'elasticsearch.yaml-rest-test' +apply plugin: 'elasticsearch.internal-yaml-rest-test' apply plugin: 'elasticsearch.yaml-rest-compat-test' restResources { @@ -17,7 +17,18 @@ restResources { ext.projectLicenses.set(['The Apache Software License, Version 2.0': 'http://www.apache.org/licenses/LICENSE-2.0']) ext.licenseFile = rootProject.file('licenses/APACHE-LICENSE-2.0.txt') +configurations { + // configuration to make use by external yaml rest test plugin in our examples + // easy and efficient + basicRestSpecs { + attributes { + attribute(org.gradle.api.internal.artifacts.ArtifactAttributes.ARTIFACT_FORMAT, ArtifactTypeDefinition.DIRECTORY_TYPE) + } + } +} + artifacts { + basicRestSpecs(new File(projectDir, "src/main/resources")) restSpecs(new File(projectDir, "src/main/resources/rest-api-spec/api")) restTests(new File(projectDir, "src/yamlRestTest/resources/rest-api-spec/test")) } diff --git a/test/external-modules/build.gradle b/test/external-modules/build.gradle index 396ecf5eb618e..68408b1f585d4 100644 --- a/test/external-modules/build.gradle +++ b/test/external-modules/build.gradle @@ -3,7 +3,7 @@ import org.elasticsearch.gradle.internal.info.BuildParams; subprojects { apply plugin: 'elasticsearch.internal-es-plugin' - apply plugin: 'elasticsearch.yaml-rest-test' + apply plugin: 'elasticsearch.internal-yaml-rest-test' esplugin { 
name it.name diff --git a/x-pack/plugin/async-search/qa/rest/build.gradle b/x-pack/plugin/async-search/qa/rest/build.gradle index 63e2e3fea3122..545925f34ef55 100644 --- a/x-pack/plugin/async-search/qa/rest/build.gradle +++ b/x-pack/plugin/async-search/qa/rest/build.gradle @@ -1,7 +1,7 @@ import org.elasticsearch.gradle.internal.info.BuildParams apply plugin: 'elasticsearch.internal-es-plugin' -apply plugin: 'elasticsearch.yaml-rest-test' +apply plugin: 'elasticsearch.internal-yaml-rest-test' apply plugin: 'elasticsearch.yaml-rest-compat-test' esplugin { diff --git a/x-pack/plugin/autoscaling/qa/rest/build.gradle b/x-pack/plugin/autoscaling/qa/rest/build.gradle index e95ad88469057..22bd3b6f0395a 100644 --- a/x-pack/plugin/autoscaling/qa/rest/build.gradle +++ b/x-pack/plugin/autoscaling/qa/rest/build.gradle @@ -1,6 +1,6 @@ import org.elasticsearch.gradle.internal.info.BuildParams -apply plugin: 'elasticsearch.yaml-rest-test' +apply plugin: 'elasticsearch.internal-yaml-rest-test' apply plugin: 'elasticsearch.yaml-rest-compat-test' dependencies { diff --git a/x-pack/plugin/build.gradle b/x-pack/plugin/build.gradle index b5ebc0e18c854..fff40db3823fe 100644 --- a/x-pack/plugin/build.gradle +++ b/x-pack/plugin/build.gradle @@ -2,7 +2,7 @@ import org.elasticsearch.gradle.internal.info.BuildParams import org.elasticsearch.gradle.util.GradleUtils import org.elasticsearch.gradle.internal.test.RestIntegTestTask -apply plugin: 'elasticsearch.yaml-rest-test' +apply plugin: 'elasticsearch.internal-yaml-rest-test' apply plugin: 'elasticsearch.yaml-rest-compat-test' apply plugin: 'elasticsearch.validate-rest-spec' apply plugin: 'elasticsearch.internal-test-artifact' diff --git a/x-pack/plugin/ccr/qa/rest/build.gradle b/x-pack/plugin/ccr/qa/rest/build.gradle index b96bf48d8854f..bb9d390229deb 100644 --- a/x-pack/plugin/ccr/qa/rest/build.gradle +++ b/x-pack/plugin/ccr/qa/rest/build.gradle @@ -1,4 +1,4 @@ -apply plugin: 'elasticsearch.yaml-rest-test' +apply plugin: 'elasticsearch.internal-yaml-rest-test' apply plugin: 'elasticsearch.yaml-rest-compat-test' restResources { diff --git a/x-pack/plugin/core/build.gradle b/x-pack/plugin/core/build.gradle index 9b8a1887f2dc0..7504cae334aa5 100644 --- a/x-pack/plugin/core/build.gradle +++ b/x-pack/plugin/core/build.gradle @@ -6,7 +6,7 @@ import java.nio.file.Paths apply plugin: 'elasticsearch.internal-es-plugin' apply plugin: 'elasticsearch.publish' apply plugin: 'elasticsearch.internal-cluster-test' -apply plugin: 'elasticsearch.yaml-rest-test' +apply plugin: 'elasticsearch.internal-yaml-rest-test' apply plugin: 'elasticsearch.yaml-rest-compat-test' apply plugin: 'elasticsearch.internal-test-artifact' diff --git a/x-pack/plugin/data-streams/qa/rest/build.gradle b/x-pack/plugin/data-streams/qa/rest/build.gradle index 9ef638db9e3e7..f36367babb5ea 100644 --- a/x-pack/plugin/data-streams/qa/rest/build.gradle +++ b/x-pack/plugin/data-streams/qa/rest/build.gradle @@ -1,7 +1,7 @@ import org.elasticsearch.gradle.internal.info.BuildParams apply plugin: 'elasticsearch.java-rest-test' -apply plugin: 'elasticsearch.yaml-rest-test' +apply plugin: 'elasticsearch.internal-yaml-rest-test' apply plugin: 'elasticsearch.yaml-rest-compat-test' restResources { diff --git a/x-pack/plugin/enrich/qa/rest/build.gradle b/x-pack/plugin/enrich/qa/rest/build.gradle index ca3e1409a9146..38292cd807159 100644 --- a/x-pack/plugin/enrich/qa/rest/build.gradle +++ b/x-pack/plugin/enrich/qa/rest/build.gradle @@ -1,5 +1,5 @@ apply plugin: 'elasticsearch.java-rest-test' -apply plugin: 
'elasticsearch.yaml-rest-test' +apply plugin: 'elasticsearch.internal-yaml-rest-test' apply plugin: 'elasticsearch.yaml-rest-compat-test' import org.elasticsearch.gradle.internal.info.BuildParams diff --git a/x-pack/plugin/eql/qa/rest/build.gradle b/x-pack/plugin/eql/qa/rest/build.gradle index f86a868e87e13..7ee4fc92b3efe 100644 --- a/x-pack/plugin/eql/qa/rest/build.gradle +++ b/x-pack/plugin/eql/qa/rest/build.gradle @@ -1,5 +1,5 @@ apply plugin: 'elasticsearch.java-rest-test' -apply plugin: 'elasticsearch.yaml-rest-test' +apply plugin: 'elasticsearch.internal-yaml-rest-test' apply plugin: 'elasticsearch.yaml-rest-compat-test' import org.elasticsearch.gradle.internal.info.BuildParams diff --git a/x-pack/plugin/fleet/qa/rest/build.gradle b/x-pack/plugin/fleet/qa/rest/build.gradle index b4d566a54bfd3..a5e8c02f22bef 100644 --- a/x-pack/plugin/fleet/qa/rest/build.gradle +++ b/x-pack/plugin/fleet/qa/rest/build.gradle @@ -1,6 +1,6 @@ import org.elasticsearch.gradle.internal.info.BuildParams -apply plugin: 'elasticsearch.yaml-rest-test' +apply plugin: 'elasticsearch.internal-yaml-rest-test' dependencies { yamlRestTestImplementation(testArtifact(project(xpackModule('core')))) diff --git a/x-pack/plugin/graph/qa/with-security/build.gradle b/x-pack/plugin/graph/qa/with-security/build.gradle index 8109160f1950f..065ac82c8efba 100644 --- a/x-pack/plugin/graph/qa/with-security/build.gradle +++ b/x-pack/plugin/graph/qa/with-security/build.gradle @@ -1,4 +1,4 @@ -apply plugin: 'elasticsearch.yaml-rest-test' +apply plugin: 'elasticsearch.internal-yaml-rest-test' dependencies { yamlRestTestImplementation project(":x-pack:plugin:core") diff --git a/x-pack/plugin/ilm/qa/rest/build.gradle b/x-pack/plugin/ilm/qa/rest/build.gradle index 2a3bfd39a838e..6964fd717e848 100644 --- a/x-pack/plugin/ilm/qa/rest/build.gradle +++ b/x-pack/plugin/ilm/qa/rest/build.gradle @@ -1,4 +1,4 @@ -apply plugin: 'elasticsearch.yaml-rest-test' +apply plugin: 'elasticsearch.internal-yaml-rest-test' apply plugin: 'elasticsearch.yaml-rest-compat-test' dependencies { diff --git a/x-pack/plugin/ml/qa/ml-with-security/build.gradle b/x-pack/plugin/ml/qa/ml-with-security/build.gradle index 291267de4be6c..5ecfefb85bd95 100644 --- a/x-pack/plugin/ml/qa/ml-with-security/build.gradle +++ b/x-pack/plugin/ml/qa/ml-with-security/build.gradle @@ -1,4 +1,4 @@ -apply plugin: 'elasticsearch.yaml-rest-test' +apply plugin: 'elasticsearch.internal-yaml-rest-test' dependencies { yamlRestTestImplementation(testArtifact(project(xpackModule('core')))) diff --git a/x-pack/plugin/rollup/qa/rest/build.gradle b/x-pack/plugin/rollup/qa/rest/build.gradle index ddb8eff0b2a24..9ab08ad5af84c 100644 --- a/x-pack/plugin/rollup/qa/rest/build.gradle +++ b/x-pack/plugin/rollup/qa/rest/build.gradle @@ -6,7 +6,7 @@ */ import org.elasticsearch.gradle.internal.info.BuildParams -apply plugin: 'elasticsearch.yaml-rest-test' +apply plugin: 'elasticsearch.internal-yaml-rest-test' apply plugin: 'elasticsearch.yaml-rest-compat-test' dependencies { diff --git a/x-pack/plugin/searchable-snapshots/qa/rest/build.gradle b/x-pack/plugin/searchable-snapshots/qa/rest/build.gradle index defe6eb672ea7..b340bbdcf5d70 100644 --- a/x-pack/plugin/searchable-snapshots/qa/rest/build.gradle +++ b/x-pack/plugin/searchable-snapshots/qa/rest/build.gradle @@ -1,5 +1,5 @@ apply plugin: 'elasticsearch.java-rest-test' -apply plugin: 'elasticsearch.yaml-rest-test' +apply plugin: 'elasticsearch.internal-yaml-rest-test' apply plugin: 'elasticsearch.yaml-rest-compat-test' dependencies { diff --git 
a/x-pack/plugin/spatial/build.gradle b/x-pack/plugin/spatial/build.gradle index fd93d5b67dcc3..20a9917d6cad4 100644 --- a/x-pack/plugin/spatial/build.gradle +++ b/x-pack/plugin/spatial/build.gradle @@ -1,6 +1,6 @@ apply plugin: 'elasticsearch.internal-es-plugin' apply plugin: 'elasticsearch.internal-cluster-test' -apply plugin: 'elasticsearch.yaml-rest-test' +apply plugin: 'elasticsearch.internal-yaml-rest-test' apply plugin: 'elasticsearch.yaml-rest-compat-test' esplugin { diff --git a/x-pack/plugin/stack/qa/rest/build.gradle b/x-pack/plugin/stack/qa/rest/build.gradle index 8f95a44c68dd7..8990f8ef8c35a 100644 --- a/x-pack/plugin/stack/qa/rest/build.gradle +++ b/x-pack/plugin/stack/qa/rest/build.gradle @@ -1,4 +1,4 @@ -apply plugin: 'elasticsearch.yaml-rest-test' +apply plugin: 'elasticsearch.internal-yaml-rest-test' apply plugin: 'elasticsearch.yaml-rest-compat-test' dependencies { diff --git a/x-pack/plugin/text-structure/qa/text-structure-with-security/build.gradle b/x-pack/plugin/text-structure/qa/text-structure-with-security/build.gradle index d56ccd5837a70..219ce40562c3c 100644 --- a/x-pack/plugin/text-structure/qa/text-structure-with-security/build.gradle +++ b/x-pack/plugin/text-structure/qa/text-structure-with-security/build.gradle @@ -1,4 +1,4 @@ -apply plugin: 'elasticsearch.yaml-rest-test' +apply plugin: 'elasticsearch.internal-yaml-rest-test' dependencies { yamlRestTestImplementation(testArtifact(project(xpackModule('core')))) diff --git a/x-pack/plugin/watcher/qa/rest/build.gradle b/x-pack/plugin/watcher/qa/rest/build.gradle index f48e5c4db0bf1..c0ba07637dc55 100644 --- a/x-pack/plugin/watcher/qa/rest/build.gradle +++ b/x-pack/plugin/watcher/qa/rest/build.gradle @@ -1,7 +1,7 @@ import org.elasticsearch.gradle.internal.info.BuildParams apply plugin: 'elasticsearch.java-rest-test' -apply plugin: 'elasticsearch.yaml-rest-test' +apply plugin: 'elasticsearch.internal-yaml-rest-test' apply plugin: 'elasticsearch.yaml-rest-compat-test' dependencies { diff --git a/x-pack/plugin/watcher/qa/with-security/build.gradle b/x-pack/plugin/watcher/qa/with-security/build.gradle index 2d04da059f9aa..2b284cc79cd0d 100644 --- a/x-pack/plugin/watcher/qa/with-security/build.gradle +++ b/x-pack/plugin/watcher/qa/with-security/build.gradle @@ -1,5 +1,5 @@ apply plugin: 'elasticsearch.java-rest-test' -apply plugin: 'elasticsearch.yaml-rest-test' +apply plugin: 'elasticsearch.internal-yaml-rest-test' apply plugin: 'elasticsearch.yaml-rest-compat-test' dependencies { diff --git a/x-pack/qa/runtime-fields/build.gradle b/x-pack/qa/runtime-fields/build.gradle index f7698ae96a964..e5f5c228e6e4a 100644 --- a/x-pack/qa/runtime-fields/build.gradle +++ b/x-pack/qa/runtime-fields/build.gradle @@ -9,7 +9,7 @@ tasks.named("test").configure { enabled = false } subprojects { if (project.name.startsWith('core-with-')) { - apply plugin: 'elasticsearch.yaml-rest-test' + apply plugin: 'elasticsearch.internal-yaml-rest-test' dependencies { yamlRestTestImplementation xpackProject(":x-pack:qa:runtime-fields") From 441d7b51999f12aa064d0cd9e2f8e975074cda6e Mon Sep 17 00:00:00 2001 From: Alexander Reelsen Date: Tue, 31 Aug 2021 09:14:42 +0200 Subject: [PATCH 036/128] Watcher: Fix index action simulation when indexing several documents (#76820) When using the index action to index several documents at once, simulating the action still ended up indexing the documents because there was no abort for the case where the action should only be simulated. This commit adds such an abort condition together with a proper simulation response.
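For illustration, a minimal execute-watch request sketch along the lines of the YAML test added below (the watch definition and document values are hypothetical); with "_all" set to "simulate", the bulk request is no longer sent and the action result instead reports the documents that would have been indexed:

POST _watcher/watch/_execute
{
  "watch": {
    "trigger": { "schedule": { "cron": "0 0 0 1 * ? 2099" } },
    "input": { "simple": { "foo": "bar" } },
    "actions": {
      "index_payload": {
        "transform": { "script": "return ['_doc':[['_id':'the-id','_index':'the-index','a':'b']]]" },
        "index": {}
      }
    }
  },
  "action_modes": { "_all": "simulate" }
}

The resulting watch record is expected to carry "status": "simulated" for the action and to echo the per-document "_id" and "_index" values under result.actions.0.index.request.source rather than writing them to the index.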
Closes #74148 #66735 --- .../watcher/execute_watch/50_action_mode.yml | 34 +++++++++++++++++++ .../actions/index/ExecutableIndexAction.java | 19 ++++++++++- .../actions/index/IndexActionTests.java | 22 ++++++++++++ 3 files changed, 74 insertions(+), 1 deletion(-) diff --git a/x-pack/plugin/watcher/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/watcher/execute_watch/50_action_mode.yml b/x-pack/plugin/watcher/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/watcher/execute_watch/50_action_mode.yml index 38dcabf5601a8..4d3942bc08396 100644 --- a/x-pack/plugin/watcher/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/watcher/execute_watch/50_action_mode.yml +++ b/x-pack/plugin/watcher/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/watcher/execute_watch/50_action_mode.yml @@ -71,3 +71,37 @@ teardown: - match: { watch_record.result.actions.0.id: "logging" } - match: { watch_record.result.actions.0.status: "simulated" } +--- +"Test simulate index action": + - do: + watcher.execute_watch: + body: > + { + "watch": { + "trigger": { + "schedule" : { "cron" : "0 0 0 1 * ? 2099" } + }, + "input": { + "simple": { + "foo": "bar" + } + }, + "actions": { + "index_payload" : { + "transform": { "script": "return ['_doc':[['_id':'the-id','_index':'the-index','a':'b']]]"}, + "index" : {} + } + } + }, + "action_modes" : { + "_all" : "simulate" + } + } + + - match: { watch_record.trigger_event.type: "manual" } + - match: { watch_record.state: "executed" } + - match: { watch_record.status.execution_state: "executed" } + - match: { watch_record.result.actions.0.id: "index_payload" } + - match: { watch_record.result.actions.0.status: "simulated" } + - match: { watch_record.result.actions.0.index.request.source.0._id: "the-id" } + - match: { watch_record.result.actions.0.index.request.source.0._index: "the-index" } diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/actions/index/ExecutableIndexAction.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/actions/index/ExecutableIndexAction.java index fc219399af001..1fcbc53e8fca3 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/actions/index/ExecutableIndexAction.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/actions/index/ExecutableIndexAction.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.watcher.actions.index; import org.apache.logging.log4j.Logger; +import org.elasticsearch.action.DocWriteRequest; import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.bulk.BulkItemResponse; import org.elasticsearch.action.bulk.BulkRequest; @@ -15,9 +16,9 @@ import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.client.Client; import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.core.TimeValue; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.core.TimeValue; import org.elasticsearch.xpack.core.ClientHelper; import org.elasticsearch.xpack.core.watcher.actions.Action; import org.elasticsearch.xpack.core.watcher.actions.Action.Result.Status; @@ -145,6 +146,22 @@ Action.Result indexBulk(Iterable list, String actionId, WatchExecutionContext } bulkRequest.add(indexRequest); } + + if (ctx.simulateAction(actionId)) { + try (XContentBuilder builder = jsonBuilder().startArray()) { + for (DocWriteRequest request : bulkRequest.requests()) { + builder.startObject(); + builder.field("_id", 
request.id()); + builder.field("_index", request.index()); + builder.endObject(); + } + builder.endArray(); + + return new IndexAction.Simulated("", "", + action.refreshPolicy, new XContentSource(BytesReference.bytes(builder), XContentType.JSON)); + } + } + ClientHelper.assertNoAuthorizationHeader(ctx.watch().status().getHeaders()); BulkResponse bulkResponse = ClientHelper.executeWithHeaders(ctx.watch().status().getHeaders(), ClientHelper.WATCHER_ORIGIN, client, () -> client.bulk(bulkRequest).actionGet(bulkDefaultTimeout)); diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/index/IndexActionTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/index/IndexActionTests.java index f39a592077d2d..ec92e60ef956f 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/index/IndexActionTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/index/IndexActionTests.java @@ -60,6 +60,7 @@ import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.startsWith; import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verifyZeroInteractions; import static org.mockito.Mockito.when; public class IndexActionTests extends ESTestCase { @@ -383,4 +384,25 @@ public void testFailureResult() throws Exception { assertThat(result.status(), is(Status.FAILURE)); } } + + public void testIndexSeveralDocumentsIsSimulated() throws Exception { + IndexAction action = new IndexAction("test-index", null, null, "@timestamp", null, null, refreshPolicy); + ExecutableIndexAction executable = new ExecutableIndexAction(action, logger, client, + TimeValue.timeValueSeconds(30), TimeValue.timeValueSeconds(30)); + + String docId = randomAlphaOfLength(5); + final List> docs = List.of(Map.of("foo", "bar", "_id", docId)); + Payload payload; + if (randomBoolean()) { + payload = new Payload.Simple("_doc", docs); + } else { + payload = new Payload.Simple("_doc", docs.toArray()); + } + WatchExecutionContext ctx = WatcherTestUtils.mockExecutionContext("_id", ZonedDateTime.now(ZoneOffset.UTC), payload); + when(ctx.simulateAction("my_id")).thenReturn(true); + + Action.Result result = executable.execute("my_id", ctx, payload); + assertThat(result.status(), is(Status.SIMULATED)); + verifyZeroInteractions(client); + } } From 2d0773cd3ceb771d8e54e3e2c47637d2c6a02fe6 Mon Sep 17 00:00:00 2001 From: markharwood Date: Tue, 31 Aug 2021 11:35:54 +0100 Subject: [PATCH 037/128] Add support for QueryString-parsed wildcard queries on runtime keyword fields. (#76867) The QueryStringQuery parser assumes that wildcard queries should use normalized values in queries. The KeywordScriptFieldType did not support this so was throwing an error. Given there is currently no concept of normalisation in scripted fields I assume it is safe to just add support for this in the same way un-normalized wildcard queries are handled - it feels right that they should behave the same rather than throw an error. Added a test too. 
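For illustration, a minimal search sketch of the QueryString-parsed wildcard case this enables (the index name, runtime field name, and backing keyword field "author" are assumptions, not part of the change):

GET my-index/_search
{
  "runtime_mappings": {
    "author_rt": {
      "type": "keyword",
      "script": "emit(doc['author'].value)"
    }
  },
  "query": {
    "query_string": {
      "query": "author_rt:jo*n"
    }
  }
}

Before this change the script-backed keyword field rejected the normalized wildcard query that the query_string parser builds for such a clause; with the new override it is handled the same way as an un-normalized wildcard query.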
Closes #76838 --- .../index/mapper/KeywordScriptFieldType.java | 6 ++++++ .../index/mapper/KeywordScriptFieldTypeTests.java | 12 ++++++++++++ 2 files changed, 18 insertions(+) diff --git a/server/src/main/java/org/elasticsearch/index/mapper/KeywordScriptFieldType.java b/server/src/main/java/org/elasticsearch/index/mapper/KeywordScriptFieldType.java index edcb1f5cae382..13726b6a67666 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/KeywordScriptFieldType.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/KeywordScriptFieldType.java @@ -215,4 +215,10 @@ public Query wildcardQuery(String value, RewriteMethod method, boolean caseInsen checkAllowExpensiveQueries(context); return new StringScriptFieldWildcardQuery(script, leafFactory(context), name(), value, caseInsensitive); } + + @Override + public Query normalizedWildcardQuery(String value, RewriteMethod method, SearchExecutionContext context) { + checkAllowExpensiveQueries(context); + return new StringScriptFieldWildcardQuery(script, leafFactory(context), name(), value, false); + } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/KeywordScriptFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/KeywordScriptFieldTypeTests.java index c0629dd3f0373..42d1093d5f173 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/KeywordScriptFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/KeywordScriptFieldTypeTests.java @@ -326,6 +326,18 @@ public void testWildcardQuery() throws IOException { } } } + + // Normalized WildcardQueries are requested by the QueryStringQueryParser + public void testNormalizedWildcardQuery() throws IOException { + try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { + iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"aab\"]}")))); + iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"b\"]}")))); + try (DirectoryReader reader = iw.getReader()) { + IndexSearcher searcher = newSearcher(reader); + assertThat(searcher.count(simpleMappedFieldType().normalizedWildcardQuery("a*b", null, mockContext())), equalTo(1)); + } + } + } public void testWildcardQueryIsExpensive() { checkExpensiveQuery(this::randomWildcardQuery); From 90d289932332bc0b90c0f557caf6296f78e8f80e Mon Sep 17 00:00:00 2001 From: Dan Hermann Date: Tue, 31 Aug 2021 06:40:52 -0500 Subject: [PATCH 038/128] ECS support for Grok processor (#76885) --- .../java/org/elasticsearch/grok/Grok.java | 48 +- .../src/main/resources/patterns/ecs-v1/aws | 28 ++ .../src/main/resources/patterns/ecs-v1/bacula | 53 +++ .../src/main/resources/patterns/ecs-v1/bind | 13 + .../src/main/resources/patterns/ecs-v1/bro | 30 ++ .../src/main/resources/patterns/ecs-v1/exim | 26 + .../main/resources/patterns/ecs-v1/firewalls | 111 +++++ .../resources/patterns/ecs-v1/grok-patterns | 95 ++++ .../main/resources/patterns/ecs-v1/haproxy | 40 ++ .../src/main/resources/patterns/ecs-v1/httpd | 17 + .../src/main/resources/patterns/ecs-v1/java | 34 ++ .../src/main/resources/patterns/ecs-v1/junos | 13 + .../resources/patterns/ecs-v1/linux-syslog | 16 + .../resources/patterns/{ => ecs-v1}/maven | 0 .../resources/patterns/ecs-v1/mcollective | 4 + .../main/resources/patterns/ecs-v1/mongodb | 7 + .../src/main/resources/patterns/ecs-v1/nagios | 124 +++++ .../main/resources/patterns/ecs-v1/postgresql | 2 + .../src/main/resources/patterns/ecs-v1/rails | 13 + .../src/main/resources/patterns/ecs-v1/redis | 3 
+ .../src/main/resources/patterns/ecs-v1/ruby | 2 + .../src/main/resources/patterns/ecs-v1/squid | 6 + .../src/main/resources/patterns/ecs-v1/zeek | 33 ++ .../main/resources/patterns/{ => legacy}/aws | 0 .../resources/patterns/{ => legacy}/bacula | 0 .../main/resources/patterns/{ => legacy}/bind | 0 .../main/resources/patterns/{ => legacy}/bro | 0 .../main/resources/patterns/{ => legacy}/exim | 0 .../resources/patterns/{ => legacy}/firewalls | 0 .../patterns/{ => legacy}/grok-patterns | 0 .../resources/patterns/{ => legacy}/haproxy | 0 .../resources/patterns/{ => legacy}/httpd | 0 .../main/resources/patterns/{ => legacy}/java | 0 .../resources/patterns/{ => legacy}/junos | 0 .../patterns/{ => legacy}/linux-syslog | 0 .../src/main/resources/patterns/legacy/maven | 1 + .../{ => legacy}/mcollective-patterns | 0 .../resources/patterns/{ => legacy}/mongodb | 0 .../resources/patterns/{ => legacy}/nagios | 0 .../patterns/{ => legacy}/postgresql | 0 .../resources/patterns/{ => legacy}/rails | 0 .../resources/patterns/{ => legacy}/redis | 0 .../main/resources/patterns/{ => legacy}/ruby | 0 .../resources/patterns/{ => legacy}/squid | 0 .../org/elasticsearch/grok/GrokTests.java | 450 ++++++++++++++---- .../ingest/common/GrokProcessor.java | 15 +- .../ingest/common/GrokProcessorGetAction.java | 48 +- .../ingest/common/IngestCommonPlugin.java | 3 +- .../common/GrokProcessorFactoryTests.java | 27 +- .../common/GrokProcessorGetActionTests.java | 83 +++- .../runtimefields/NamedGroupExtractor.java | 4 +- .../test/ingest/20_combine_processors.yml | 57 +++ .../categorization/GrokPatternCreator.java | 2 +- .../structurefinder/GrokPatternCreator.java | 14 +- .../structurefinder/TextStructureUtils.java | 6 +- .../TimestampFormatFinder.java | 4 +- .../structurefinder/TimeoutCheckerTests.java | 2 +- 57 files changed, 1267 insertions(+), 167 deletions(-) create mode 100644 libs/grok/src/main/resources/patterns/ecs-v1/aws create mode 100644 libs/grok/src/main/resources/patterns/ecs-v1/bacula create mode 100644 libs/grok/src/main/resources/patterns/ecs-v1/bind create mode 100644 libs/grok/src/main/resources/patterns/ecs-v1/bro create mode 100644 libs/grok/src/main/resources/patterns/ecs-v1/exim create mode 100644 libs/grok/src/main/resources/patterns/ecs-v1/firewalls create mode 100644 libs/grok/src/main/resources/patterns/ecs-v1/grok-patterns create mode 100644 libs/grok/src/main/resources/patterns/ecs-v1/haproxy create mode 100644 libs/grok/src/main/resources/patterns/ecs-v1/httpd create mode 100644 libs/grok/src/main/resources/patterns/ecs-v1/java create mode 100644 libs/grok/src/main/resources/patterns/ecs-v1/junos create mode 100644 libs/grok/src/main/resources/patterns/ecs-v1/linux-syslog rename libs/grok/src/main/resources/patterns/{ => ecs-v1}/maven (100%) create mode 100644 libs/grok/src/main/resources/patterns/ecs-v1/mcollective create mode 100644 libs/grok/src/main/resources/patterns/ecs-v1/mongodb create mode 100644 libs/grok/src/main/resources/patterns/ecs-v1/nagios create mode 100644 libs/grok/src/main/resources/patterns/ecs-v1/postgresql create mode 100644 libs/grok/src/main/resources/patterns/ecs-v1/rails create mode 100644 libs/grok/src/main/resources/patterns/ecs-v1/redis create mode 100644 libs/grok/src/main/resources/patterns/ecs-v1/ruby create mode 100644 libs/grok/src/main/resources/patterns/ecs-v1/squid create mode 100644 libs/grok/src/main/resources/patterns/ecs-v1/zeek rename libs/grok/src/main/resources/patterns/{ => legacy}/aws (100%) rename libs/grok/src/main/resources/patterns/{ => 
legacy}/bacula (100%) rename libs/grok/src/main/resources/patterns/{ => legacy}/bind (100%) rename libs/grok/src/main/resources/patterns/{ => legacy}/bro (100%) rename libs/grok/src/main/resources/patterns/{ => legacy}/exim (100%) rename libs/grok/src/main/resources/patterns/{ => legacy}/firewalls (100%) rename libs/grok/src/main/resources/patterns/{ => legacy}/grok-patterns (100%) rename libs/grok/src/main/resources/patterns/{ => legacy}/haproxy (100%) rename libs/grok/src/main/resources/patterns/{ => legacy}/httpd (100%) rename libs/grok/src/main/resources/patterns/{ => legacy}/java (100%) rename libs/grok/src/main/resources/patterns/{ => legacy}/junos (100%) rename libs/grok/src/main/resources/patterns/{ => legacy}/linux-syslog (100%) create mode 100644 libs/grok/src/main/resources/patterns/legacy/maven rename libs/grok/src/main/resources/patterns/{ => legacy}/mcollective-patterns (100%) rename libs/grok/src/main/resources/patterns/{ => legacy}/mongodb (100%) rename libs/grok/src/main/resources/patterns/{ => legacy}/nagios (100%) rename libs/grok/src/main/resources/patterns/{ => legacy}/postgresql (100%) rename libs/grok/src/main/resources/patterns/{ => legacy}/rails (100%) rename libs/grok/src/main/resources/patterns/{ => legacy}/redis (100%) rename libs/grok/src/main/resources/patterns/{ => legacy}/ruby (100%) rename libs/grok/src/main/resources/patterns/{ => legacy}/squid (100%) diff --git a/libs/grok/src/main/java/org/elasticsearch/grok/Grok.java b/libs/grok/src/main/java/org/elasticsearch/grok/Grok.java index 8e26acf58d17e..984c992b765e0 100644 --- a/libs/grok/src/main/java/org/elasticsearch/grok/Grok.java +++ b/libs/grok/src/main/java/org/elasticsearch/grok/Grok.java @@ -22,6 +22,7 @@ import java.io.InputStreamReader; import java.nio.charset.StandardCharsets; import java.util.ArrayList; +import java.util.Arrays; import java.util.Collections; import java.util.Iterator; import java.util.LinkedHashMap; @@ -31,10 +32,14 @@ import java.util.function.Consumer; public final class Grok { + + public static final String[] ECS_COMPATIBILITY_MODES = {"disabled", "v1"}; + /** * Patterns built in to the grok library. */ - public static final Map BUILTIN_PATTERNS = loadBuiltinPatterns(); + private static Map LEGACY_PATTERNS; + private static Map ECS_V1_PATTERNS; private static final String NAME_GROUP = "name"; private static final String SUBNAME_GROUP = "subname"; @@ -296,16 +301,51 @@ public List captureConfig() { /** * Load built-in patterns. 
*/ - private static Map loadBuiltinPatterns() { - String[] patternNames = new String[] { + public static synchronized Map getBuiltinPatterns(boolean ecsCompatibility) { + if (ecsCompatibility) { + if (ECS_V1_PATTERNS == null) { + ECS_V1_PATTERNS = loadPatterns(ecsCompatibility); + } + return ECS_V1_PATTERNS; + } else { + if (LEGACY_PATTERNS == null) { + LEGACY_PATTERNS = loadPatterns(ecsCompatibility); + } + return LEGACY_PATTERNS; + } + } + + public static Map getBuiltinPatterns(String ecsCompatibility) { + if (isValidEcsCompatibilityMode(ecsCompatibility)) { + return getBuiltinPatterns(ECS_COMPATIBILITY_MODES[1].equals(ecsCompatibility)); + } else { + throw new IllegalArgumentException("unsupported ECS compatibility mode [" + ecsCompatibility + "]"); + } + } + + public static boolean isValidEcsCompatibilityMode(String ecsCompatibility) { + return Arrays.asList(ECS_COMPATIBILITY_MODES).contains(ecsCompatibility); + } + + private static Map loadPatterns(boolean ecsCompatibility) { + String[] legacyPatternNames = { "aws", "bacula", "bind", "bro", "exim", "firewalls", "grok-patterns", "haproxy", "httpd", "java", "junos", "linux-syslog", "maven", "mcollective-patterns", "mongodb", "nagios", "postgresql", "rails", "redis", "ruby", "squid" }; + String[] ecsPatternNames = { + "aws", "bacula", "bind", "bro", "exim", "firewalls", "grok-patterns", "haproxy", + "httpd", "java", "junos", "linux-syslog", "maven", "mcollective", "mongodb", "nagios", + "postgresql", "rails", "redis", "ruby", "squid", "zeek" + }; + + String[] patternNames = ecsCompatibility ? ecsPatternNames : legacyPatternNames; + String directory = ecsCompatibility ? "/patterns/ecs-v1/" : "/patterns/legacy/"; + Map builtinPatterns = new LinkedHashMap<>(); for (String pattern : patternNames) { try { - try(InputStream is = Grok.class.getResourceAsStream("/patterns/" + pattern)) { + try (InputStream is = Grok.class.getResourceAsStream(directory + pattern)) { loadPatterns(builtinPatterns, is); } } catch (IOException e) { diff --git a/libs/grok/src/main/resources/patterns/ecs-v1/aws b/libs/grok/src/main/resources/patterns/ecs-v1/aws new file mode 100644 index 0000000000000..35d1467adce08 --- /dev/null +++ b/libs/grok/src/main/resources/patterns/ecs-v1/aws @@ -0,0 +1,28 @@ +S3_REQUEST_LINE (?:%{WORD:http.request.method} %{NOTSPACE:url.original}(?: HTTP/%{NUMBER:http.version})?) + +S3_ACCESS_LOG %{WORD:aws.s3access.bucket_owner} %{NOTSPACE:aws.s3access.bucket} \%{HTTPDATE:timestamp}\ (?:-|%{IP:client.ip}) (?:-|%{NOTSPACE:client.user.id}) %{NOTSPACE:aws.s3access.request_id} %{NOTSPACE:aws.s3access.operation} (?:-|%{NOTSPACE:aws.s3access.key}) (?:-|"%{S3_REQUEST_LINE:aws.s3access.request_uri}") (?:-|%{INT:http.response.status_code:int}) (?:-|%{NOTSPACE:aws.s3access.error_code}) (?:-|%{INT:aws.s3access.bytes_sent:long}) (?:-|%{INT:aws.s3access.object_size:long}) (?:-|%{INT:aws.s3access.total_time:int}) (?:-|%{INT:aws.s3access.turn_around_time:int}) "(?:-|%{DATA:http.request.referrer})" "(?:-|%{DATA:user_agent.original})" (?:-|%{NOTSPACE:aws.s3access.version_id})(?: (?:-|%{NOTSPACE:aws.s3access.host_id}) (?:-|%{NOTSPACE:aws.s3access.signature_version}) (?:-|%{NOTSPACE:tls.cipher}) (?:-|%{NOTSPACE:aws.s3access.authentication_type}) (?:-|%{NOTSPACE:aws.s3access.host_header}) (?:-|%{NOTSPACE:aws.s3access.tls_version}))? +# :long - %{INT:aws.s3access.bytes_sent:int} +# :long - %{INT:aws.s3access.object_size:int} + +ELB_URIHOST %{IPORHOST:url.domain}(?::%{POSINT:url.port:int})? +ELB_URIPATHQUERY %{URIPATH:url.path}(?:\?%{URIQUERY:url.query})? 
+# deprecated - old name: +ELB_URIPATHPARAM %{ELB_URIPATHQUERY} +ELB_URI %{URIPROTO:url.scheme}://(?:%{USER:url.username}(?::^@*)?@)?(?:%{ELB_URIHOST})?(?:%{ELB_URIPATHQUERY})? + +ELB_REQUEST_LINE (?:%{WORD:http.request.method} %{ELB_URI:url.original}(?: HTTP/%{NUMBER:http.version})?) + +# pattern supports 'regular' HTTP ELB format +ELB_V1_HTTP_LOG %{TIMESTAMP_ISO8601:timestamp} %{NOTSPACE:aws.elb.name} %{IP:source.ip}:%{INT:source.port:int} (?:-|(?:%{IP:aws.elb.backend.ip}:%{INT:aws.elb.backend.port:int})) (?:-1|%{NUMBER:aws.elb.request_processing_time.sec:float}) (?:-1|%{NUMBER:aws.elb.backend_processing_time.sec:float}) (?:-1|%{NUMBER:aws.elb.response_processing_time.sec:float}) %{INT:http.response.status_code:int} (?:-|%{INT:aws.elb.backend.http.response.status_code:int}) %{INT:http.request.body.bytes:long} %{INT:http.response.body.bytes:long} "%{ELB_REQUEST_LINE}"(?: "(?:-|%{DATA:user_agent.original})" (?:-|%{NOTSPACE:tls.cipher}) (?:-|%{NOTSPACE:aws.elb.ssl_protocol}))? +# :long - %{INT:http.request.body.bytes:int} +# :long - %{INT:http.response.body.bytes:int} + +ELB_ACCESS_LOG %{ELB_V1_HTTP_LOG} + +# pattern used to match a shorted format, that's why we have the optional part (starting with *http.version*) at the end +CLOUDFRONT_ACCESS_LOG (?%{YEAR}-%{MONTHNUM}-%{MONTHDAY}\t%{TIME})\t%{WORD:aws.cloudfront.x_edge_location}\t(?:-|%{INT:destination.bytes:long})\t%{IPORHOST:source.ip}\t%{WORD:http.request.method}\t%{HOSTNAME:url.domain}\t%{NOTSPACE:url.path}\t(?:(?:000)|%{INT:http.response.status_code:int})\t(?:-|%{DATA:http.request.referrer})\t%{DATA:user_agent.original}\t(?:-|%{DATA:url.query})\t(?:-|%{DATA:aws.cloudfront.http.request.cookie})\t%{WORD:aws.cloudfront.x_edge_result_type}\t%{NOTSPACE:aws.cloudfront.x_edge_request_id}\t%{HOSTNAME:aws.cloudfront.http.request.host}\t%{URIPROTO:network.protocol}\t(?:-|%{INT:source.bytes:long})\t%{NUMBER:aws.cloudfront.time_taken:float}\t(?:-|%{IP:network.forwarded_ip})\t(?:-|%{DATA:aws.cloudfront.ssl_protocol})\t(?:-|%{NOTSPACE:tls.cipher})\t%{WORD:aws.cloudfront.x_edge_response_result_type}(?:\t(?:-|HTTP/%{NUMBER:http.version})\t(?:-|%{DATA:aws.cloudfront.fle_status})\t(?:-|%{DATA:aws.cloudfront.fle_encrypted_fields})\t%{INT:source.port:int}\t%{NUMBER:aws.cloudfront.time_to_first_byte:float}\t(?:-|%{DATA:aws.cloudfront.x_edge_detailed_result_type})\t(?:-|%{NOTSPACE:http.request.mime_type})\t(?:-|%{INT:aws.cloudfront.http.request.size:long})\t(?:-|%{INT:aws.cloudfront.http.request.range.start:long})\t(?:-|%{INT:aws.cloudfront.http.request.range.end:long}))? +# :long - %{INT:destination.bytes:int} +# :long - %{INT:source.bytes:int} +# :long - %{INT:aws.cloudfront.http.request.size:int} +# :long - %{INT:aws.cloudfront.http.request.range.start:int} +# :long - %{INT:aws.cloudfront.http.request.range.end:int} diff --git a/libs/grok/src/main/resources/patterns/ecs-v1/bacula b/libs/grok/src/main/resources/patterns/ecs-v1/bacula new file mode 100644 index 0000000000000..169defdecea3a --- /dev/null +++ b/libs/grok/src/main/resources/patterns/ecs-v1/bacula @@ -0,0 +1,53 @@ +BACULA_TIMESTAMP %{MONTHDAY}-%{MONTH}(?:-%{YEAR})? %{HOUR}:%{MINUTE} +BACULA_HOST %{HOSTNAME} +BACULA_VOLUME %{USER} +BACULA_DEVICE %{USER} +BACULA_DEVICEPATH %{UNIXPATH} +BACULA_CAPACITY %{INT}{1,3}(,%{INT}{3})* +BACULA_VERSION %{USER} +BACULA_JOB %{USER} + +BACULA_LOG_MAX_CAPACITY User defined maximum volume capacity %{BACULA_CAPACITY:bacula.volume.max_capacity} exceeded on device \"%{BACULA_DEVICE:bacula.volume.device}\" \(%{BACULA_DEVICEPATH:bacula.volume.path}\).? 
+BACULA_LOG_END_VOLUME End of medium on Volume \"%{BACULA_VOLUME:bacula.volume.name}\" Bytes=%{BACULA_CAPACITY:bacula.volume.bytes} Blocks=%{BACULA_CAPACITY:bacula.volume.blocks} at %{BACULA_TIMESTAMP:bacula.timestamp}. +BACULA_LOG_NEW_VOLUME Created new Volume \"%{BACULA_VOLUME:bacula.volume.name}\" in catalog. +BACULA_LOG_NEW_LABEL Labeled new Volume \"%{BACULA_VOLUME:bacula.volume.name}\" on (?:file )?device \"%{BACULA_DEVICE:bacula.volume.device}\" \(%{BACULA_DEVICEPATH:bacula.volume.path}\). +BACULA_LOG_WROTE_LABEL Wrote label to prelabeled Volume \"%{BACULA_VOLUME:bacula.volume.name}\" on device \"%{BACULA_DEVICE:bacula.volume.device}\" \(%{BACULA_DEVICEPATH:bacula.volume.path}\) +BACULA_LOG_NEW_MOUNT New volume \"%{BACULA_VOLUME:bacula.volume.name}\" mounted on device \"%{BACULA_DEVICE:bacula.volume.device}\" \(%{BACULA_DEVICEPATH:bacula.volume.path}\) at %{BACULA_TIMESTAMP:bacula.timestamp}. +BACULA_LOG_NOOPEN \s*Cannot open %{DATA}: ERR=%{GREEDYDATA:error.message} +BACULA_LOG_NOOPENDIR \s*Could not open directory \"?%{DATA:file.path}\"?: ERR=%{GREEDYDATA:error.message} +BACULA_LOG_NOSTAT \s*Could not stat %{DATA:file.path}: ERR=%{GREEDYDATA:error.message} +BACULA_LOG_NOJOBS There are no more Jobs associated with Volume \"%{BACULA_VOLUME:bacula.volume.name}\". Marking it purged. +BACULA_LOG_ALL_RECORDS_PRUNED .*?All records pruned from Volume \"%{BACULA_VOLUME:bacula.volume.name}\"; marking it \"Purged\" +BACULA_LOG_BEGIN_PRUNE_JOBS Begin pruning Jobs older than %{INT} month %{INT} days . +BACULA_LOG_BEGIN_PRUNE_FILES Begin pruning Files. +BACULA_LOG_PRUNED_JOBS Pruned %{INT} Jobs* for client %{BACULA_HOST:bacula.client.name} from catalog. +BACULA_LOG_PRUNED_FILES Pruned Files from %{INT} Jobs* for client %{BACULA_HOST:bacula.client.name} from catalog. +BACULA_LOG_ENDPRUNE End auto prune. +BACULA_LOG_STARTJOB Start Backup JobId %{INT}, Job=%{BACULA_JOB:bacula.job.name} +BACULA_LOG_STARTRESTORE Start Restore Job %{BACULA_JOB:bacula.job.name} +BACULA_LOG_USEDEVICE Using Device \"%{BACULA_DEVICE:bacula.volume.device}\" +BACULA_LOG_DIFF_FS \s*%{UNIXPATH} is a different filesystem. Will not descend from %{UNIXPATH} into it. +BACULA_LOG_JOBEND Job write elapsed time = %{DATA:bacula.job.elapsed_time}, Transfer rate = %{NUMBER} (K|M|G)? Bytes/second +BACULA_LOG_NOPRUNE_JOBS No Jobs found to prune. +BACULA_LOG_NOPRUNE_FILES No Files found to prune. +BACULA_LOG_VOLUME_PREVWRITTEN Volume \"?%{BACULA_VOLUME:bacula.volume.name}\"? previously written, moving to end of data. +BACULA_LOG_READYAPPEND Ready to append to end of Volume \"%{BACULA_VOLUME:bacula.volume.name}\" size=%{INT:bacula.volume.size:long} +# :long - %{INT:bacula.volume.size:int} +BACULA_LOG_CANCELLING Cancelling duplicate JobId=%{INT:bacula.job.other_id}. +BACULA_LOG_MARKCANCEL JobId %{INT:bacula.job.id}, Job %{BACULA_JOB:bacula.job.name} marked to be canceled. +BACULA_LOG_CLIENT_RBJ shell command: run ClientRunBeforeJob \"%{GREEDYDATA:bacula.job.client_run_before_command}\" +BACULA_LOG_VSS (Generate )?VSS (Writer)? +BACULA_LOG_MAXSTART Fatal [eE]rror: Job canceled because max start delay time exceeded. +BACULA_LOG_DUPLICATE Fatal [eE]rror: JobId %{INT:bacula.job.other_id} already running. Duplicate job not allowed. +BACULA_LOG_NOJOBSTAT Fatal [eE]rror: No Job status returned from FD. +BACULA_LOG_FATAL_CONN Fatal [eE]rror: bsock.c:133 Unable to connect to (Client: %{BACULA_HOST:bacula.client.name}|Storage daemon) on %{IPORHOST:client.address}:%{POSINT:client.port:int}. 
ERR=%{GREEDYDATA:error.message} +BACULA_LOG_NO_CONNECT Warning: bsock.c:127 Could not connect to (Client: %{BACULA_HOST:bacula.client.name}|Storage daemon) on %{IPORHOST:client.address}:%{POSINT:client.port:int}. ERR=%{GREEDYDATA:error.message} +BACULA_LOG_NO_AUTH Fatal error: Unable to authenticate with File daemon at \"?%{IPORHOST:client.address}(?::%{POSINT:client.port:int})?\"?. Possible causes: +BACULA_LOG_NOSUIT No prior or suitable Full backup found in catalog. Doing FULL backup. +BACULA_LOG_NOPRIOR No prior Full backup Job record found. + +BACULA_LOG_JOB (Error: )?Bacula %{BACULA_HOST} %{BACULA_VERSION} \(%{BACULA_VERSION}\): + +BACULA_LOG %{BACULA_TIMESTAMP:timestamp} %{BACULA_HOST:host.hostname}(?: JobId %{INT:bacula.job.id})?:? (%{BACULA_LOG_MAX_CAPACITY}|%{BACULA_LOG_END_VOLUME}|%{BACULA_LOG_NEW_VOLUME}|%{BACULA_LOG_NEW_LABEL}|%{BACULA_LOG_WROTE_LABEL}|%{BACULA_LOG_NEW_MOUNT}|%{BACULA_LOG_NOOPEN}|%{BACULA_LOG_NOOPENDIR}|%{BACULA_LOG_NOSTAT}|%{BACULA_LOG_NOJOBS}|%{BACULA_LOG_ALL_RECORDS_PRUNED}|%{BACULA_LOG_BEGIN_PRUNE_JOBS}|%{BACULA_LOG_BEGIN_PRUNE_FILES}|%{BACULA_LOG_PRUNED_JOBS}|%{BACULA_LOG_PRUNED_FILES}|%{BACULA_LOG_ENDPRUNE}|%{BACULA_LOG_STARTJOB}|%{BACULA_LOG_STARTRESTORE}|%{BACULA_LOG_USEDEVICE}|%{BACULA_LOG_DIFF_FS}|%{BACULA_LOG_JOBEND}|%{BACULA_LOG_NOPRUNE_JOBS}|%{BACULA_LOG_NOPRUNE_FILES}|%{BACULA_LOG_VOLUME_PREVWRITTEN}|%{BACULA_LOG_READYAPPEND}|%{BACULA_LOG_CANCELLING}|%{BACULA_LOG_MARKCANCEL}|%{BACULA_LOG_CLIENT_RBJ}|%{BACULA_LOG_VSS}|%{BACULA_LOG_MAXSTART}|%{BACULA_LOG_DUPLICATE}|%{BACULA_LOG_NOJOBSTAT}|%{BACULA_LOG_FATAL_CONN}|%{BACULA_LOG_NO_CONNECT}|%{BACULA_LOG_NO_AUTH}|%{BACULA_LOG_NOSUIT}|%{BACULA_LOG_JOB}|%{BACULA_LOG_NOPRIOR}) +# old (deprecated) name : +BACULA_LOGLINE %{BACULA_LOG} diff --git a/libs/grok/src/main/resources/patterns/ecs-v1/bind b/libs/grok/src/main/resources/patterns/ecs-v1/bind new file mode 100644 index 0000000000000..ec212de118ddb --- /dev/null +++ b/libs/grok/src/main/resources/patterns/ecs-v1/bind @@ -0,0 +1,13 @@ +BIND9_TIMESTAMP %{MONTHDAY}[-]%{MONTH}[-]%{YEAR} %{TIME} + +BIND9_DNSTYPE (?:A|AAAA|CAA|CDNSKEY|CDS|CERT|CNAME|CSYNC|DLV|DNAME|DNSKEY|DS|HINFO|LOC|MX|NAPTR|NS|NSEC|NSEC3|OPENPGPKEY|PTR|RRSIG|RP|SIG|SMIMEA|SOA|SRV|TSIG|TXT|URI) +BIND9_CATEGORY (?:queries) + +# dns.question.class is static - only 'IN' is supported by Bind9 +# bind.log.question.name is expected to be a 'duplicate' (same as the dns.question.name capture) +BIND9_QUERYLOGBASE client(:? @0x(?:[0-9A-Fa-f]+))? %{IP:client.ip}#%{POSINT:client.port:int} \(%{GREEDYDATA:bind.log.question.name}\): query: %{GREEDYDATA:dns.question.name} (?IN) %{BIND9_DNSTYPE:dns.question.type}(:? %{DATA:bind.log.question.flags})? 
\(%{IP:server.ip}\) + +# for query-logging category and severity are always fixed as "queries: info: " +BIND9_QUERYLOG %{BIND9_TIMESTAMP:timestamp} %{BIND9_CATEGORY:bing.log.category}: %{LOGLEVEL:log.level}: %{BIND9_QUERYLOGBASE} + +BIND9 %{BIND9_QUERYLOG} diff --git a/libs/grok/src/main/resources/patterns/ecs-v1/bro b/libs/grok/src/main/resources/patterns/ecs-v1/bro new file mode 100644 index 0000000000000..dc38d5a7fba59 --- /dev/null +++ b/libs/grok/src/main/resources/patterns/ecs-v1/bro @@ -0,0 +1,30 @@ +# supports the 'old' BRO log files, for updated Zeek log format see the patters/ecs-v1/zeek +# https://www.bro.org/sphinx/script-reference/log-files.html + +BRO_BOOL [TF] +BRO_DATA [^\t]+ + +# http.log - old format (before the Zeek rename) : +BRO_HTTP %{NUMBER:timestamp}\t%{NOTSPACE:zeek.session_id}\t%{IP:source.ip}\t%{INT:source.port:int}\t%{IP:destination.ip}\t%{INT:destination.port:int}\t%{INT:zeek.http.trans_depth:int}\t(?:-|%{WORD:http.request.method})\t(?:-|%{BRO_DATA:url.domain})\t(?:-|%{BRO_DATA:url.original})\t(?:-|%{BRO_DATA:http.request.referrer})\t(?:-|%{BRO_DATA:user_agent.original})\t(?:-|%{NUMBER:http.request.body.bytes:long})\t(?:-|%{NUMBER:http.response.body.bytes:long})\t(?:-|%{POSINT:http.response.status_code:int})\t(?:-|%{DATA:zeek.http.status_msg})\t(?:-|%{POSINT:zeek.http.info_code:int})\t(?:-|%{DATA:zeek.http.info_msg})\t(?:-|%{BRO_DATA:zeek.http.filename})\t(?:\(empty\)|%{BRO_DATA:zeek.http.tags})\t(?:-|%{BRO_DATA:url.username})\t(?:-|%{BRO_DATA:url.password})\t(?:-|%{BRO_DATA:zeek.http.proxied})\t(?:-|%{BRO_DATA:zeek.http.orig_fuids})\t(?:-|%{BRO_DATA:http.request.mime_type})\t(?:-|%{BRO_DATA:zeek.http.resp_fuids})\t(?:-|%{BRO_DATA:http.response.mime_type}) +# :long - %{NUMBER:http.request.body.bytes:int} +# :long - %{NUMBER:http.response.body.bytes:int} + +# dns.log - old format +BRO_DNS %{NUMBER:timestamp}\t%{NOTSPACE:zeek.session_id}\t%{IP:source.ip}\t%{INT:source.port:int}\t%{IP:destination.ip}\t%{INT:destination.port:int}\t%{WORD:network.transport}\t(?:-|%{INT:dns.id:int})\t(?:-|%{BRO_DATA:dns.question.name})\t(?:-|%{INT:zeek.dns.qclass:int})\t(?:-|%{BRO_DATA:zeek.dns.qclass_name})\t(?:-|%{INT:zeek.dns.qtype:int})\t(?:-|%{BRO_DATA:dns.question.type})\t(?:-|%{INT:zeek.dns.rcode:int})\t(?:-|%{BRO_DATA:dns.response_code})\t(?:-|%{BRO_BOOL:zeek.dns.AA})\t(?:-|%{BRO_BOOL:zeek.dns.TC})\t(?:-|%{BRO_BOOL:zeek.dns.RD})\t(?:-|%{BRO_BOOL:zeek.dns.RA})\t(?:-|%{NONNEGINT:zeek.dns.Z:int})\t(?:-|%{BRO_DATA:zeek.dns.answers})\t(?:-|%{DATA:zeek.dns.TTLs})\t(?:-|%{BRO_BOOL:zeek.dns.rejected}) + +# conn.log - old bro, also supports 'newer' format (optional *zeek.connection.local_resp* flag) compared to non-ecs mode +BRO_CONN %{NUMBER:timestamp}\t%{NOTSPACE:zeek.session_id}\t%{IP:source.ip}\t%{INT:source.port:int}\t%{IP:destination.ip}\t%{INT:destination.port:int}\t%{WORD:network.transport}\t(?:-|%{BRO_DATA:network.protocol})\t(?:-|%{NUMBER:zeek.connection.duration:float})\t(?:-|%{INT:zeek.connection.orig_bytes:long})\t(?:-|%{INT:zeek.connection.resp_bytes:long})\t(?:-|%{BRO_DATA:zeek.connection.state})\t(?:-|%{BRO_BOOL:zeek.connection.local_orig})\t(?:(?:-|%{BRO_BOOL:zeek.connection.local_resp})\t)?(?:-|%{INT:zeek.connection.missed_bytes:long})\t(?:-|%{BRO_DATA:zeek.connection.history})\t(?:-|%{INT:source.packets:long})\t(?:-|%{INT:source.bytes:long})\t(?:-|%{INT:destination.packets:long})\t(?:-|%{INT:destination.bytes:long})\t(?:\(empty\)|%{BRO_DATA:zeek.connection.tunnel_parents}) +# :long - %{INT:zeek.connection.orig_bytes:int} +# :long - 
%{INT:zeek.connection.resp_bytes:int} +# :long - %{INT:zeek.connection.missed_bytes:int} +# :long - %{INT:source.packets:int} +# :long - %{INT:source.bytes:int} +# :long - %{INT:destination.packets:int} +# :long - %{INT:destination.bytes:int} + +# files.log - old format +BRO_FILES %{NUMBER:timestamp}\t%{NOTSPACE:zeek.files.fuid}\t(?:-|%{IP:server.ip})\t(?:-|%{IP:client.ip})\t(?:-|%{BRO_DATA:zeek.files.session_ids})\t(?:-|%{BRO_DATA:zeek.files.source})\t(?:-|%{INT:zeek.files.depth:int})\t(?:-|%{BRO_DATA:zeek.files.analyzers})\t(?:-|%{BRO_DATA:file.mime_type})\t(?:-|%{BRO_DATA:file.name})\t(?:-|%{NUMBER:zeek.files.duration:float})\t(?:-|%{BRO_DATA:zeek.files.local_orig})\t(?:-|%{BRO_BOOL:zeek.files.is_orig})\t(?:-|%{INT:zeek.files.seen_bytes:long})\t(?:-|%{INT:file.size:long})\t(?:-|%{INT:zeek.files.missing_bytes:long})\t(?:-|%{INT:zeek.files.overflow_bytes:long})\t(?:-|%{BRO_BOOL:zeek.files.timedout})\t(?:-|%{BRO_DATA:zeek.files.parent_fuid})\t(?:-|%{BRO_DATA:file.hash.md5})\t(?:-|%{BRO_DATA:file.hash.sha1})\t(?:-|%{BRO_DATA:file.hash.sha256})\t(?:-|%{BRO_DATA:zeek.files.extracted}) +# :long - %{INT:zeek.files.seen_bytes:int} +# :long - %{INT:file.size:int} +# :long - %{INT:zeek.files.missing_bytes:int} +# :long - %{INT:zeek.files.overflow_bytes:int} diff --git a/libs/grok/src/main/resources/patterns/ecs-v1/exim b/libs/grok/src/main/resources/patterns/ecs-v1/exim new file mode 100644 index 0000000000000..dba79503c0097 --- /dev/null +++ b/libs/grok/src/main/resources/patterns/ecs-v1/exim @@ -0,0 +1,26 @@ +EXIM_MSGID [0-9A-Za-z]{6}-[0-9A-Za-z]{6}-[0-9A-Za-z]{2} +# <= message arrival +# => normal message delivery +# -> additional address in same delivery +# *> delivery suppressed by -N +# ** delivery failed; address bounced +# == delivery deferred; temporary problem +EXIM_FLAGS (?:<=|=>|->|\*>|\*\*|==|<>|>>) +EXIM_DATE (:?%{YEAR}-%{MONTHNUM}-%{MONTHDAY} %{TIME}) +EXIM_PID \[%{POSINT:process.pid:int}\] +EXIM_QT ((\d+y)?(\d+w)?(\d+d)?(\d+h)?(\d+m)?(\d+s)?) +EXIM_EXCLUDE_TERMS (Message is frozen|(Start|End) queue run| Warning: | retry time not reached | no (IP address|host name) found for (IP address|host) | unexpected disconnection while reading SMTP command | no immediate delivery: |another process is handling this message) +EXIM_REMOTE_HOST (H=(%{NOTSPACE:source.address} )?(\(%{NOTSPACE:exim.log.remote_address}\) )?\%{IP:source.ip}\](?::%{POSINT:source.port:int})?) +EXIM_INTERFACE (I=\[%{IP:destination.ip}\](?::%{NUMBER:destination.port:int})) +EXIM_PROTOCOL (P=%{NOTSPACE:network.protocol}) +EXIM_MSG_SIZE (S=%{NUMBER:exim.log.message.size:int}) +EXIM_HEADER_ID (id=%{NOTSPACE:exim.log.header_id}) +EXIM_QUOTED_CONTENT (?:\\.|[^\\"])* +EXIM_SUBJECT (T="%{EXIM_QUOTED_CONTENT:exim.log.message.subject}") + +EXIM_UNKNOWN_FIELD (?:[A-Za-z0-9]{1,4}=(?:%{QUOTEDSTRING}|%{NOTSPACE})) +EXIM_NAMED_FIELDS (?: (?:%{EXIM_REMOTE_HOST}|%{EXIM_INTERFACE}|%{EXIM_PROTOCOL}|%{EXIM_MSG_SIZE}|%{EXIM_HEADER_ID}|%{EXIM_SUBJECT}|%{EXIM_UNKNOWN_FIELD}))* + +EXIM_MESSAGE_ARRIVAL %{EXIM_DATE:timestamp} (?:%{EXIM_PID} )?%{EXIM_MSGID:exim.log.message.id} (?<=) (?[a-z:] )?%{EMAILADDRESS:exim.log.sender.email}%{EXIM_NAMED_FIELDS}(?:(?: from ?)? for %{EMAILADDRESS:exim.log.recipient.email})? 
+ +EXIM %{EXIM_MESSAGE_ARRIVAL} diff --git a/libs/grok/src/main/resources/patterns/ecs-v1/firewalls b/libs/grok/src/main/resources/patterns/ecs-v1/firewalls new file mode 100644 index 0000000000000..892b3a506825d --- /dev/null +++ b/libs/grok/src/main/resources/patterns/ecs-v1/firewalls @@ -0,0 +1,111 @@ +# NetScreen firewall logs +NETSCREENSESSIONLOG %{SYSLOGTIMESTAMP:timestamp} %{IPORHOST:observer.hostname} %{NOTSPACE:observer.name}: (?NetScreen) device_id=%{WORD:netscreen.device_id} .*?(system-\w+-%{NONNEGINT:event.code}\(%{WORD:netscreen.session.type}\))?: start_time="%{DATA:netscreen.session.start_time}" duration=%{INT:netscreen.session.duration:int} policy_id=%{INT:netscreen.policy_id} service=%{DATA:netscreen.service} proto=%{INT:netscreen.protocol_number:int} src zone=%{WORD:observer.ingress.zone} dst zone=%{WORD:observer.egress.zone} action=%{WORD:event.action} sent=%{INT:source.bytes:long} rcvd=%{INT:destination.bytes:long} src=%{IPORHOST:source.address} dst=%{IPORHOST:destination.address}(?: src_port=%{INT:source.port:int} dst_port=%{INT:destination.port:int})?(?: src-xlated ip=%{IP:source.nat.ip} port=%{INT:source.nat.port:int} dst-xlated ip=%{IP:destination.nat.ip} port=%{INT:destination.nat.port:int})?(?: session_id=%{INT:netscreen.session.id} reason=%{GREEDYDATA:netscreen.session.reason})? +# :long - %{INT:source.bytes:int} +# :long - %{INT:destination.bytes:int} + +#== Cisco ASA == +CISCO_TAGGED_SYSLOG ^<%{POSINT:log.syslog.priority:int}>%{CISCOTIMESTAMP:timestamp}( %{SYSLOGHOST:host.hostname})? ?: %%{CISCOTAG:cisco.asa.tag}: +CISCOTIMESTAMP %{MONTH} +%{MONTHDAY}(?: %{YEAR})? %{TIME} +CISCOTAG [A-Z0-9]+-%{INT}-(?:[A-Z0-9_]+) +# Common Particles +CISCO_ACTION Built|Teardown|Deny|Denied|denied|requested|permitted|denied by ACL|discarded|est-allowed|Dropping|created|deleted +CISCO_REASON Duplicate TCP SYN|Failed to locate egress interface|Invalid transport field|No matching connection|DNS Response|DNS Query|(?:%{WORD}\s*)* +CISCO_DIRECTION Inbound|inbound|Outbound|outbound +CISCO_INTERVAL first hit|%{INT}-second interval +CISCO_XLATE_TYPE static|dynamic +# helpers +CISCO_HITCOUNT_INTERVAL hit-cnt %{INT:cisco.asa.hit_count:int} (?:first hit|%{INT:cisco.asa.interval:int}-second interval) +CISCO_SRC_IP_USER %{NOTSPACE:observer.ingress.interface.name}:%{IP:source.ip}(?:\(%{DATA:source.user.name}\))? +CISCO_DST_IP_USER %{NOTSPACE:observer.egress.interface.name}:%{IP:destination.ip}(?:\(%{DATA:destination.user.name}\))? +CISCO_SRC_HOST_PORT_USER %{NOTSPACE:observer.ingress.interface.name}:(?:(?:%{IP:source.ip})|(?:%{HOSTNAME:source.address}))(?:/%{INT:source.port:int})?(?:\(%{DATA:source.user.name}\))? +CISCO_DST_HOST_PORT_USER %{NOTSPACE:observer.egress.interface.name}:(?:(?:%{IP:destination.ip})|(?:%{HOSTNAME:destination.address}))(?:/%{INT:destination.port:int})?(?:\(%{DATA:destination.user.name}\))? +# ASA-1-104001 +CISCOFW104001 \((?:Primary|Secondary)\) Switching to ACTIVE - %{GREEDYDATA:event.reason} +# ASA-1-104002 +CISCOFW104002 \((?:Primary|Secondary)\) Switching to STANDBY - %{GREEDYDATA:event.reason} +# ASA-1-104003 +CISCOFW104003 \((?:Primary|Secondary)\) Switching to FAILED\. +# ASA-1-104004 +CISCOFW104004 \((?:Primary|Secondary)\) Switching to OK\. 
+# ASA-1-105003 +CISCOFW105003 \((?:Primary|Secondary)\) Monitoring on [Ii]nterface %{NOTSPACE:network.interface.name} waiting +# ASA-1-105004 +CISCOFW105004 \((?:Primary|Secondary)\) Monitoring on [Ii]nterface %{NOTSPACE:network.interface.name} normal +# ASA-1-105005 +CISCOFW105005 \((?:Primary|Secondary)\) Lost Failover communications with mate on [Ii]nterface %{NOTSPACE:network.interface.name} +# ASA-1-105008 +CISCOFW105008 \((?:Primary|Secondary)\) Testing [Ii]nterface %{NOTSPACE:network.interface.name} +# ASA-1-105009 +CISCOFW105009 \((?:Primary|Secondary)\) Testing on [Ii]nterface %{NOTSPACE:network.interface.name} (?:Passed|Failed) +# ASA-2-106001 +CISCOFW106001 %{CISCO_DIRECTION:cisco.asa.network.direction} %{WORD:cisco.asa.network.transport} connection %{CISCO_ACTION:cisco.asa.outcome} from %{IP:source.ip}/%{INT:source.port:int} to %{IP:destination.ip}/%{INT:destination.port:int} flags %{DATA:cisco.asa.tcp_flags} on interface %{NOTSPACE:observer.egress.interface.name} +# ASA-2-106006, ASA-2-106007, ASA-2-106010 +CISCOFW106006_106007_106010 %{CISCO_ACTION:cisco.asa.outcome} %{CISCO_DIRECTION:cisco.asa.network.direction} %{WORD:cisco.asa.network.transport} (?:from|src) %{IP:source.ip}/%{INT:source.port:int}(?:\(%{DATA:source.user.name}\))? (?:to|dst) %{IP:destination.ip}/%{INT:destination.port:int}(?:\(%{DATA:destination.user.name}\))? (?:(?:on interface %{NOTSPACE:observer.egress.interface.name})|(?:due to %{CISCO_REASON:event.reason})) +# ASA-3-106014 +CISCOFW106014 %{CISCO_ACTION:cisco.asa.outcome} %{CISCO_DIRECTION:cisco.asa.network.direction} %{WORD:cisco.asa.network.transport} src %{CISCO_SRC_IP_USER} dst %{CISCO_DST_IP_USER}\s?\(type %{INT:cisco.asa.icmp_type:int}, code %{INT:cisco.asa.icmp_code:int}\) +# ASA-6-106015 +CISCOFW106015 %{CISCO_ACTION:cisco.asa.outcome} %{WORD:cisco.asa.network.transport} \(%{DATA:cisco.asa.rule_name}\) from %{IP:source.ip}/%{INT:source.port:int} to %{IP:destination.ip}/%{INT:destination.port:int} flags %{DATA:cisco.asa.tcp_flags} on interface %{NOTSPACE:observer.egress.interface.name} +# ASA-1-106021 +CISCOFW106021 %{CISCO_ACTION:cisco.asa.outcome} %{WORD:cisco.asa.network.transport} reverse path check from %{IP:source.ip} to %{IP:destination.ip} on interface %{NOTSPACE:observer.egress.interface.name} +# ASA-4-106023 +CISCOFW106023 %{CISCO_ACTION:cisco.asa.outcome}(?: protocol)? %{WORD:cisco.asa.network.transport} src %{CISCO_SRC_HOST_PORT_USER} dst %{CISCO_DST_HOST_PORT_USER}( \(type %{INT:cisco.asa.icmp_type:int}, code %{INT:cisco.asa.icmp_code:int}\))? by access-group "?%{DATA:cisco.asa.rule_name}"? \%{DATA:[@metadata.cisco.asa.hashcode1}, %{DATA:@metadata.cisco.asa.hashcode2}\] +# ASA-4-106100, ASA-4-106102, ASA-4-106103 +CISCOFW106100_2_3 access-list %{NOTSPACE:cisco.asa.rule_name} %{CISCO_ACTION:cisco.asa.outcome} %{WORD:cisco.asa.network.transport} for user '%{DATA:user.name}' %{DATA:observer.ingress.interface.name}/%{IP:source.ip}\(%{INT:source.port:int}\) -> %{DATA:observer.egress.interface.name}/%{IP:destination.ip}\(%{INT:destination.port:int}\) %{CISCO_HITCOUNT_INTERVAL} \%{DATA:[@metadata.cisco.asa.hashcode1}, %{DATA:@metadata.cisco.asa.hashcode2}\] +# ASA-5-106100 +CISCOFW106100 access-list %{NOTSPACE:cisco.asa.rule_name} %{CISCO_ACTION:cisco.asa.outcome} %{WORD:cisco.asa.network.transport} %{DATA:observer.ingress.interface.name}/%{IP:source.ip}\(%{INT:source.port:int}\)(?:\(%{DATA:source.user.name}\))? -> %{DATA:observer.egress.interface.name}/%{IP:destination.ip}\(%{INT:destination.port:int}\)(?:\(%{DATA:source.user.name}\))? 
hit-cnt %{INT:cisco.asa.hit_count:int} %{CISCO_INTERVAL} \%{DATA:[@metadata.cisco.asa.hashcode1}, %{DATA:@metadata.cisco.asa.hashcode2}\] +# ASA-5-304001 +CISCOFW304001 %{IP:source.ip}(?:\(%{DATA:source.user.name}\))? Accessed URL %{IP:destination.ip}:%{GREEDYDATA:url.original} +# ASA-6-110002 +CISCOFW110002 %{CISCO_REASON:event.reason} for %{WORD:cisco.asa.network.transport} from %{DATA:observer.ingress.interface.name}:%{IP:source.ip}/%{INT:source.port:int} to %{IP:destination.ip}/%{INT:destination.port:int} +# ASA-6-302010 +CISCOFW302010 %{INT:cisco.asa.connections.in_use:int} in use, %{INT:cisco.asa.connections.most_used:int} most used +# ASA-6-302013, ASA-6-302014, ASA-6-302015, ASA-6-302016 +CISCOFW302013_302014_302015_302016 %{CISCO_ACTION:cisco.asa.outcome}(?: %{CISCO_DIRECTION:cisco.asa.network.direction})? %{WORD:cisco.asa.network.transport} connection %{INT:cisco.asa.connection_id} for %{NOTSPACE:observer.ingress.interface.name}:%{IP:source.ip}/%{INT:source.port:int}(?: \(%{IP:source.nat.ip}/%{INT:source.nat.port:int}\))?(?:\(%{DATA:source.user.name?}\))? to %{NOTSPACE:observer.egress.interface.name}:%{IP:destination.ip}/%{INT:destination.port:int}( \(%{IP:destination.nat.ip}/%{INT:destination.nat.port:int}\))?(?:\(%{DATA:destination.user.name}\))?( duration %{TIME:cisco.asa.duration} bytes %{INT:network.bytes:long})?(?: %{CISCO_REASON:event.reason})?(?: \(%{DATA:user.name}\))? +# :long - %{INT:network.bytes:int} +# ASA-6-302020, ASA-6-302021 +CISCOFW302020_302021 %{CISCO_ACTION:cisco.asa.outcome}(?: %{CISCO_DIRECTION:cisco.asa.network.direction})? %{WORD:cisco.asa.network.transport} connection for faddr %{IP:destination.ip}/%{INT:cisco.asa.icmp_seq:int}(?:\(%{DATA:destination.user.name}\))? gaddr %{IP:source.nat.ip}/%{INT:cisco.asa.icmp_type:int} laddr %{IP:source.ip}/%{INT}(?: \(%{DATA:source.user.name}\))? +# ASA-6-305011 +CISCOFW305011 %{CISCO_ACTION:cisco.asa.outcome} %{CISCO_XLATE_TYPE} %{WORD:cisco.asa.network.transport} translation from %{DATA:observer.ingress.interface.name}:%{IP:source.ip}(/%{INT:source.port:int})?(?:\(%{DATA:source.user.name}\))? to %{DATA:observer.egress.interface.name}:%{IP:destination.ip}/%{INT:destination.port:int} +# ASA-3-313001, ASA-3-313004, ASA-3-313008 +CISCOFW313001_313004_313008 %{CISCO_ACTION:cisco.asa.outcome} %{WORD:cisco.asa.network.transport} type=%{INT:cisco.asa.icmp_type:int}, code=%{INT:cisco.asa.icmp_code:int} from %{IP:source.ip} on interface %{NOTSPACE:observer.egress.interface.name}(?: to %{IP:destination.ip})? +# ASA-4-313005 +CISCOFW313005 %{CISCO_REASON:event.reason} for %{WORD:cisco.asa.network.transport} error message: %{WORD} src %{CISCO_SRC_IP_USER} dst %{CISCO_DST_IP_USER} \(type %{INT:cisco.asa.icmp_type:int}, code %{INT:cisco.asa.icmp_code:int}\) on %{NOTSPACE} interface\.\s+Original IP payload: %{WORD:cisco.asa.original_ip_payload.network.transport} src %{IP:cisco.asa.original_ip_payload.source.ip}/%{INT:cisco.asa.original_ip_payload.source.port:int}(?:\(%{DATA:cisco.asa.original_ip_payload.source.user.name}\))? dst %{IP:cisco.asa.original_ip_payload.destination.ip}/%{INT:cisco.asa.original_ip_payload.destination.port:int}(?:\(%{DATA:cisco.asa.original_ip_payload.destination.user.name}\))? +# ASA-5-321001 +CISCOFW321001 Resource '%{DATA:cisco.asa.resource.name}' limit of %{POSINT:cisco.asa.resource.limit:int} reached for system +# ASA-4-402117 +CISCOFW402117 %{WORD:cisco.asa.network.type}: Received a non-IPSec packet \(protocol=\s?%{WORD:cisco.asa.network.transport}\) from %{IP:source.ip} to %{IP:destination.ip}\.? 
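CISCO_TAGGED_SYSLOG only consumes the syslog header and the %ASA-x-yyyyyy tag; message-specific patterns such as CISCOFW106015 above pick up where it stops. A rough sketch of one way to combine the two on a single invented line; a real pipeline would more likely dispatch on cisco.asa.tag rather than hard-wiring one message pattern, so treat the composite expression here as an assumption rather than shipped behaviour:

    import org.elasticsearch.grok.Grok;

    import java.util.Map;

    public class AsaTaggedSyslogExample {
        public static void main(String[] args) {
            // Header pattern followed by one concrete message pattern, both from the
            // ECS firewalls file added in this change.
            Grok grok = new Grok(
                Grok.getBuiltinPatterns(true),
                "%{CISCO_TAGGED_SYSLOG} %{CISCOFW106015}",
                msg -> {}
            );

            // Invented sample line for illustration only.
            String line = "<166>Jun 10 2021 12:12:12 fw01 : %ASA-6-106015: "
                + "Deny TCP (no connection) from 10.1.1.1/4567 to 10.2.2.2/443 flags RST on interface outside";

            Map<String, Object> captures = grok.captures(line);
            // Expect, among others: cisco.asa.tag=ASA-6-106015, cisco.asa.outcome=Deny,
            // source.ip=10.1.1.1, destination.port=443, observer.egress.interface.name=outside
            System.out.println(captures);
        }
    }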
+# ASA-4-402119 +CISCOFW402119 %{WORD:cisco.asa.network.type}: Received an %{WORD:cisco.asa.ipsec.protocol} packet \(SPI=\s?%{DATA:cisco.asa.ipsec.spi}, sequence number=\s?%{DATA:cisco.asa.ipsec.seq_num}\) from %{IP:source.ip} \(user=\s?%{DATA:source.user.name}\) to %{IP:destination.ip} that failed anti-replay checking\.? +# ASA-4-419001 +CISCOFW419001 %{CISCO_ACTION:cisco.asa.outcome} %{WORD:cisco.asa.network.transport} packet from %{NOTSPACE:observer.ingress.interface.name}:%{IP:source.ip}/%{INT:source.port:int} to %{NOTSPACE:observer.egress.interface.name}:%{IP:destination.ip}/%{INT:destination.port:int}, reason: %{GREEDYDATA:event.reason} +# ASA-4-419002 +CISCOFW419002 %{CISCO_REASON:event.reason} from %{DATA:observer.ingress.interface.name}:%{IP:source.ip}/%{INT:source.port:int} to %{DATA:observer.egress.interface.name}:%{IP:destination.ip}/%{INT:destination.port:int} with different initial sequence number +# ASA-4-500004 +CISCOFW500004 %{CISCO_REASON:event.reason} for protocol=%{WORD:cisco.asa.network.transport}, from %{IP:source.ip}/%{INT:source.port:int} to %{IP:destination.ip}/%{INT:destination.port:int} +# ASA-6-602303, ASA-6-602304 +CISCOFW602303_602304 %{WORD:cisco.asa.network.type}: An %{CISCO_DIRECTION:cisco.asa.network.direction} %{DATA:cisco.asa.ipsec.tunnel_type} SA \(SPI=\s?%{DATA:cisco.asa.ipsec.spi}\) between %{IP:source.ip} and %{IP:destination.ip} \(user=\s?%{DATA:source.user.name}\) has been %{CISCO_ACTION:cisco.asa.outcome} +# ASA-7-710001, ASA-7-710002, ASA-7-710003, ASA-7-710005, ASA-7-710006 +CISCOFW710001_710002_710003_710005_710006 %{WORD:cisco.asa.network.transport} (?:request|access) %{CISCO_ACTION:cisco.asa.outcome} from %{IP:source.ip}/%{INT:source.port:int} to %{DATA:observer.egress.interface.name}:%{IP:destination.ip}/%{INT:destination.port:int} +# ASA-6-713172 +CISCOFW713172 Group = %{DATA:cisco.asa.source.group}, IP = %{IP:source.ip}, Automatic NAT Detection Status:\s+Remote end\s*%{DATA:@metadata.cisco.asa.remote_nat}\s*behind a NAT device\s+This\s+end\s*%{DATA:@metadata.cisco.asa.local_nat}\s*behind a NAT device +# ASA-4-733100 +CISCOFW733100 \\s*%{DATA:[cisco.asa.burst.object}\s*\] drop %{DATA:cisco.asa.burst.id} exceeded. Current burst rate is %{INT:cisco.asa.burst.current_rate:int} per second, max configured rate is %{INT:cisco.asa.burst.configured_rate:int}; Current average rate is %{INT:cisco.asa.burst.avg_rate:int} per second, max configured rate is %{INT:cisco.asa.burst.configured_avg_rate:int}; Cumulative total count is %{INT:cisco.asa.burst.cumulative_count:int} +#== End Cisco ASA == + + +IPTABLES_TCP_FLAGS (CWR |ECE |URG |ACK |PSH |RST |SYN |FIN )* +IPTABLES_TCP_PART (?:SEQ=%{INT:iptables.tcp.seq:int}\s+)?(?:ACK=%{INT:iptables.tcp.ack:int}\s+)?WINDOW=%{INT:iptables.tcp.window:int}\s+RES=0x%{BASE16NUM:iptables.tcp_reserved_bits}\s+%{IPTABLES_TCP_FLAGS:iptables.tcp.flags} + +IPTABLES4_FRAG (?:(?<= )(?:CE|DF|MF))* +IPTABLES4_PART SRC=%{IPV4:source.ip}\s+DST=%{IPV4:destination.ip}\s+LEN=(?:%{INT:iptables.length:int})?\s+TOS=(?:0|0x%{BASE16NUM:iptables.tos})?\s+PREC=(?:0x%{BASE16NUM:iptables.precedence_bits})?\s+TTL=(?:%{INT:iptables.ttl:int})?\s+ID=(?:%{INT:iptables.id})?\s+(?:%{IPTABLES4_FRAG:iptables.fragment_flags})?(?:\s+FRAG: %{INT:iptables.fragment_offset:int})? +IPTABLES6_PART SRC=%{IPV6:source.ip}\s+DST=%{IPV6:destination.ip}\s+LEN=(?:%{INT:iptables.length:int})?\s+TC=(?:0|0x%{BASE16NUM:iptables.tos})?\s+HOPLIMIT=(?:%{INT:iptables.ttl:int})?\s+FLOWLBL=(?:%{INT:iptables.flow_label})? 
+ +IPTABLES IN=(?:%{NOTSPACE:observer.ingress.interface.name})?\s+OUT=(?:%{NOTSPACE:observer.egress.interface.name})?\s+(?:MAC=(?:%{COMMONMAC:destination.mac})?(?::%{COMMONMAC:source.mac})?(?::A-Fa-f0-9{2}:A-Fa-f0-9{2})?\s+)?(:?%{IPTABLES4_PART}|%{IPTABLES6_PART}).*?PROTO=(?:%{WORD:network.transport})?\s+SPT=(?:%{INT:source.port:int})?\s+DPT=(?:%{INT:destination.port:int})?\s+(?:%{IPTABLES_TCP_PART})? + +# Shorewall firewall logs +SHOREWALL (?:%{SYSLOGTIMESTAMP:timestamp}) (?:%{WORD:observer.hostname}) .*Shorewall:(?:%{WORD:shorewall.firewall.type})?:(?:%{WORD:shorewall.firewall.action})?.*%{IPTABLES} +#== End Shorewall +#== SuSE Firewall 2 == +SFW2_LOG_PREFIX SFW2\-INext\-%{NOTSPACE:suse.firewall.action} +SFW2 ((?:%{SYSLOGTIMESTAMP:timestamp})|(?:%{TIMESTAMP_ISO8601:timestamp}))\s*%{HOSTNAME:observer.hostname}.*?%{SFW2_LOG_PREFIX:suse.firewall.log_prefix}\s*%{IPTABLES} +#== End SuSE == diff --git a/libs/grok/src/main/resources/patterns/ecs-v1/grok-patterns b/libs/grok/src/main/resources/patterns/ecs-v1/grok-patterns new file mode 100644 index 0000000000000..6f58f3ff4750d --- /dev/null +++ b/libs/grok/src/main/resources/patterns/ecs-v1/grok-patterns @@ -0,0 +1,95 @@ +USERNAME [a-zA-Z0-9._-]+ +USER %{USERNAME} +EMAILLOCALPART [a-zA-Z0-9!#$%&'*+\-/=?^_`{|}~]{1,64}(?:\.[a-zA-Z0-9!#$%&'*+\-/=?^_`{|}~]{1,62}){0,63} +EMAILADDRESS %{EMAILLOCALPART}@%{HOSTNAME} +INT (?:[+-]?(?:[0-9]+)) +BASE10NUM (?[+-]?(?:(?:[0-9]+(?:\.[0-9]+)?)|(?:\.[0-9]+))) +NUMBER (?:%{BASE10NUM}) +BASE16NUM (?(?"(?>\\.|[^\\"]+)+"|""|(?>'(?>\\.|[^\\']+)+')|''|(?>`(?>\\.|[^\\`]+)+`)|``)) +UUID [A-Fa-f0-9]{8}-(?:[A-Fa-f0-9]{4}-){3}[A-Fa-f0-9]{12} +# URN, allowing use of RFC 2141 section 2.3 reserved characters +URN urn:[0-9A-Za-z][0-9A-Za-z-]{0,31}:(?:%[0-9a-fA-F]{2}|[0-9A-Za-z()+,.:=@;$_!*'/?#-])+ + +# Networking +MAC (?:%{CISCOMAC}|%{WINDOWSMAC}|%{COMMONMAC}) +CISCOMAC (?:(?:[A-Fa-f0-9]{4}\.){2}[A-Fa-f0-9]{4}) +WINDOWSMAC (?:(?:[A-Fa-f0-9]{2}-){5}[A-Fa-f0-9]{2}) +COMMONMAC (?:(?:[A-Fa-f0-9]{2}:){5}[A-Fa-f0-9]{2}) +IPV6 ((([0-9A-Fa-f]{1,4}:){7}([0-9A-Fa-f]{1,4}|:))|(([0-9A-Fa-f]{1,4}:){6}(:[0-9A-Fa-f]{1,4}|((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3})|:))|(([0-9A-Fa-f]{1,4}:){5}(((:[0-9A-Fa-f]{1,4}){1,2})|:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3})|:))|(([0-9A-Fa-f]{1,4}:){4}(((:[0-9A-Fa-f]{1,4}){1,3})|((:[0-9A-Fa-f]{1,4})?:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){3}(((:[0-9A-Fa-f]{1,4}){1,4})|((:[0-9A-Fa-f]{1,4}){0,2}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){2}(((:[0-9A-Fa-f]{1,4}){1,5})|((:[0-9A-Fa-f]{1,4}){0,3}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){1}(((:[0-9A-Fa-f]{1,4}){1,6})|((:[0-9A-Fa-f]{1,4}){0,4}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(:(((:[0-9A-Fa-f]{1,4}){1,7})|((:[0-9A-Fa-f]{1,4}){0,5}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:)))(%.+)? +IPV4 (?[A-Za-z]+:|\\)(?:\\[^\\?*]*)+ +URIPROTO [A-Za-z]([A-Za-z0-9+\-.]+)+ +URIHOST %{IPORHOST}(?::%{POSINT})? +# uripath comes loosely from RFC1738, but mostly from what Firefox doesn't turn into %XX +URIPATH (?:/[A-Za-z0-9$.+!*'(){},~:;=@#%&_\-]*)+ +URIQUERY [A-Za-z0-9$.+!*'|(){},~@#%&/=:;_?\-\[\]<>]* +# deprecated (kept due compatibility): +URIPARAM \?%{URIQUERY} +URIPATHPARAM %{URIPATH}(?:\?%{URIQUERY})? 
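The primitives in this base file are what the service-specific files are composed from, and the :int, :long and :float suffixes used throughout them are applied by Grok's type conversion rather than by the regular expressions themselves. A small sketch in the style of the updated tests; the field names user.email and retries and the sample text are invented:

    import org.elasticsearch.grok.Grok;

    import java.util.Map;

    public class BasePatternsExample {
        public static void main(String[] args) {
            // EMAILADDRESS and INT are defined in this base file; the ":int" suffix asks
            // Grok to convert the captured text into an Integer.
            Grok grok = new Grok(
                Grok.getBuiltinPatterns(true),
                "%{EMAILADDRESS:user.email} %{INT:retries:int}",
                msg -> {}
            );

            Map<String, Object> captures = grok.captures("alice@example.com 3");
            // Expected: {user.email=alice@example.com, retries=3}, with retries as an Integer
            System.out.println(captures);
        }
    }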
+URI %{URIPROTO}://(?:%{USER}(?::[^@]*)?@)?(?:%{URIHOST})?(?:%{URIPATH}(?:\?%{URIQUERY})?)? + +# Months: January, Feb, 3, 03, 12, December +MONTH \b(?:[Jj]an(?:uary|uar)?|[Ff]eb(?:ruary|ruar)?|[Mm](?:a|ä)?r(?:ch|z)?|[Aa]pr(?:il)?|[Mm]a(?:y|i)?|[Jj]un(?:e|i)?|[Jj]ul(?:y|i)?|[Aa]ug(?:ust)?|[Ss]ep(?:tember)?|[Oo](?:c|k)?t(?:ober)?|[Nn]ov(?:ember)?|[Dd]e(?:c|z)(?:ember)?)\b +MONTHNUM (?:0?[1-9]|1[0-2]) +MONTHNUM2 (?:0[1-9]|1[0-2]) +MONTHDAY (?:(?:0[1-9])|(?:[12][0-9])|(?:3[01])|[1-9]) + +# Days: Monday, Tue, Thu, etc... +DAY (?:Mon(?:day)?|Tue(?:sday)?|Wed(?:nesday)?|Thu(?:rsday)?|Fri(?:day)?|Sat(?:urday)?|Sun(?:day)?) + +# Years? +YEAR (?>\d\d){1,2} +HOUR (?:2[0123]|[01]?[0-9]) +MINUTE (?:[0-5][0-9]) +# '60' is a leap second in most time standards and thus is valid. +SECOND (?:(?:[0-5]?[0-9]|60)(?:[:.,][0-9]+)?) +TIME (?!<[0-9])%{HOUR}:%{MINUTE}(?::%{SECOND})(?![0-9]) +# datestamp is YYYY/MM/DD-HH:MM:SS.UUUU (or something like it) +DATE_US %{MONTHNUM}[/-]%{MONTHDAY}[/-]%{YEAR} +DATE_EU %{MONTHDAY}[./-]%{MONTHNUM}[./-]%{YEAR} +ISO8601_TIMEZONE (?:Z|[+-]%{HOUR}(?::?%{MINUTE})) +ISO8601_SECOND %{SECOND} +TIMESTAMP_ISO8601 %{YEAR}-%{MONTHNUM}-%{MONTHDAY}[T ]%{HOUR}:?%{MINUTE}(?::?%{SECOND})?%{ISO8601_TIMEZONE}? +DATE %{DATE_US}|%{DATE_EU} +DATESTAMP %{DATE}[- ]%{TIME} +TZ (?:[APMCE][SD]T|UTC) +DATESTAMP_RFC822 %{DAY} %{MONTH} %{MONTHDAY} %{YEAR} %{TIME} %{TZ} +DATESTAMP_RFC2822 %{DAY}, %{MONTHDAY} %{MONTH} %{YEAR} %{TIME} %{ISO8601_TIMEZONE} +DATESTAMP_OTHER %{DAY} %{MONTH} %{MONTHDAY} %{TIME} %{TZ} %{YEAR} +DATESTAMP_EVENTLOG %{YEAR}%{MONTHNUM2}%{MONTHDAY}%{HOUR}%{MINUTE}%{SECOND} + +# Syslog Dates: Month Day HH:MM:SS +SYSLOGTIMESTAMP %{MONTH} +%{MONTHDAY} %{TIME} +PROG [\x21-\x5a\x5c\x5e-\x7e]+ +SYSLOGPROG %{PROG:process.name}(?:\[%{POSINT:process.pid:int}\])? +SYSLOGHOST %{IPORHOST} +SYSLOGFACILITY <%{NONNEGINT:log.syslog.facility.code:int}.%{NONNEGINT:log.syslog.priority:int}> +HTTPDATE %{MONTHDAY}/%{MONTH}/%{YEAR}:%{TIME} %{INT} + +# Shortcuts +QS %{QUOTEDSTRING} + +# Log formats +SYSLOGBASE %{SYSLOGTIMESTAMP:timestamp} (?:%{SYSLOGFACILITY} )?%{SYSLOGHOST:host.hostname} %{SYSLOGPROG}: + +# Log Levels +LOGLEVEL ([Aa]lert|ALERT|[Tt]race|TRACE|[Dd]ebug|DEBUG|[Nn]otice|NOTICE|[Ii]nfo?(?:rmation)?|INFO?(?:RMATION)?|[Ww]arn?(?:ing)?|WARN?(?:ING)?|[Ee]rr?(?:or)?|ERR?(?:OR)?|[Cc]rit?(?:ical)?|CRIT?(?:ICAL)?|[Ff]atal|FATAL|[Ss]evere|SEVERE|EMERG(?:ENCY)?|[Ee]merg(?:ency)?) diff --git a/libs/grok/src/main/resources/patterns/ecs-v1/haproxy b/libs/grok/src/main/resources/patterns/ecs-v1/haproxy new file mode 100644 index 0000000000000..f46d4ba945bb3 --- /dev/null +++ b/libs/grok/src/main/resources/patterns/ecs-v1/haproxy @@ -0,0 +1,40 @@ + +HAPROXYTIME (?!<[0-9])%{HOUR}:%{MINUTE}(?::%{SECOND})(?![0-9]) +HAPROXYDATE %{MONTHDAY}/%{MONTH}/%{YEAR}:%{HAPROXYTIME}.%{INT} + +# Override these default patterns to parse out what is captured in your haproxy.cfg +HAPROXYCAPTUREDREQUESTHEADERS %{DATA:haproxy.http.request.captured_headers} +HAPROXYCAPTUREDRESPONSEHEADERS %{DATA:haproxy.http.response.captured_headers} + +# Example: +# These haproxy config lines will add data to the logs that are captured +# by the patterns below. Place them in your custom patterns directory to +# override the defaults. 
+# +# capture request header Host len 40 +# capture request header X-Forwarded-For len 50 +# capture request header Accept-Language len 50 +# capture request header Referer len 200 +# capture request header User-Agent len 200 +# +# capture response header Content-Type len 30 +# capture response header Content-Encoding len 10 +# capture response header Cache-Control len 200 +# capture response header Last-Modified len 200 +# +# HAPROXYCAPTUREDREQUESTHEADERS %{DATA:haproxy.http.request.host}\|%{DATA:haproxy.http.request.x_forwarded_for}\|%{DATA:haproxy.http.request.accept_language}\|%{DATA:http.request.referrer}\|%{DATA:user_agent.original} +# HAPROXYCAPTUREDRESPONSEHEADERS %{DATA:http.response.mime_type}\|%{DATA:haproxy.http.response.encoding}\|%{DATA:haproxy.http.response.cache_control}\|%{DATA:haproxy.http.response.last_modified} + +HAPROXYURI (?:%{URIPROTO:url.scheme}://)?(?:%{USER:url.username}(?::[^@]*)?@)?(?:%{IPORHOST:url.domain}(?::%{POSINT:url.port:int})?)?(?:%{URIPATH:url.path}(?:\?%{URIQUERY:url.query})?)? + +HAPROXYHTTPREQUESTLINE (?:|(?:%{WORD:http.request.method} %{HAPROXYURI:url.original}(?: HTTP/%{NUMBER:http.version})?)) + +# parse a haproxy 'httplog' line +HAPROXYHTTPBASE %{IP:source.address}:%{INT:source.port:int} \[%{HAPROXYDATE:haproxy.request_date}\] %{NOTSPACE:haproxy.frontend_name} %{NOTSPACE:haproxy.backend_name}/(?:|%{NOTSPACE:haproxy.server_name}) (?:-1|%{INT:haproxy.http.request.time_wait_ms:int})/(?:-1|%{INT:haproxy.total_waiting_time_ms:int})/(?:-1|%{INT:haproxy.connection_wait_time_ms:int})/(?:-1|%{INT:haproxy.http.request.time_wait_without_data_ms:int})/%{NOTSPACE:haproxy.total_time_ms} %{INT:http.response.status_code:int} %{INT:source.bytes:long} (?:-|%{DATA:haproxy.http.request.captured_cookie}) (?:-|%{DATA:haproxy.http.response.captured_cookie}) %{NOTSPACE:haproxy.termination_state} %{INT:haproxy.connections.active:int}/%{INT:haproxy.connections.frontend:int}/%{INT:haproxy.connections.backend:int}/%{INT:haproxy.connections.server:int}/%{INT:haproxy.connections.retries:int} %{INT:haproxy.server_queue:int}/%{INT:haproxy.backend_queue:int}(?: \{%{HAPROXYCAPTUREDREQUESTHEADERS}\}(?: \{%{HAPROXYCAPTUREDRESPONSEHEADERS}\})?)?(?: "%{HAPROXYHTTPREQUESTLINE}"?)? 
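HAPROXYHTTPBASE above covers HAProxy's default httplog format end to end, including the optional captured-header and request-line tails. A quick sketch against an invented httplog line, again using the Grok class from this patch; all values in the sample are made up:

    import org.elasticsearch.grok.Grok;

    import java.util.Map;

    public class HaproxyHttplogExample {
        public static void main(String[] args) {
            Grok grok = new Grok(Grok.getBuiltinPatterns(true), "%{HAPROXYHTTPBASE}", msg -> {});

            // Invented httplog line: client, accept date, frontend, backend/server, timers,
            // status, bytes, cookies, termination state, connection counts, queues, request line.
            String line = "192.168.1.10:39759 [09/Oct/2020:15:26:06.103] http-in backend_srv/srv1 "
                + "0/0/1/2/4 200 1520 - - ---- 2/2/1/1/0 0/0 \"GET /index.html HTTP/1.1\"";

            Map<String, Object> captures = grok.captures(line);
            // Expect e.g. source.address=192.168.1.10, http.response.status_code=200,
            // source.bytes=1520 (a Long, per the ":long" conversion) and url.original=/index.html
            System.out.println(captures);
        }
    }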
+# :long - %{INT:source.bytes:int} + +HAPROXYHTTP (?:%{SYSLOGTIMESTAMP:timestamp}|%{TIMESTAMP_ISO8601:timestamp}) %{IPORHOST:host.hostname} %{SYSLOGPROG}: %{HAPROXYHTTPBASE} + +# parse a haproxy 'tcplog' line +HAPROXYTCP (?:%{SYSLOGTIMESTAMP:timestamp}|%{TIMESTAMP_ISO8601:timestamp}) %{IPORHOST:host.hostname} %{SYSLOGPROG}: %{IP:source.address}:%{INT:source.port:int} \[%{HAPROXYDATE:haproxy.request_date}\] %{NOTSPACE:haproxy.frontend_name} %{NOTSPACE:haproxy.backend_name}/(?:|%{NOTSPACE:haproxy.server_name}) (?:-1|%{INT:haproxy.total_waiting_time_ms:int})/(?:-1|%{INT:haproxy.connection_wait_time_ms:int})/%{NOTSPACE:haproxy.total_time_ms} %{INT:source.bytes:long} %{NOTSPACE:haproxy.termination_state} %{INT:haproxy.connections.active:int}/%{INT:haproxy.connections.frontend:int}/%{INT:haproxy.connections.backend:int}/%{INT:haproxy.connections.server:int}/%{INT:haproxy.connections.retries:int} %{INT:haproxy.server_queue:int}/%{INT:haproxy.backend_queue:int} +# :long - %{INT:source.bytes:int} diff --git a/libs/grok/src/main/resources/patterns/ecs-v1/httpd b/libs/grok/src/main/resources/patterns/ecs-v1/httpd new file mode 100644 index 0000000000000..9b58e5096ad38 --- /dev/null +++ b/libs/grok/src/main/resources/patterns/ecs-v1/httpd @@ -0,0 +1,17 @@ +HTTPDUSER %{EMAILADDRESS}|%{USER} +HTTPDERROR_DATE %{DAY} %{MONTH} %{MONTHDAY} %{TIME} %{YEAR} + +# Log formats +HTTPD_COMMONLOG %{IPORHOST:source.address} (?:-|%{HTTPDUSER:apache.access.user.identity}) (?:-|%{HTTPDUSER:user.name}) \[%{HTTPDATE:timestamp}\] "(?:%{WORD:http.request.method} %{NOTSPACE:url.original}(?: HTTP/%{NUMBER:http.version})?|%{DATA})" (?:-|%{INT:http.response.status_code:int}) (?:-|%{INT:http.response.body.bytes:long}) +# :long - %{INT:http.response.body.bytes:int} +HTTPD_COMBINEDLOG %{HTTPD_COMMONLOG} "(?:-|%{DATA:http.request.referrer})" "(?:-|%{DATA:user_agent.original})" + +# Error logs +HTTPD20_ERRORLOG \[%{HTTPDERROR_DATE:timestamp}\] \[%{LOGLEVEL:log.level}\] (?:\[client %{IPORHOST:source.address}\] )?%{GREEDYDATA:message} +HTTPD24_ERRORLOG \[%{HTTPDERROR_DATE:timestamp}\] \[(?:%{WORD:apache.error.module})?:%{LOGLEVEL:log.level}\] \[pid %{POSINT:process.pid:long}(:tid %{INT:process.thread.id:int})?\](?: \(%{POSINT:apache.error.proxy.error.code?}\)%{DATA:apache.error.proxy.error.message}:)?(?: \[client %{IPORHOST:source.address}(?::%{POSINT:source.port:int})?\])?(?: %{DATA:error.code}:)? %{GREEDYDATA:message} +# :long - %{INT:process.thread.id:int} +HTTPD_ERRORLOG %{HTTPD20_ERRORLOG}|%{HTTPD24_ERRORLOG} + +# Deprecated +COMMONAPACHELOG %{HTTPD_COMMONLOG} +COMBINEDAPACHELOG %{HTTPD_COMBINEDLOG} diff --git a/libs/grok/src/main/resources/patterns/ecs-v1/java b/libs/grok/src/main/resources/patterns/ecs-v1/java new file mode 100644 index 0000000000000..8dd539f6c0283 --- /dev/null +++ b/libs/grok/src/main/resources/patterns/ecs-v1/java @@ -0,0 +1,34 @@ +JAVACLASS (?:[a-zA-Z$_][a-zA-Z$_0-9]*\.)*[a-zA-Z$_][a-zA-Z$_0-9]* +#Space is an allowed character to match special cases like 'Native Method' or 'Unknown Source' +JAVAFILE (?:[a-zA-Z$_0-9. 
-]+) +#Allow special , methods +JAVAMETHOD (?:(<(?:cl)?init>)|[a-zA-Z$_][a-zA-Z$_0-9]*) +#Line number is optional in special cases 'Native method' or 'Unknown source' +JAVASTACKTRACEPART %{SPACE}at %{JAVACLASS:java.log.origin.class.name}\.%{JAVAMETHOD:log.origin.function}\(%{JAVAFILE:log.origin.file.name}(?::%{INT:log.origin.file.line:int})?\) +# Java Logs +JAVATHREAD (?:[A-Z]{2}-Processor[\d]+) +JAVALOGMESSAGE (?:.*) + +# MMM dd, yyyy HH:mm:ss eg: Jan 9, 2014 7:13:13 AM +# matches default logging configuration in Tomcat 4.1, 5.0, 5.5, 6.0, 7.0 +CATALINA7_DATESTAMP %{MONTH} %{MONTHDAY}, %{YEAR} %{HOUR}:%{MINUTE}:%{SECOND} (?:AM|PM) +CATALINA7_LOG %{CATALINA7_DATESTAMP:timestamp} %{JAVACLASS:java.log.origin.class.name}(?: %{JAVAMETHOD:log.origin.function})?\s*(?:%{LOGLEVEL:log.level}:)? %{JAVALOGMESSAGE:message} + +# 31-Jul-2020 16:40:38.578 in Tomcat 8.5/9.0 +CATALINA8_DATESTAMP %{MONTHDAY}-%{MONTH}-%{YEAR} %{HOUR}:%{MINUTE}:%{SECOND} +CATALINA8_LOG %{CATALINA8_DATESTAMP:timestamp} %{LOGLEVEL:log.level} \[%{DATA:java.log.origin.thread.name}\] %{JAVACLASS:java.log.origin.class.name}\.(?:%{JAVAMETHOD:log.origin.function})? %{JAVALOGMESSAGE:message} + +CATALINA_DATESTAMP (?:%{CATALINA8_DATESTAMP})|(?:%{CATALINA7_DATESTAMP}) +CATALINALOG (?:%{CATALINA8_LOG})|(?:%{CATALINA7_LOG}) + +# in Tomcat 5.5, 6.0, 7.0 it is the same as catalina.out logging format +TOMCAT7_LOG %{CATALINA7_LOG} +TOMCAT8_LOG %{CATALINA8_LOG} + +# NOTE: a weird log we started with - not sure what TC version this should match out of the box (due the | delimiters) +TOMCATLEGACY_DATESTAMP %{YEAR}-%{MONTHNUM}-%{MONTHDAY} %{HOUR}:%{MINUTE}:%{SECOND}(?: %{ISO8601_TIMEZONE})? +TOMCATLEGACY_LOG %{TOMCATLEGACY_DATESTAMP:timestamp} \| %{LOGLEVEL:log.level} \| %{JAVACLASS:java.log.origin.class.name} - %{JAVALOGMESSAGE:message} + +TOMCAT_DATESTAMP (?:%{CATALINA8_DATESTAMP})|(?:%{CATALINA7_DATESTAMP})|(?:%{TOMCATLEGACY_DATESTAMP}) + +TOMCATLOG (?:%{TOMCAT8_LOG})|(?:%{TOMCAT7_LOG})|(?:%{TOMCATLEGACY_LOG}) diff --git a/libs/grok/src/main/resources/patterns/ecs-v1/junos b/libs/grok/src/main/resources/patterns/ecs-v1/junos new file mode 100644 index 0000000000000..d23d45502aa19 --- /dev/null +++ b/libs/grok/src/main/resources/patterns/ecs-v1/junos @@ -0,0 +1,13 @@ +# JUNOS 11.4 RT_FLOW patterns +RT_FLOW_TAG (?:RT_FLOW_SESSION_CREATE|RT_FLOW_SESSION_CLOSE|RT_FLOW_SESSION_DENY) +# deprecated legacy name: +RT_FLOW_EVENT RT_FLOW_TAG + +RT_FLOW1 %{RT_FLOW_TAG:juniper.srx.tag}: %{GREEDYDATA:juniper.srx.reason}: %{IP:source.ip}/%{INT:source.port:int}->%{IP:destination.ip}/%{INT:destination.port:int} %{DATA:juniper.srx.service_name} %{IP:source.nat.ip}/%{INT:source.nat.port:int}->%{IP:destination.nat.ip}/%{INT:destination.nat.port:int} (?:(?:None)|(?:%{DATA:juniper.srx.src_nat_rule_name})) (?:(?:None)|(?:%{DATA:juniper.srx.dst_nat_rule_name})) %{INT:network.iana_number} %{DATA:rule.name} %{DATA:observer.ingress.zone} %{DATA:observer.egress.zone} %{INT:juniper.srx.session_id} \d+\(%{INT:source.bytes:long}\) \d+\(%{INT:destination.bytes:long}\) %{INT:juniper.srx.elapsed_time:int} .* +# :long - %{INT:source.bytes:int} +# :long - %{INT:destination.bytes:int} + +RT_FLOW2 %{RT_FLOW_TAG:juniper.srx.tag}: session created %{IP:source.ip}/%{INT:source.port:int}->%{IP:destination.ip}/%{INT:destination.port:int} %{DATA:juniper.srx.service_name} %{IP:source.nat.ip}/%{INT:source.nat.port:int}->%{IP:destination.nat.ip}/%{INT:destination.nat.port:int} (?:(?:None)|(?:%{DATA:juniper.srx.src_nat_rule_name})) (?:(?:None)|(?:%{DATA:juniper.srx.dst_nat_rule_name})) 
%{INT:network.iana_number} %{DATA:rule.name} %{DATA:observer.ingress.zone} %{DATA:observer.egress.zone} %{INT:juniper.srx.session_id} .* + +RT_FLOW3 %{RT_FLOW_TAG:juniper.srx.tag}: session denied %{IP:source.ip}/%{INT:source.port:int}->%{IP:destination.ip}/%{INT:destination.port:int} %{DATA:juniper.srx.service_name} %{INT:network.iana_number}\(\d\) %{DATA:rule.name} %{DATA:observer.ingress.zone} %{DATA:observer.egress.zone} .* + diff --git a/libs/grok/src/main/resources/patterns/ecs-v1/linux-syslog b/libs/grok/src/main/resources/patterns/ecs-v1/linux-syslog new file mode 100644 index 0000000000000..f2582f506c099 --- /dev/null +++ b/libs/grok/src/main/resources/patterns/ecs-v1/linux-syslog @@ -0,0 +1,16 @@ +SYSLOG5424PRINTASCII [!-~]+ + +SYSLOGBASE2 (?:%{SYSLOGTIMESTAMP:timestamp}|%{TIMESTAMP_ISO8601:timestamp})(?: %{SYSLOGFACILITY})?(?: %{SYSLOGHOST:host.hostname})?(?: %{SYSLOGPROG}:)? +SYSLOGPAMSESSION %{SYSLOGBASE} (?=%{GREEDYDATA:message})%{WORD:system.auth.pam.module}\(%{DATA:system.auth.pam.origin}\): session %{WORD:system.auth.pam.session_state} for user %{USERNAME:user.name}(?: by %{GREEDYDATA})? + +CRON_ACTION [A-Z ]+ +CRONLOG %{SYSLOGBASE} \(%{USER:user.name}\) %{CRON_ACTION:system.cron.action} \(%{DATA:message}\) + +SYSLOGLINE %{SYSLOGBASE2} %{GREEDYDATA:message} + +# IETF 5424 syslog(8) format (see http://www.rfc-editor.org/info/rfc5424) +SYSLOG5424PRI <%{NONNEGINT:log.syslog.priority:int}> +SYSLOG5424SD \[%{DATA}\]+ +SYSLOG5424BASE %{SYSLOG5424PRI}%{NONNEGINT:system.syslog.version} +(?:-|%{TIMESTAMP_ISO8601:timestamp}) +(?:-|%{IPORHOST:host.hostname}) +(?:-|%{SYSLOG5424PRINTASCII:process.name}) +(?:-|%{POSINT:process.pid:int}) +(?:-|%{SYSLOG5424PRINTASCII:event.code}) +(?:-|%{SYSLOG5424SD:system.syslog.structured_data})? + +SYSLOG5424LINE %{SYSLOG5424BASE} +%{GREEDYDATA:message} diff --git a/libs/grok/src/main/resources/patterns/maven b/libs/grok/src/main/resources/patterns/ecs-v1/maven similarity index 100% rename from libs/grok/src/main/resources/patterns/maven rename to libs/grok/src/main/resources/patterns/ecs-v1/maven diff --git a/libs/grok/src/main/resources/patterns/ecs-v1/mcollective b/libs/grok/src/main/resources/patterns/ecs-v1/mcollective new file mode 100644 index 0000000000000..f797cbde8a2bd --- /dev/null +++ b/libs/grok/src/main/resources/patterns/ecs-v1/mcollective @@ -0,0 +1,4 @@ +# Remember, these can be multi-line events. +MCOLLECTIVE ., \[%{TIMESTAMP_ISO8601:timestamp} #%{POSINT:process.pid:int}\]%{SPACE}%{LOGLEVEL:log.level} + +MCOLLECTIVEAUDIT %{TIMESTAMP_ISO8601:timestamp}: diff --git a/libs/grok/src/main/resources/patterns/ecs-v1/mongodb b/libs/grok/src/main/resources/patterns/ecs-v1/mongodb new file mode 100644 index 0000000000000..7f1c03de61f21 --- /dev/null +++ b/libs/grok/src/main/resources/patterns/ecs-v1/mongodb @@ -0,0 +1,7 @@ +MONGO_LOG %{SYSLOGTIMESTAMP:timestamp} \[%{WORD:mongodb.component}\] %{GREEDYDATA:message} +MONGO_QUERY \{ (?<={ ).*(?= } ntoreturn:) \} +MONGO_SLOWQUERY %{WORD:mongodb.profile.op} %{MONGO_WORDDASH:mongodb.database}\.%{MONGO_WORDDASH:mongodb.collection} %{WORD}: %{MONGO_QUERY:mongodb.query.original} ntoreturn:%{NONNEGINT:mongodb.profile.ntoreturn:int} ntoskip:%{NONNEGINT:mongodb.profile.ntoskip:int} nscanned:%{NONNEGINT:mongodb.profile.nscanned:int}.*? nreturned:%{NONNEGINT:mongodb.profile.nreturned:int}.*? 
%{INT:mongodb.profile.duration:int}ms +MONGO_WORDDASH \b[\w-]+\b +MONGO3_SEVERITY \w +MONGO3_COMPONENT %{WORD} +MONGO3_LOG %{TIMESTAMP_ISO8601:timestamp} %{MONGO3_SEVERITY:log.level} (?:-|%{MONGO3_COMPONENT:mongodb.component})%{SPACE}(?:\[%{DATA:mongodb.context}\])? %{GREEDYDATA:message} diff --git a/libs/grok/src/main/resources/patterns/ecs-v1/nagios b/libs/grok/src/main/resources/patterns/ecs-v1/nagios new file mode 100644 index 0000000000000..d0a3b423b14f7 --- /dev/null +++ b/libs/grok/src/main/resources/patterns/ecs-v1/nagios @@ -0,0 +1,124 @@ +################################################################################## +################################################################################## +# Chop Nagios log files to smithereens! +# +# A set of GROK filters to process logfiles generated by Nagios. +# While it does not, this set intends to cover all possible Nagios logs. +# +# Some more work needs to be done to cover all External Commands: +# http://old.nagios.org/developerinfo/externalcommands/commandlist.php +# +# If you need some support on these rules please contact: +# Jelle Smet http://smetj.net +# +################################################################################# +################################################################################# + +NAGIOSTIME \[%{NUMBER:timestamp}\] + +############################################### +######## Begin nagios log types +############################################### +NAGIOS_TYPE_CURRENT_SERVICE_STATE CURRENT SERVICE STATE +NAGIOS_TYPE_CURRENT_HOST_STATE CURRENT HOST STATE + +NAGIOS_TYPE_SERVICE_NOTIFICATION SERVICE NOTIFICATION +NAGIOS_TYPE_HOST_NOTIFICATION HOST NOTIFICATION + +NAGIOS_TYPE_SERVICE_ALERT SERVICE ALERT +NAGIOS_TYPE_HOST_ALERT HOST ALERT + +NAGIOS_TYPE_SERVICE_FLAPPING_ALERT SERVICE FLAPPING ALERT +NAGIOS_TYPE_HOST_FLAPPING_ALERT HOST FLAPPING ALERT + +NAGIOS_TYPE_SERVICE_DOWNTIME_ALERT SERVICE DOWNTIME ALERT +NAGIOS_TYPE_HOST_DOWNTIME_ALERT HOST DOWNTIME ALERT + +NAGIOS_TYPE_PASSIVE_SERVICE_CHECK PASSIVE SERVICE CHECK +NAGIOS_TYPE_PASSIVE_HOST_CHECK PASSIVE HOST CHECK + +NAGIOS_TYPE_SERVICE_EVENT_HANDLER SERVICE EVENT HANDLER +NAGIOS_TYPE_HOST_EVENT_HANDLER HOST EVENT HANDLER + +NAGIOS_TYPE_EXTERNAL_COMMAND EXTERNAL COMMAND +NAGIOS_TYPE_TIMEPERIOD_TRANSITION TIMEPERIOD TRANSITION +############################################### +######## End nagios log types +############################################### + +############################################### +######## Begin external check types +############################################### +NAGIOS_EC_DISABLE_SVC_CHECK DISABLE_SVC_CHECK +NAGIOS_EC_ENABLE_SVC_CHECK ENABLE_SVC_CHECK +NAGIOS_EC_DISABLE_HOST_CHECK DISABLE_HOST_CHECK +NAGIOS_EC_ENABLE_HOST_CHECK ENABLE_HOST_CHECK +NAGIOS_EC_PROCESS_SERVICE_CHECK_RESULT PROCESS_SERVICE_CHECK_RESULT +NAGIOS_EC_PROCESS_HOST_CHECK_RESULT PROCESS_HOST_CHECK_RESULT +NAGIOS_EC_SCHEDULE_SERVICE_DOWNTIME SCHEDULE_SERVICE_DOWNTIME +NAGIOS_EC_SCHEDULE_HOST_DOWNTIME SCHEDULE_HOST_DOWNTIME +NAGIOS_EC_DISABLE_HOST_SVC_NOTIFICATIONS DISABLE_HOST_SVC_NOTIFICATIONS +NAGIOS_EC_ENABLE_HOST_SVC_NOTIFICATIONS ENABLE_HOST_SVC_NOTIFICATIONS +NAGIOS_EC_DISABLE_HOST_NOTIFICATIONS DISABLE_HOST_NOTIFICATIONS +NAGIOS_EC_ENABLE_HOST_NOTIFICATIONS ENABLE_HOST_NOTIFICATIONS +NAGIOS_EC_DISABLE_SVC_NOTIFICATIONS DISABLE_SVC_NOTIFICATIONS +NAGIOS_EC_ENABLE_SVC_NOTIFICATIONS ENABLE_SVC_NOTIFICATIONS +############################################### +######## End external check types 
+############################################### +NAGIOS_WARNING Warning:%{SPACE}%{GREEDYDATA:message} + +NAGIOS_CURRENT_SERVICE_STATE %{NAGIOS_TYPE_CURRENT_SERVICE_STATE:nagios.log.type}: %{DATA:host.hostname};%{DATA:service.name};%{DATA:service.state};%{DATA:nagios.log.state_type};%{INT:nagios.log.attempt:int};%{GREEDYDATA:message} +NAGIOS_CURRENT_HOST_STATE %{NAGIOS_TYPE_CURRENT_HOST_STATE:nagios.log.type}: %{DATA:host.hostname};%{DATA:service.state};%{DATA:nagios.log.state_type};%{INT:nagios.log.attempt:int};%{GREEDYDATA:message} + +NAGIOS_SERVICE_NOTIFICATION %{NAGIOS_TYPE_SERVICE_NOTIFICATION:nagios.log.type}: %{DATA:user.name};%{DATA:host.hostname};%{DATA:service.name};%{DATA:service.state};%{DATA:nagios.log.notification_command};%{GREEDYDATA:message} +NAGIOS_HOST_NOTIFICATION %{NAGIOS_TYPE_HOST_NOTIFICATION:nagios.log.type}: %{DATA:user.name};%{DATA:host.hostname};%{DATA:service.state};%{DATA:nagios.log.notification_command};%{GREEDYDATA:message} + +NAGIOS_SERVICE_ALERT %{NAGIOS_TYPE_SERVICE_ALERT:nagios.log.type}: %{DATA:host.hostname};%{DATA:service.name};%{DATA:service.state};%{DATA:nagios.log.state_type};%{INT:nagios.log.attempt:int};%{GREEDYDATA:message} +NAGIOS_HOST_ALERT %{NAGIOS_TYPE_HOST_ALERT:nagios.log.type}: %{DATA:host.hostname};%{DATA:service.state};%{DATA:nagios.log.state_type};%{INT:nagios.log.attempt:int};%{GREEDYDATA:message} + +NAGIOS_SERVICE_FLAPPING_ALERT %{NAGIOS_TYPE_SERVICE_FLAPPING_ALERT:nagios.log.type}: %{DATA:host.hostname};%{DATA:service.name};%{DATA:service.state};%{GREEDYDATA:message} +NAGIOS_HOST_FLAPPING_ALERT %{NAGIOS_TYPE_HOST_FLAPPING_ALERT:nagios.log.type}: %{DATA:host.hostname};%{DATA:service.state};%{GREEDYDATA:message} + +NAGIOS_SERVICE_DOWNTIME_ALERT %{NAGIOS_TYPE_SERVICE_DOWNTIME_ALERT:nagios.log.type}: %{DATA:host.hostname};%{DATA:service.name};%{DATA:service.state};%{GREEDYDATA:nagios.log.comment} +NAGIOS_HOST_DOWNTIME_ALERT %{NAGIOS_TYPE_HOST_DOWNTIME_ALERT:nagios.log.type}: %{DATA:host.hostname};%{DATA:service.state};%{GREEDYDATA:nagios.log.comment} + +NAGIOS_PASSIVE_SERVICE_CHECK %{NAGIOS_TYPE_PASSIVE_SERVICE_CHECK:nagios.log.type}: %{DATA:host.hostname};%{DATA:service.name};%{DATA:service.state};%{GREEDYDATA:nagios.log.comment} +NAGIOS_PASSIVE_HOST_CHECK %{NAGIOS_TYPE_PASSIVE_HOST_CHECK:nagios.log.type}: %{DATA:host.hostname};%{DATA:service.state};%{GREEDYDATA:nagios.log.comment} + +NAGIOS_SERVICE_EVENT_HANDLER %{NAGIOS_TYPE_SERVICE_EVENT_HANDLER:nagios.log.type}: %{DATA:host.hostname};%{DATA:service.name};%{DATA:service.state};%{DATA:nagios.log.state_type};%{DATA:nagios.log.event_handler_name} +NAGIOS_HOST_EVENT_HANDLER %{NAGIOS_TYPE_HOST_EVENT_HANDLER:nagios.log.type}: %{DATA:host.hostname};%{DATA:service.state};%{DATA:nagios.log.state_type};%{DATA:nagios.log.event_handler_name} + +NAGIOS_TIMEPERIOD_TRANSITION %{NAGIOS_TYPE_TIMEPERIOD_TRANSITION:nagios.log.type}: %{DATA:service.name};%{NUMBER:nagios.log.period_from:int};%{NUMBER:nagios.log.period_to:int} + +#################### +#### External checks +#################### + +#Disable host & service check +NAGIOS_EC_LINE_DISABLE_SVC_CHECK %{NAGIOS_TYPE_EXTERNAL_COMMAND:nagios.log.type}: %{NAGIOS_EC_DISABLE_SVC_CHECK:nagios.log.command};%{DATA:host.hostname};%{DATA:service.name} +NAGIOS_EC_LINE_DISABLE_HOST_CHECK %{NAGIOS_TYPE_EXTERNAL_COMMAND:nagios.log.type}: %{NAGIOS_EC_DISABLE_HOST_CHECK:nagios.log.command};%{DATA:host.hostname} + +#Enable host & service check +NAGIOS_EC_LINE_ENABLE_SVC_CHECK %{NAGIOS_TYPE_EXTERNAL_COMMAND:nagios.log.type}: 
%{NAGIOS_EC_ENABLE_SVC_CHECK:nagios.log.command};%{DATA:host.hostname};%{DATA:service.name} +NAGIOS_EC_LINE_ENABLE_HOST_CHECK %{NAGIOS_TYPE_EXTERNAL_COMMAND:nagios.log.type}: %{NAGIOS_EC_ENABLE_HOST_CHECK:nagios.log.command};%{DATA:host.hostname} + +#Process host & service check +NAGIOS_EC_LINE_PROCESS_SERVICE_CHECK_RESULT %{NAGIOS_TYPE_EXTERNAL_COMMAND:nagios.log.type}: %{NAGIOS_EC_PROCESS_SERVICE_CHECK_RESULT:nagios.log.command};%{DATA:host.hostname};%{DATA:service.name};%{DATA:service.state};%{GREEDYDATA:nagios.log.check_result} +NAGIOS_EC_LINE_PROCESS_HOST_CHECK_RESULT %{NAGIOS_TYPE_EXTERNAL_COMMAND:nagios.log.type}: %{NAGIOS_EC_PROCESS_HOST_CHECK_RESULT:nagios.log.command};%{DATA:host.hostname};%{DATA:service.state};%{GREEDYDATA:nagios.log.check_result} + +#Disable host & service notifications +NAGIOS_EC_LINE_DISABLE_HOST_SVC_NOTIFICATIONS %{NAGIOS_TYPE_EXTERNAL_COMMAND:nagios.log.type}: %{NAGIOS_EC_DISABLE_HOST_SVC_NOTIFICATIONS:nagios.log.command};%{GREEDYDATA:host.hostname} +NAGIOS_EC_LINE_DISABLE_HOST_NOTIFICATIONS %{NAGIOS_TYPE_EXTERNAL_COMMAND:nagios.log.type}: %{NAGIOS_EC_DISABLE_HOST_NOTIFICATIONS:nagios.log.command};%{GREEDYDATA:host.hostname} +NAGIOS_EC_LINE_DISABLE_SVC_NOTIFICATIONS %{NAGIOS_TYPE_EXTERNAL_COMMAND:nagios.log.type}: %{NAGIOS_EC_DISABLE_SVC_NOTIFICATIONS:nagios.log.command};%{DATA:host.hostname};%{GREEDYDATA:service.name} + +#Enable host & service notifications +NAGIOS_EC_LINE_ENABLE_HOST_SVC_NOTIFICATIONS %{NAGIOS_TYPE_EXTERNAL_COMMAND:nagios.log.type}: %{NAGIOS_EC_ENABLE_HOST_SVC_NOTIFICATIONS:nagios.log.command};%{GREEDYDATA:host.hostname} +NAGIOS_EC_LINE_ENABLE_HOST_NOTIFICATIONS %{NAGIOS_TYPE_EXTERNAL_COMMAND:nagios.log.type}: %{NAGIOS_EC_ENABLE_HOST_NOTIFICATIONS:nagios.log.command};%{GREEDYDATA:host.hostname} +NAGIOS_EC_LINE_ENABLE_SVC_NOTIFICATIONS %{NAGIOS_TYPE_EXTERNAL_COMMAND:nagios.log.type}: %{NAGIOS_EC_ENABLE_SVC_NOTIFICATIONS:nagios.log.command};%{DATA:host.hostname};%{GREEDYDATA:service.name} + +#Schedule host & service downtime +NAGIOS_EC_LINE_SCHEDULE_HOST_DOWNTIME %{NAGIOS_TYPE_EXTERNAL_COMMAND:nagios.log.type}: %{NAGIOS_EC_SCHEDULE_HOST_DOWNTIME:nagios.log.command};%{DATA:host.hostname};%{NUMBER:nagios.log.start_time};%{NUMBER:nagios.log.end_time};%{NUMBER:nagios.log.fixed};%{NUMBER:nagios.log.trigger_id};%{NUMBER:nagios.log.duration:int};%{DATA:user.name};%{DATA:nagios.log.comment} + +#End matching line +NAGIOSLOGLINE %{NAGIOSTIME} (?:%{NAGIOS_WARNING}|%{NAGIOS_CURRENT_SERVICE_STATE}|%{NAGIOS_CURRENT_HOST_STATE}|%{NAGIOS_SERVICE_NOTIFICATION}|%{NAGIOS_HOST_NOTIFICATION}|%{NAGIOS_SERVICE_ALERT}|%{NAGIOS_HOST_ALERT}|%{NAGIOS_SERVICE_FLAPPING_ALERT}|%{NAGIOS_HOST_FLAPPING_ALERT}|%{NAGIOS_SERVICE_DOWNTIME_ALERT}|%{NAGIOS_HOST_DOWNTIME_ALERT}|%{NAGIOS_PASSIVE_SERVICE_CHECK}|%{NAGIOS_PASSIVE_HOST_CHECK}|%{NAGIOS_SERVICE_EVENT_HANDLER}|%{NAGIOS_HOST_EVENT_HANDLER}|%{NAGIOS_TIMEPERIOD_TRANSITION}|%{NAGIOS_EC_LINE_DISABLE_SVC_CHECK}|%{NAGIOS_EC_LINE_ENABLE_SVC_CHECK}|%{NAGIOS_EC_LINE_DISABLE_HOST_CHECK}|%{NAGIOS_EC_LINE_ENABLE_HOST_CHECK}|%{NAGIOS_EC_LINE_PROCESS_HOST_CHECK_RESULT}|%{NAGIOS_EC_LINE_PROCESS_SERVICE_CHECK_RESULT}|%{NAGIOS_EC_LINE_SCHEDULE_HOST_DOWNTIME}|%{NAGIOS_EC_LINE_DISABLE_HOST_SVC_NOTIFICATIONS}|%{NAGIOS_EC_LINE_ENABLE_HOST_SVC_NOTIFICATIONS}|%{NAGIOS_EC_LINE_DISABLE_HOST_NOTIFICATIONS}|%{NAGIOS_EC_LINE_ENABLE_HOST_NOTIFICATIONS}|%{NAGIOS_EC_LINE_DISABLE_SVC_NOTIFICATIONS}|%{NAGIOS_EC_LINE_ENABLE_SVC_NOTIFICATIONS}) diff --git a/libs/grok/src/main/resources/patterns/ecs-v1/postgresql 
b/libs/grok/src/main/resources/patterns/ecs-v1/postgresql new file mode 100644 index 0000000000000..cbfd5a690c4d2 --- /dev/null +++ b/libs/grok/src/main/resources/patterns/ecs-v1/postgresql @@ -0,0 +1,2 @@ +# Default postgresql pg_log format pattern +POSTGRESQL %{DATESTAMP:timestamp} %{TZ:event.timezone} %{DATA:user.name} %{GREEDYDATA:postgresql.log.connection_id} %{POSINT:process.pid:int} diff --git a/libs/grok/src/main/resources/patterns/ecs-v1/rails b/libs/grok/src/main/resources/patterns/ecs-v1/rails new file mode 100644 index 0000000000000..81717d9b8ffbe --- /dev/null +++ b/libs/grok/src/main/resources/patterns/ecs-v1/rails @@ -0,0 +1,13 @@ +RUUID \h{32} +# rails controller with action +RCONTROLLER (?[^#]+)#(?\w+) + +# this will often be the only line: +RAILS3HEAD (?m)Started %{WORD:http.request.method} "%{URIPATHPARAM:url.original}" for %{IPORHOST:source.address} at (?%{YEAR}-%{MONTHNUM}-%{MONTHDAY} %{HOUR}:%{MINUTE}:%{SECOND} %{ISO8601_TIMEZONE}) +# for some a strange reason, params are stripped of {} - not sure that's a good idea. +RPROCESSING \W*Processing by %{RCONTROLLER} as (?\S+)(?:\W*Parameters: {%{DATA:rails.request.params}}\W*)? +RAILS3FOOT Completed %{POSINT:http.response.status_code:int}%{DATA} in %{NUMBER:rails.request.duration.total:float}ms %{RAILS3PROFILE}%{GREEDYDATA} +RAILS3PROFILE (?:\(Views: %{NUMBER:rails.request.duration.view:float}ms \| ActiveRecord: %{NUMBER:rails.request.duration.active_record:float}ms|\(ActiveRecord: %{NUMBER:rails.request.duration.active_record:float}ms)? + +# putting it all together +RAILS3 %{RAILS3HEAD}(?:%{RPROCESSING})?(?(?:%{DATA}\n)*)(?:%{RAILS3FOOT})? diff --git a/libs/grok/src/main/resources/patterns/ecs-v1/redis b/libs/grok/src/main/resources/patterns/ecs-v1/redis new file mode 100644 index 0000000000000..063290ed80dd9 --- /dev/null +++ b/libs/grok/src/main/resources/patterns/ecs-v1/redis @@ -0,0 +1,3 @@ +REDISTIMESTAMP %{MONTHDAY} %{MONTH} %{TIME} +REDISLOG \[%{POSINT:process.pid:int}\] %{REDISTIMESTAMP:timestamp} \* +REDISMONLOG %{NUMBER:timestamp} \[%{INT:redis.database.id} %{IP:client.ip}:%{POSINT:client.port:int}\] "%{WORD:redis.command.name}"\s?%{GREEDYDATA:redis.command.args} diff --git a/libs/grok/src/main/resources/patterns/ecs-v1/ruby b/libs/grok/src/main/resources/patterns/ecs-v1/ruby new file mode 100644 index 0000000000000..2c9a7cedd5146 --- /dev/null +++ b/libs/grok/src/main/resources/patterns/ecs-v1/ruby @@ -0,0 +1,2 @@ +RUBY_LOGLEVEL (?:DEBUG|FATAL|ERROR|WARN|INFO) +RUBY_LOGGER [DFEWI], \[%{TIMESTAMP_ISO8601:timestamp} #%{POSINT:process.pid:int}\] *%{RUBY_LOGLEVEL:log.level} -- +%{DATA:process.name}: %{GREEDYDATA:message} diff --git a/libs/grok/src/main/resources/patterns/ecs-v1/squid b/libs/grok/src/main/resources/patterns/ecs-v1/squid new file mode 100644 index 0000000000000..dfff4f623f095 --- /dev/null +++ b/libs/grok/src/main/resources/patterns/ecs-v1/squid @@ -0,0 +1,6 @@ +# Pattern squid3 +# Documentation of squid3 logs formats can be found at the following link: +# http://wiki.squid-cache.org/Features/LogFormat +SQUID3_STATUS (?:%{POSINT:http.response.status_code:int}|0|000) +SQUID3 %{NUMBER:timestamp}\s+%{NUMBER:squid.request.duration:int}\s%{IP:source.ip}\s%{WORD:event.action}/%{SQUID3_STATUS}\s%{INT:http.response.bytes:long}\s%{WORD:http.request.method}\s%{NOTSPACE:url.original}\s(?:-|%{NOTSPACE:user.name})\s%{WORD:squid.hierarchy_code}/(?:-|%{IPORHOST:destination.address})\s(?:-|%{NOTSPACE:http.response.mime_type}) +# :long - %{INT:http.response.bytes:int} diff --git 
a/libs/grok/src/main/resources/patterns/ecs-v1/zeek b/libs/grok/src/main/resources/patterns/ecs-v1/zeek new file mode 100644 index 0000000000000..397e84aa17c35 --- /dev/null +++ b/libs/grok/src/main/resources/patterns/ecs-v1/zeek @@ -0,0 +1,33 @@ +# updated Zeek log matching, for legacy matching see the patters/ecs-v1/bro + +ZEEK_BOOL [TF] +ZEEK_DATA [^\t]+ + +# http.log - the 'new' format (compared to BRO_HTTP) +# has *version* and *origin* fields added and *filename* replaced with *orig_filenames* + *resp_filenames* +ZEEK_HTTP %{NUMBER:timestamp}\t%{NOTSPACE:zeek.session_id}\t%{IP:source.ip}\t%{INT:source.port:int}\t%{IP:destination.ip}\t%{INT:destination.port:int}\t%{INT:zeek.http.trans_depth:int}\t(?:-|%{WORD:http.request.method})\t(?:-|%{ZEEK_DATA:url.domain})\t(?:-|%{ZEEK_DATA:url.original})\t(?:-|%{ZEEK_DATA:http.request.referrer})\t(?:-|%{NUMBER:http.version})\t(?:-|%{ZEEK_DATA:user_agent.original})\t(?:-|%{ZEEK_DATA:zeek.http.origin})\t(?:-|%{NUMBER:http.request.body.bytes:long})\t(?:-|%{NUMBER:http.response.body.bytes:long})\t(?:-|%{POSINT:http.response.status_code:int})\t(?:-|%{DATA:zeek.http.status_msg})\t(?:-|%{POSINT:zeek.http.info_code:int})\t(?:-|%{DATA:zeek.http.info_msg})\t(?:\(empty\)|%{ZEEK_DATA:zeek.http.tags})\t(?:-|%{ZEEK_DATA:url.username})\t(?:-|%{ZEEK_DATA:url.password})\t(?:-|%{ZEEK_DATA:zeek.http.proxied})\t(?:-|%{ZEEK_DATA:zeek.http.orig_fuids})\t(?:-|%{ZEEK_DATA:zeek.http.orig_filenames})\t(?:-|%{ZEEK_DATA:http.request.mime_type})\t(?:-|%{ZEEK_DATA:zeek.http.resp_fuids})\t(?:-|%{ZEEK_DATA:zeek.http.resp_filenames})\t(?:-|%{ZEEK_DATA:http.response.mime_type}) +# :long - %{NUMBER:http.request.body.bytes:int} +# :long - %{NUMBER:http.response.body.bytes:int} + +# dns.log - 'updated' BRO_DNS format (added *zeek.dns.rtt*) +ZEEK_DNS %{NUMBER:timestamp}\t%{NOTSPACE:zeek.session_id}\t%{IP:source.ip}\t%{INT:source.port:int}\t%{IP:destination.ip}\t%{INT:destination.port:int}\t%{WORD:network.transport}\t(?:-|%{INT:dns.id:int})\t(?:-|%{NUMBER:zeek.dns.rtt:float})\t(?:-|%{ZEEK_DATA:dns.question.name})\t(?:-|%{INT:zeek.dns.qclass:int})\t(?:-|%{ZEEK_DATA:zeek.dns.qclass_name})\t(?:-|%{INT:zeek.dns.qtype:int})\t(?:-|%{ZEEK_DATA:dns.question.type})\t(?:-|%{INT:zeek.dns.rcode:int})\t(?:-|%{ZEEK_DATA:dns.response_code})\t%{ZEEK_BOOL:zeek.dns.AA}\t%{ZEEK_BOOL:zeek.dns.TC}\t%{ZEEK_BOOL:zeek.dns.RD}\t%{ZEEK_BOOL:zeek.dns.RA}\t%{NONNEGINT:zeek.dns.Z:int}\t(?:-|%{ZEEK_DATA:zeek.dns.answers})\t(?:-|%{DATA:zeek.dns.TTLs})\t(?:-|%{ZEEK_BOOL:zeek.dns.rejected}) + +# conn.log - the 'new' format (requires *zeek.connection.local_resp*, handles `(empty)` as `-` for tunnel_parents, and optional mac adresses) +ZEEK_CONN %{NUMBER:timestamp}\t%{NOTSPACE:zeek.session_id}\t%{IP:source.ip}\t%{INT:source.port:int}\t%{IP:destination.ip}\t%{INT:destination.port:int}\t%{WORD:network.transport}\t(?:-|%{ZEEK_DATA:network.protocol})\t(?:-|%{NUMBER:zeek.connection.duration:float})\t(?:-|%{INT:zeek.connection.orig_bytes:long})\t(?:-|%{INT:zeek.connection.resp_bytes:long})\t(?:-|%{ZEEK_DATA:zeek.connection.state})\t(?:-|%{ZEEK_BOOL:zeek.connection.local_orig})\t(?:-|%{ZEEK_BOOL:zeek.connection.local_resp})\t(?:-|%{INT:zeek.connection.missed_bytes:long})\t(?:-|%{ZEEK_DATA:zeek.connection.history})\t(?:-|%{INT:source.packets:long})\t(?:-|%{INT:source.bytes:long})\t(?:-|%{INT:destination.packets:long})\t(?:-|%{INT:destination.bytes:long})\t(?:-|%{ZEEK_DATA:zeek.connection.tunnel_parents})(?:\t(?:-|%{COMMONMAC:source.mac})\t(?:-|%{COMMONMAC:destination.mac}))? 
+# :long - %{INT:zeek.connection.orig_bytes:int} +# :long - %{INT:zeek.connection.resp_bytes:int} +# :long - %{INT:zeek.connection.missed_bytes:int} +# :long - %{INT:source.packets:int} +# :long - %{INT:source.bytes:int} +# :long - %{INT:destination.packets:int} +# :long - %{INT:destination.bytes:int} + +# files.log - updated BRO_FILES format (2 new fields added at the end) +ZEEK_FILES_TX_HOSTS (?:-|%{IP:server.ip})|(?%{IP:server.ip}(?:\s,%{IP})+) +ZEEK_FILES_RX_HOSTS (?:-|%{IP:client.ip})|(?%{IP:client.ip}(?:\s,%{IP})+) +ZEEK_FILES %{NUMBER:timestamp}\t%{NOTSPACE:zeek.files.fuid}\t%{ZEEK_FILES_TX_HOSTS}\t%{ZEEK_FILES_RX_HOSTS}\t(?:-|%{ZEEK_DATA:zeek.files.session_ids})\t(?:-|%{ZEEK_DATA:zeek.files.source})\t(?:-|%{INT:zeek.files.depth:int})\t(?:-|%{ZEEK_DATA:zeek.files.analyzers})\t(?:-|%{ZEEK_DATA:file.mime_type})\t(?:-|%{ZEEK_DATA:file.name})\t(?:-|%{NUMBER:zeek.files.duration:float})\t(?:-|%{ZEEK_DATA:zeek.files.local_orig})\t(?:-|%{ZEEK_BOOL:zeek.files.is_orig})\t(?:-|%{INT:zeek.files.seen_bytes:long})\t(?:-|%{INT:file.size:long})\t(?:-|%{INT:zeek.files.missing_bytes:long})\t(?:-|%{INT:zeek.files.overflow_bytes:long})\t(?:-|%{ZEEK_BOOL:zeek.files.timedout})\t(?:-|%{ZEEK_DATA:zeek.files.parent_fuid})\t(?:-|%{ZEEK_DATA:file.hash.md5})\t(?:-|%{ZEEK_DATA:file.hash.sha1})\t(?:-|%{ZEEK_DATA:file.hash.sha256})\t(?:-|%{ZEEK_DATA:zeek.files.extracted})(?:\t(?:-|%{ZEEK_BOOL:zeek.files.extracted_cutoff})\t(?:-|%{INT:zeek.files.extracted_size:long}))? +# :long - %{INT:zeek.files.seen_bytes:int} +# :long - %{INT:file.size:int} +# :long - %{INT:zeek.files.missing_bytes:int} +# :long - %{INT:zeek.files.overflow_bytes:int} +# :long - %{INT:zeek.files.extracted_size:int} diff --git a/libs/grok/src/main/resources/patterns/aws b/libs/grok/src/main/resources/patterns/legacy/aws similarity index 100% rename from libs/grok/src/main/resources/patterns/aws rename to libs/grok/src/main/resources/patterns/legacy/aws diff --git a/libs/grok/src/main/resources/patterns/bacula b/libs/grok/src/main/resources/patterns/legacy/bacula similarity index 100% rename from libs/grok/src/main/resources/patterns/bacula rename to libs/grok/src/main/resources/patterns/legacy/bacula diff --git a/libs/grok/src/main/resources/patterns/bind b/libs/grok/src/main/resources/patterns/legacy/bind similarity index 100% rename from libs/grok/src/main/resources/patterns/bind rename to libs/grok/src/main/resources/patterns/legacy/bind diff --git a/libs/grok/src/main/resources/patterns/bro b/libs/grok/src/main/resources/patterns/legacy/bro similarity index 100% rename from libs/grok/src/main/resources/patterns/bro rename to libs/grok/src/main/resources/patterns/legacy/bro diff --git a/libs/grok/src/main/resources/patterns/exim b/libs/grok/src/main/resources/patterns/legacy/exim similarity index 100% rename from libs/grok/src/main/resources/patterns/exim rename to libs/grok/src/main/resources/patterns/legacy/exim diff --git a/libs/grok/src/main/resources/patterns/firewalls b/libs/grok/src/main/resources/patterns/legacy/firewalls similarity index 100% rename from libs/grok/src/main/resources/patterns/firewalls rename to libs/grok/src/main/resources/patterns/legacy/firewalls diff --git a/libs/grok/src/main/resources/patterns/grok-patterns b/libs/grok/src/main/resources/patterns/legacy/grok-patterns similarity index 100% rename from libs/grok/src/main/resources/patterns/grok-patterns rename to libs/grok/src/main/resources/patterns/legacy/grok-patterns diff --git a/libs/grok/src/main/resources/patterns/haproxy 
b/libs/grok/src/main/resources/patterns/legacy/haproxy similarity index 100% rename from libs/grok/src/main/resources/patterns/haproxy rename to libs/grok/src/main/resources/patterns/legacy/haproxy diff --git a/libs/grok/src/main/resources/patterns/httpd b/libs/grok/src/main/resources/patterns/legacy/httpd similarity index 100% rename from libs/grok/src/main/resources/patterns/httpd rename to libs/grok/src/main/resources/patterns/legacy/httpd diff --git a/libs/grok/src/main/resources/patterns/java b/libs/grok/src/main/resources/patterns/legacy/java similarity index 100% rename from libs/grok/src/main/resources/patterns/java rename to libs/grok/src/main/resources/patterns/legacy/java diff --git a/libs/grok/src/main/resources/patterns/junos b/libs/grok/src/main/resources/patterns/legacy/junos similarity index 100% rename from libs/grok/src/main/resources/patterns/junos rename to libs/grok/src/main/resources/patterns/legacy/junos diff --git a/libs/grok/src/main/resources/patterns/linux-syslog b/libs/grok/src/main/resources/patterns/legacy/linux-syslog similarity index 100% rename from libs/grok/src/main/resources/patterns/linux-syslog rename to libs/grok/src/main/resources/patterns/legacy/linux-syslog diff --git a/libs/grok/src/main/resources/patterns/legacy/maven b/libs/grok/src/main/resources/patterns/legacy/maven new file mode 100644 index 0000000000000..f1dc808871026 --- /dev/null +++ b/libs/grok/src/main/resources/patterns/legacy/maven @@ -0,0 +1 @@ +MAVEN_VERSION (?:(\d+)\.)?(?:(\d+)\.)?(\*|\d+)(?:[.-](RELEASE|SNAPSHOT))? diff --git a/libs/grok/src/main/resources/patterns/mcollective-patterns b/libs/grok/src/main/resources/patterns/legacy/mcollective-patterns similarity index 100% rename from libs/grok/src/main/resources/patterns/mcollective-patterns rename to libs/grok/src/main/resources/patterns/legacy/mcollective-patterns diff --git a/libs/grok/src/main/resources/patterns/mongodb b/libs/grok/src/main/resources/patterns/legacy/mongodb similarity index 100% rename from libs/grok/src/main/resources/patterns/mongodb rename to libs/grok/src/main/resources/patterns/legacy/mongodb diff --git a/libs/grok/src/main/resources/patterns/nagios b/libs/grok/src/main/resources/patterns/legacy/nagios similarity index 100% rename from libs/grok/src/main/resources/patterns/nagios rename to libs/grok/src/main/resources/patterns/legacy/nagios diff --git a/libs/grok/src/main/resources/patterns/postgresql b/libs/grok/src/main/resources/patterns/legacy/postgresql similarity index 100% rename from libs/grok/src/main/resources/patterns/postgresql rename to libs/grok/src/main/resources/patterns/legacy/postgresql diff --git a/libs/grok/src/main/resources/patterns/rails b/libs/grok/src/main/resources/patterns/legacy/rails similarity index 100% rename from libs/grok/src/main/resources/patterns/rails rename to libs/grok/src/main/resources/patterns/legacy/rails diff --git a/libs/grok/src/main/resources/patterns/redis b/libs/grok/src/main/resources/patterns/legacy/redis similarity index 100% rename from libs/grok/src/main/resources/patterns/redis rename to libs/grok/src/main/resources/patterns/legacy/redis diff --git a/libs/grok/src/main/resources/patterns/ruby b/libs/grok/src/main/resources/patterns/legacy/ruby similarity index 100% rename from libs/grok/src/main/resources/patterns/ruby rename to libs/grok/src/main/resources/patterns/legacy/ruby diff --git a/libs/grok/src/main/resources/patterns/squid b/libs/grok/src/main/resources/patterns/legacy/squid similarity index 100% rename from 
libs/grok/src/main/resources/patterns/squid rename to libs/grok/src/main/resources/patterns/legacy/squid diff --git a/libs/grok/src/test/java/org/elasticsearch/grok/GrokTests.java b/libs/grok/src/test/java/org/elasticsearch/grok/GrokTests.java index df43b84b23dfb..41a56ab5b86f9 100644 --- a/libs/grok/src/test/java/org/elasticsearch/grok/GrokTests.java +++ b/libs/grok/src/test/java/org/elasticsearch/grok/GrokTests.java @@ -8,10 +8,12 @@ package org.elasticsearch.grok; +import org.elasticsearch.core.Tuple; import org.elasticsearch.grok.GrokCaptureConfig.NativeExtracterMap; import org.elasticsearch.test.ESTestCase; import java.nio.charset.StandardCharsets; +import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; @@ -27,6 +29,7 @@ import java.util.function.IntConsumer; import java.util.function.LongConsumer; +import static org.elasticsearch.core.Tuple.tuple; import static org.elasticsearch.grok.GrokCaptureType.BOOLEAN; import static org.elasticsearch.grok.GrokCaptureType.DOUBLE; import static org.elasticsearch.grok.GrokCaptureType.FLOAT; @@ -40,15 +43,26 @@ public class GrokTests extends ESTestCase { + public void testMatchWithoutCaptures() { - Grok grok = new Grok(Grok.BUILTIN_PATTERNS, "value", logger::warn); + testMatchWithoutCaptures(false); + testMatchWithoutCaptures(true); + } + + private void testMatchWithoutCaptures(boolean ecsCompatibility) { + Grok grok = new Grok(Grok.getBuiltinPatterns(ecsCompatibility), "value", logger::warn); assertThat(grok.captures("value"), equalTo(Map.of())); assertThat(grok.captures("prefix_value"), equalTo(Map.of())); assertThat(grok.captures("no_match"), nullValue()); } - public void testCaputuresBytes() { - Grok grok = new Grok(Grok.BUILTIN_PATTERNS, "%{NUMBER:n:int}", logger::warn); + public void testCapturesBytes() { + testCapturesBytes(false); + testCapturesBytes(true); + } + + private void testCapturesBytes(boolean ecsCompatibility) { + Grok grok = new Grok(Grok.getBuiltinPatterns(ecsCompatibility), "%{NUMBER:n:int}", logger::warn); byte[] utf8 = "10".getBytes(StandardCharsets.UTF_8); assertThat(captureBytes(grok, utf8, 0, utf8.length), equalTo(Map.of("n", 10))); assertThat(captureBytes(grok, utf8, 0, 1), equalTo(Map.of("n", 1))); @@ -72,79 +86,171 @@ public void testNoMatchingPatternInDictionary() { } public void testSimpleSyslogLine() { - String line = "Mar 16 00:01:25 evita postfix/smtpd[1713]: connect from camomile.cloud9.net[168.100.1.3]"; - Grok grok = new Grok(Grok.BUILTIN_PATTERNS, "%{SYSLOGLINE}", logger::warn); - assertCaptureConfig( - grok, - Map.ofEntries( - Map.entry("facility", STRING), - Map.entry("logsource", STRING), - Map.entry("message", STRING), - Map.entry("pid", STRING), - Map.entry("priority", STRING), - Map.entry("program", STRING), - Map.entry("timestamp", STRING), - Map.entry("timestamp8601", STRING) - ) + final String logSource = "evita"; + final String timestamp = "Mar 16 00:01:25"; + final String message = "connect from camomile.cloud9.net[168.100.1.3]"; + final String program = "postfix/smtpd"; + + testSimpleSyslogLine( + false, + tuple(Map.entry("facility", STRING), null), + tuple(Map.entry("logsource", STRING), logSource), + tuple(Map.entry("message", STRING), message), + tuple(Map.entry("pid", STRING), "1713"), + tuple(Map.entry("priority", STRING), null), + tuple(Map.entry("program", STRING), program), + tuple(Map.entry("timestamp", STRING), timestamp), + tuple(Map.entry("timestamp8601", STRING), null), + List.of() ); + + testSimpleSyslogLine( + true, + 
tuple(Map.entry("log.syslog.facility.code", INTEGER), null), + tuple(Map.entry("host.hostname", STRING), logSource), + tuple(Map.entry("message", STRING), message), + tuple(Map.entry("process.pid", INTEGER), 1713), + tuple(Map.entry("log.syslog.priority", INTEGER), null), + tuple(Map.entry("process.name", STRING), program), + tuple(Map.entry("timestamp", STRING), timestamp), + null, + List.of("timestamp") + ); + } + + private void testSimpleSyslogLine( + boolean ecsCompatibility, + Tuple, Object> facility, + Tuple, Object> logSource, + Tuple, Object> message, + Tuple, Object> pid, + Tuple, Object> priority, + Tuple, Object> program, + Tuple, Object> timestamp, + Tuple, Object> timestamp8601, + List acceptedDuplicates + ) { + String line = "Mar 16 00:01:25 evita postfix/smtpd[1713]: connect from camomile.cloud9.net[168.100.1.3]"; + Grok grok = new Grok(Grok.getBuiltinPatterns(ecsCompatibility), "%{SYSLOGLINE}", logger::warn); + + Map captureTypes = new HashMap<>(); + captureTypes.put(facility.v1().getKey(), facility.v1().getValue()); + captureTypes.put(logSource.v1().getKey(), logSource.v1().getValue()); + captureTypes.put(message.v1().getKey(), message.v1().getValue()); + captureTypes.put(pid.v1().getKey(), pid.v1().getValue()); + captureTypes.put(priority.v1().getKey(), priority.v1().getValue()); + captureTypes.put(program.v1().getKey(), program.v1().getValue()); + captureTypes.put(timestamp.v1().getKey(), timestamp.v1().getValue()); + if (timestamp8601 != null) { + captureTypes.put(timestamp8601.v1().getKey(), timestamp8601.v1().getValue()); + } + + assertCaptureConfig(grok, captureTypes, acceptedDuplicates); Map matches = grok.captures(line); - assertEquals("evita", matches.get("logsource")); - assertEquals("Mar 16 00:01:25", matches.get("timestamp")); - assertEquals("connect from camomile.cloud9.net[168.100.1.3]", matches.get("message")); - assertEquals("postfix/smtpd", matches.get("program")); - assertEquals("1713", matches.get("pid")); + assertEquals(logSource.v2(), matches.get(logSource.v1().getKey())); + assertEquals(timestamp.v2(), matches.get(timestamp.v1().getKey())); + assertEquals(message.v2(), matches.get(message.v1().getKey())); + assertEquals(program.v2(), matches.get(program.v1().getKey())); + assertEquals(pid.v2(), matches.get(pid.v1().getKey())); String[] logsource = new String[1]; - GrokCaptureExtracter logsourceExtracter = namedConfig(grok, "logsource").nativeExtracter(new ThrowingNativeExtracterMap() { - @Override - public GrokCaptureExtracter forString(Function, GrokCaptureExtracter> buildExtracter) { - return buildExtracter.apply(str -> logsource[0] = str); - } - }); + GrokCaptureExtracter logsourceExtracter = + namedConfig(grok, logSource.v1().getKey()) + .nativeExtracter(new ThrowingNativeExtracterMap() { + @Override + public GrokCaptureExtracter forString(Function, GrokCaptureExtracter> buildExtracter) { + return buildExtracter.apply(str -> logsource[0] = str); + } + }); assertThat(specificCapture(grok, line, logsourceExtracter), is(true)); - assertThat(logsource[0], equalTo("evita")); + assertThat(logsource[0], equalTo(logSource.v2())); } public void testSyslog5424Line() { + final String ts = "2009-06-30T18:30:00+02:00"; + final String host = "paxton.local"; + final String app = "grokdebug"; + final String sd = "[id1 foo=\\\"bar\\\"][id2 baz=\\\"something\\\"]"; + final String msg = "Hello, syslog."; + final String ver = "1"; + + testSyslog5424Line( + false, + tuple(Map.entry("syslog5424_app", STRING), app), + tuple(Map.entry("syslog5424_host", STRING), host), + 
tuple(Map.entry("syslog5424_msg", STRING), msg), + tuple(Map.entry("syslog5424_msgid", STRING), null), + tuple(Map.entry("syslog5424_pri", STRING), "191"), + tuple(Map.entry("syslog5424_proc", STRING), "4123"), + tuple(Map.entry("syslog5424_sd", STRING), sd), + tuple(Map.entry("syslog5424_ts", STRING), ts), + tuple(Map.entry("syslog5424_ver", STRING), ver) + ); + testSyslog5424Line( + true, + tuple(Map.entry("process.name", STRING), app), + tuple(Map.entry("host.hostname", STRING), host), + tuple(Map.entry("message", STRING), msg), + tuple(Map.entry("event.code", STRING), null), + tuple(Map.entry("log.syslog.priority", INTEGER), 191), + tuple(Map.entry("process.pid", INTEGER), 4123), + tuple(Map.entry("system.syslog.structured_data", STRING), sd), + tuple(Map.entry("timestamp", STRING), ts), + tuple(Map.entry("system.syslog.version", STRING), ver) + ); + } + + private void testSyslog5424Line( + boolean ecsCompatibility, + Tuple, Object> app, + Tuple, Object> host, + Tuple, Object> msg, + Tuple, Object> msgid, + Tuple, Object> pri, + Tuple, Object> proc, + Tuple, Object> sd, + Tuple, Object> ts, + Tuple, Object> ver + ) { String line = "<191>1 2009-06-30T18:30:00+02:00 paxton.local grokdebug 4123 - [id1 foo=\\\"bar\\\"][id2 baz=\\\"something\\\"] " + - "Hello, syslog."; - Grok grok = new Grok(Grok.BUILTIN_PATTERNS, "%{SYSLOG5424LINE}", logger::warn); + "Hello, syslog."; + Grok grok = new Grok(Grok.getBuiltinPatterns(ecsCompatibility), "%{SYSLOG5424LINE}", logger::warn); assertCaptureConfig( grok, - Map.ofEntries( - Map.entry("syslog5424_app", STRING), - Map.entry("syslog5424_host", STRING), - Map.entry("syslog5424_msg", STRING), - Map.entry("syslog5424_msgid", STRING), - Map.entry("syslog5424_pri", STRING), - Map.entry("syslog5424_proc", STRING), - Map.entry("syslog5424_sd", STRING), - Map.entry("syslog5424_ts", STRING), - Map.entry("syslog5424_ver", STRING) - ) + Map.ofEntries(app.v1(), host.v1(), msg.v1(), msgid.v1(), pri.v1(), proc.v1(), sd.v1(), ts.v1(), ver.v1()) ); Map matches = grok.captures(line); - assertEquals("191", matches.get("syslog5424_pri")); - assertEquals("1", matches.get("syslog5424_ver")); - assertEquals("2009-06-30T18:30:00+02:00", matches.get("syslog5424_ts")); - assertEquals("paxton.local", matches.get("syslog5424_host")); - assertEquals("grokdebug", matches.get("syslog5424_app")); - assertEquals("4123", matches.get("syslog5424_proc")); - assertEquals(null, matches.get("syslog5424_msgid")); - assertEquals("[id1 foo=\\\"bar\\\"][id2 baz=\\\"something\\\"]", matches.get("syslog5424_sd")); - assertEquals("Hello, syslog.", matches.get("syslog5424_msg")); + assertEquals(pri.v2(), matches.get(pri.v1().getKey())); + assertEquals(ver.v2(), matches.get(ver.v1().getKey())); + assertEquals(ts.v2(), matches.get(ts.v1().getKey())); + assertEquals(host.v2(), matches.get(host.v1().getKey())); + assertEquals(app.v2(), matches.get(app.v1().getKey())); + assertEquals(proc.v2(), matches.get(proc.v1().getKey())); + assertEquals(msgid.v2(), matches.get(msgid.v1().getKey())); + assertEquals(sd.v2(), matches.get(sd.v1().getKey())); + assertEquals(msg.v2(), matches.get(msg.v1().getKey())); } public void testDatePattern() { + testDatePattern(false); + testDatePattern(true); + } + + private void testDatePattern(boolean ecsCompatibility) { String line = "fancy 12-12-12 12:12:12"; - Grok grok = new Grok(Grok.BUILTIN_PATTERNS, "(?%{DATE_EU} %{TIME})", logger::warn); + Grok grok = new Grok(Grok.getBuiltinPatterns(ecsCompatibility), "(?%{DATE_EU} %{TIME})", logger::warn); assertCaptureConfig(grok, 
Map.of("timestamp", STRING)); Map matches = grok.captures(line); assertEquals("12-12-12 12:12:12", matches.get("timestamp")); } public void testNilCoercedValues() { - Grok grok = new Grok(Grok.BUILTIN_PATTERNS, "test (N/A|%{BASE10NUM:duration:float}ms)", logger::warn); + testNilCoercedValues(false); + testNilCoercedValues(true); + } + + private void testNilCoercedValues(boolean ecsCompatibility) { + Grok grok = new Grok(Grok.getBuiltinPatterns(ecsCompatibility), "test (N/A|%{BASE10NUM:duration:float}ms)", logger::warn); assertCaptureConfig(grok, Map.of("duration", FLOAT)); Map matches = grok.captures("test 28.4ms"); assertEquals(28.4f, matches.get("duration")); @@ -153,7 +259,12 @@ public void testNilCoercedValues() { } public void testNilWithNoCoercion() { - Grok grok = new Grok(Grok.BUILTIN_PATTERNS, "test (N/A|%{BASE10NUM:duration}ms)", logger::warn); + testNilWithNoCoercion(false); + testNilWithNoCoercion(true); + } + + private void testNilWithNoCoercion(boolean ecsCompatibility) { + Grok grok = new Grok(Grok.getBuiltinPatterns(ecsCompatibility), "test (N/A|%{BASE10NUM:duration}ms)", logger::warn); assertCaptureConfig(grok, Map.of("duration", STRING)); Map matches = grok.captures("test 28.4ms"); assertEquals("28.4", matches.get("duration")); @@ -162,9 +273,17 @@ public void testNilWithNoCoercion() { } public void testUnicodeSyslog() { - Grok grok = new Grok(Grok.BUILTIN_PATTERNS, "<%{POSINT:syslog_pri}>%{SPACE}%{SYSLOGTIMESTAMP:syslog_timestamp} " + + testUnicodeSyslog(false); + testUnicodeSyslog(true); + } + + private void testUnicodeSyslog(boolean ecsCompatibility) { + Grok grok = new Grok( + Grok.getBuiltinPatterns(ecsCompatibility), + "<%{POSINT:syslog_pri}>%{SPACE}%{SYSLOGTIMESTAMP:syslog_timestamp} " + "%{SYSLOGHOST:syslog_hostname} %{PROG:syslog_program}(:?)(?:\\[%{GREEDYDATA:syslog_pid}\\])?(:?) 
" + - "%{GREEDYDATA:syslog_message}", logger::warn); + "%{GREEDYDATA:syslog_message}", logger::warn + ); assertCaptureConfig( grok, Map.ofEntries( @@ -185,21 +304,36 @@ public void testUnicodeSyslog() { } public void testNamedFieldsWithWholeTextMatch() { - Grok grok = new Grok(Grok.BUILTIN_PATTERNS, "%{DATE_EU:stimestamp}", logger::warn); + testNamedFieldsWithWholeTextMatch(false); + testNamedFieldsWithWholeTextMatch(true); + } + + private void testNamedFieldsWithWholeTextMatch(boolean ecsCompatibility) { + Grok grok = new Grok(Grok.getBuiltinPatterns(ecsCompatibility), "%{DATE_EU:stimestamp}", logger::warn); assertCaptureConfig(grok, Map.of("stimestamp", STRING)); Map matches = grok.captures("11/01/01"); assertThat(matches.get("stimestamp"), equalTo("11/01/01")); } public void testWithOniguramaNamedCaptures() { - Grok grok = new Grok(Grok.BUILTIN_PATTERNS, "(?\\w+)", logger::warn); + testWithOniguramaNamedCaptures(false); + testWithOniguramaNamedCaptures(true); + } + + private void testWithOniguramaNamedCaptures(boolean ecsCompatibility) { + Grok grok = new Grok(Grok.getBuiltinPatterns(ecsCompatibility), "(?\\w+)", logger::warn); assertCaptureConfig(grok, Map.of("foo", STRING)); Map matches = grok.captures("hello world"); assertThat(matches.get("foo"), equalTo("hello")); } public void testISO8601() { - Grok grok = new Grok(Grok.BUILTIN_PATTERNS, "^%{TIMESTAMP_ISO8601}$", logger::warn); + testISO8601(false); + testISO8601(true); + } + + private void testISO8601(boolean ecsCompatibility) { + Grok grok = new Grok(Grok.getBuiltinPatterns(ecsCompatibility), "^%{TIMESTAMP_ISO8601}$", logger::warn); assertCaptureConfig(grok, Map.of()); List timeMessages = Arrays.asList( "2001-01-01T00:00:00", @@ -224,7 +358,12 @@ public void testISO8601() { } public void testNotISO8601() { - Grok grok = new Grok(Grok.BUILTIN_PATTERNS, "^%{TIMESTAMP_ISO8601}$", logger::warn); + testNotISO8601(false, List.of("2001-01-01T0:00:00")); // legacy patterns do not permit single-digit hours + testNotISO8601(true, List.of()); + } + + private void testNotISO8601(boolean ecsCompatibility, List additionalCases) { + Grok grok = new Grok(Grok.getBuiltinPatterns(ecsCompatibility), "^%{TIMESTAMP_ISO8601}$", logger::warn); assertCaptureConfig(grok, Map.of()); List timeMessages = Arrays.asList( "2001-13-01T00:00:00", // invalid month @@ -234,7 +373,6 @@ public void testNotISO8601() { "2001-01-aT00:00:00", // invalid day "2001-01-1aT00:00:00", // invalid day "2001-01-01Ta0:00:00", // invalid hour - "2001-01-01T0:00:00", // invalid hour "2001-01-01T25:00:00", // invalid hour "2001-01-01T01:60:00", // invalid minute "2001-01-01T00:aa:00", // invalid minute @@ -250,7 +388,9 @@ public void testNotISO8601() { "2001-01-01T00:00:00-2500", // invalid timezone "2001-01-01T00:00:00-00:61" // invalid timezone ); - for (String msg : timeMessages) { + List timesToTest = new ArrayList<>(timeMessages); + timesToTest.addAll(additionalCases); + for (String msg : timesToTest) { assertThat(grok.match(msg), is(false)); } } @@ -350,8 +490,13 @@ public void testCircularSelfReference() { } public void testBooleanCaptures() { + testBooleanCaptures(false); + testBooleanCaptures(true); + } + + private void testBooleanCaptures(boolean ecsCompatibility) { String pattern = "%{WORD:name}=%{WORD:status:boolean}"; - Grok g = new Grok(Grok.BUILTIN_PATTERNS, pattern, logger::warn); + Grok g = new Grok(Grok.getBuiltinPatterns(ecsCompatibility), pattern, logger::warn); assertCaptureConfig(g, Map.of("name", STRING, "status", BOOLEAN)); String text = "active=true"; @@ 
-451,42 +596,101 @@ public void testGarbageTypeNameBecomesString() { } public void testApacheLog() { + final String agent = "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/30.0.1599.12785 " + + "YaBrowser/13.12.1599.12785 Safari/537.36"; + final String clientIp = "31.184.238.164"; + final String timestamp = "24/Jul/2014:05:35:37 +0530"; + final String verb = "GET"; + final String request = "/logs/access.log"; + final String httpVersion = "1.0"; + final String referrer = "http://8rursodiol.enjin.com"; + + testApacheLog( + false, + tuple(Map.entry("agent", STRING), "\"" + agent + "\""), + tuple(Map.entry("auth", STRING), "-"), + tuple(Map.entry("bytes", STRING), "69849"), + tuple(Map.entry("clientip", STRING), clientIp), + tuple(Map.entry("httpversion", STRING), httpVersion), + tuple(Map.entry("ident", STRING), "-"), + tuple(Map.entry("rawrequest", STRING), null), + tuple(Map.entry("referrer", STRING), "\"" + referrer + "\""), + tuple(Map.entry("request", STRING), request), + tuple(Map.entry("timestamp", STRING), timestamp), + tuple(Map.entry("verb", STRING), verb), + List.of(tuple(Map.entry("response", STRING), "200")) + ); + testApacheLog( + true, + tuple(Map.entry("user_agent.original", STRING), agent), + tuple(Map.entry("user.name", STRING), null), + tuple(Map.entry("http.response.body.bytes", LONG), 69849L), + tuple(Map.entry("source.address", STRING), clientIp), + tuple(Map.entry("http.version", STRING), httpVersion), + tuple(Map.entry("apache.access.user.identity", STRING), null), + tuple(Map.entry("http.response.status_code", INTEGER), 200), + tuple(Map.entry("http.request.referrer", STRING), referrer), + tuple(Map.entry("url.original", STRING), request), + tuple(Map.entry("timestamp", STRING), timestamp), + tuple(Map.entry("http.request.method", STRING), verb), + List.of() + ); + } + + public void testApacheLog( + boolean ecsCompatibility, + Tuple, Object> agent, + Tuple, Object> auth, + Tuple, Object> bytes, + Tuple, Object> clientIp, + Tuple, Object> httpVersion, + Tuple, Object> ident, + Tuple, Object> rawRequest, + Tuple, Object> referrer, + Tuple, Object> request, + Tuple, Object> timestamp, + Tuple, Object> verb, + List, Object>> additionalFields + ) { String logLine = "31.184.238.164 - - [24/Jul/2014:05:35:37 +0530] \"GET /logs/access.log HTTP/1.0\" 200 69849 " + "\"http://8rursodiol.enjin.com\" \"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) " + "Chrome/30.0.1599.12785 YaBrowser/13.12.1599.12785 Safari/537.36\" \"www.dlwindianrailways.com\""; - Grok grok = new Grok(Grok.BUILTIN_PATTERNS, "%{COMBINEDAPACHELOG}", logger::warn); - assertCaptureConfig( - grok, - Map.ofEntries( - Map.entry("agent", STRING), - Map.entry("auth", STRING), - Map.entry("bytes", STRING), - Map.entry("clientip", STRING), - Map.entry("httpversion", STRING), - Map.entry("ident", STRING), - Map.entry("rawrequest", STRING), - Map.entry("referrer", STRING), - Map.entry("request", STRING), - Map.entry("response", STRING), - Map.entry("timestamp", STRING), - Map.entry("verb", STRING) - ) - ); + Grok grok = new Grok(Grok.getBuiltinPatterns(ecsCompatibility), "%{COMBINEDAPACHELOG}", logger::warn); + + Map captureTypes = new HashMap<>(); + captureTypes.put(agent.v1().getKey(), agent.v1().getValue()); + captureTypes.put(auth.v1().getKey(), auth.v1().getValue()); + captureTypes.put(bytes.v1().getKey(), bytes.v1().getValue()); + captureTypes.put(clientIp.v1().getKey(), clientIp.v1().getValue()); + captureTypes.put(httpVersion.v1().getKey(), 
httpVersion.v1().getValue()); + captureTypes.put(ident.v1().getKey(), ident.v1().getValue()); + captureTypes.put(rawRequest.v1().getKey(), rawRequest.v1().getValue()); + captureTypes.put(referrer.v1().getKey(), referrer.v1().getValue()); + captureTypes.put(request.v1().getKey(), request.v1().getValue()); + captureTypes.put(timestamp.v1().getKey(), timestamp.v1().getValue()); + captureTypes.put(verb.v1().getKey(), verb.v1().getValue()); + for (var additionalField : additionalFields) { + captureTypes.put(additionalField.v1().getKey(), additionalField.v1().getValue()); + } + + assertCaptureConfig(grok, captureTypes); Map matches = grok.captures(logLine); - assertEquals("31.184.238.164", matches.get("clientip")); - assertEquals("-", matches.get("ident")); - assertEquals("-", matches.get("auth")); - assertEquals("24/Jul/2014:05:35:37 +0530", matches.get("timestamp")); - assertEquals("GET", matches.get("verb")); - assertEquals("/logs/access.log", matches.get("request")); - assertEquals("1.0", matches.get("httpversion")); - assertEquals("200", matches.get("response")); - assertEquals("69849", matches.get("bytes")); - assertEquals("\"http://8rursodiol.enjin.com\"", matches.get("referrer")); + assertEquals(clientIp.v2(), matches.get(clientIp.v1().getKey())); + assertEquals(ident.v2(), matches.get(ident.v1().getKey())); + assertEquals(auth.v2(), matches.get(auth.v1().getKey())); + assertEquals(timestamp.v2(), matches.get(timestamp.v1().getKey())); + assertEquals(verb.v2(), matches.get(verb.v1().getKey())); + assertEquals(request.v2(), matches.get(request.v1().getKey())); + assertEquals(httpVersion.v2(), matches.get(httpVersion.v1().getKey())); + assertEquals(bytes.v2(), matches.get(bytes.v1().getKey())); + assertEquals(referrer.v2(), matches.get(referrer.v1().getKey())); assertEquals(null, matches.get("port")); - assertEquals("\"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/30.0.1599.12785 " + - "YaBrowser/13.12.1599.12785 Safari/537.36\"", matches.get("agent")); + assertEquals(agent.v2(), matches.get(agent.v1().getKey())); + assertEquals(rawRequest.v2(), matches.get(rawRequest.v1().getKey())); + for (var additionalField : additionalFields) { + assertEquals(additionalField.v2(), matches.get(additionalField.v1().getKey())); + } } public void testComplete() { @@ -587,6 +791,11 @@ public void testMultipleNamedCapturesWithSameName() { } public void testExponentialExpressions() { + testExponentialExpressions(false); + testExponentialExpressions(true); + } + + private void testExponentialExpressions(boolean ecsCompatibility) { AtomicBoolean run = new AtomicBoolean(true); // to avoid a lingering thread when test has completed String grokPattern = "Bonsuche mit folgender Anfrage: Belegart->\\[%{WORD:param2},(?(\\s*%{NOTSPACE})*)\\] " + @@ -606,8 +815,12 @@ public void testExponentialExpressions() { }); t.start(); }; - Grok grok = new Grok(Grok.BUILTIN_PATTERNS, grokPattern, MatcherWatchdog.newInstance(10, 200, System::currentTimeMillis, scheduler), - logger::warn); + Grok grok = new Grok( + Grok.getBuiltinPatterns(ecsCompatibility), + grokPattern, + MatcherWatchdog.newInstance(10, 200, System::currentTimeMillis, scheduler), + logger::warn + ); Exception e = expectThrows(RuntimeException.class, () -> grok.captures(logLine)); run.set(false); assertThat(e.getMessage(), equalTo("grok pattern matching was interrupted after [200] ms")); @@ -647,24 +860,44 @@ public void testAlphanumericFieldName() { } public void testUnsupportedBracketsInFieldName() { - Grok grok = new 
Grok(Grok.BUILTIN_PATTERNS, "%{WORD:unsuppo(r)ted}", logger::warn); + testUnsupportedBracketsInFieldName(false); + testUnsupportedBracketsInFieldName(true); + } + + private void testUnsupportedBracketsInFieldName(boolean ecsCompatibility) { + Grok grok = new Grok(Grok.getBuiltinPatterns(ecsCompatibility), "%{WORD:unsuppo(r)ted}", logger::warn); Map matches = grok.captures("line"); assertNull(matches); } public void testJavaClassPatternWithUnderscore() { - Grok grok = new Grok(Grok.BUILTIN_PATTERNS, "%{JAVACLASS}", logger::warn); + testJavaClassPatternWithUnderscore(false); + testJavaClassPatternWithUnderscore(true); + } + + private void testJavaClassPatternWithUnderscore(boolean ecsCompatibility) { + Grok grok = new Grok(Grok.getBuiltinPatterns(ecsCompatibility), "%{JAVACLASS}", logger::warn); assertThat(grok.match("Test_Class.class"), is(true)); } public void testJavaFilePatternWithSpaces() { - Grok grok = new Grok(Grok.BUILTIN_PATTERNS, "%{JAVAFILE}", logger::warn); + testJavaFilePatternWithSpaces(false); + testJavaFilePatternWithSpaces(true); + } + + private void testJavaFilePatternWithSpaces(boolean ecsCompatibility) { + Grok grok = new Grok(Grok.getBuiltinPatterns(ecsCompatibility), "%{JAVAFILE}", logger::warn); assertThat(grok.match("Test Class.java"), is(true)); } - public void testLogCallBack(){ + public void testLogCallBack() { + testLogCallBack(false); + testLogCallBack(true); + } + + private void testLogCallBack(boolean ecsCompatibility) { AtomicReference message = new AtomicReference<>(); - Grok grok = new Grok(Grok.BUILTIN_PATTERNS, ".*\\[.*%{SPACE}*\\].*", message::set); + Grok grok = new Grok(Grok.getBuiltinPatterns(ecsCompatibility), ".*\\[.*%{SPACE}*\\].*", message::set); grok.match("[foo]"); //this message comes from Joni, so updates to Joni may change the expectation assertThat(message.get(), containsString("regular expression has redundant nested repeat operator")); @@ -672,16 +905,25 @@ public void testLogCallBack(){ private void assertGrokedField(String fieldName) { String line = "foo"; - Grok grok = new Grok(Grok.BUILTIN_PATTERNS, "%{WORD:" + fieldName + "}", logger::warn); - Map matches = grok.captures(line); - assertEquals(line, matches.get(fieldName)); + // test both with and without ECS compatibility + for (boolean ecsCompatibility : new boolean[]{false, true}) { + Grok grok = new Grok(Grok.getBuiltinPatterns(ecsCompatibility), "%{WORD:" + fieldName + "}", logger::warn); + Map matches = grok.captures(line); + assertEquals(line, matches.get(fieldName)); + } } private void assertCaptureConfig(Grok grok, Map nameToType) { + assertCaptureConfig(grok, nameToType, List.of()); + } + + private void assertCaptureConfig(Grok grok, Map nameToType, List acceptedDuplicates) { Map fromGrok = new TreeMap<>(); for (GrokCaptureConfig config : grok.captureConfig()) { Object old = fromGrok.put(config.name(), config.type()); - assertThat("duplicates not allowed", old, nullValue()); + if (acceptedDuplicates.contains(config.name()) == false) { + assertThat("duplicates not allowed", old, nullValue()); + } } assertThat(fromGrok, equalTo(new TreeMap<>(nameToType))); } diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/GrokProcessor.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/GrokProcessor.java index 7e71519223214..73f26b895ff57 100644 --- a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/GrokProcessor.java +++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/GrokProcessor.java @@ -26,6 
+26,8 @@ public final class GrokProcessor extends AbstractProcessor { public static final String TYPE = "grok"; + public static final String DEFAULT_ECS_COMPATIBILITY_MODE = Grok.ECS_COMPATIBILITY_MODES[0]; + private static final String PATTERN_MATCH_KEY = "_ingest._grok_match_index"; private static final Logger logger = LogManager.getLogger(GrokProcessor.class); @@ -127,11 +129,9 @@ static String combinePatterns(List patterns, boolean traceMatch) { public static final class Factory implements Processor.Factory { - private final Map builtinPatterns; private final MatcherWatchdog matcherWatchdog; - public Factory(Map builtinPatterns, MatcherWatchdog matcherWatchdog) { - this.builtinPatterns = builtinPatterns; + public Factory(MatcherWatchdog matcherWatchdog) { this.matcherWatchdog = matcherWatchdog; } @@ -142,12 +142,19 @@ public GrokProcessor create(Map registry, String proc List matchPatterns = ConfigurationUtils.readList(TYPE, processorTag, config, "patterns"); boolean traceMatch = ConfigurationUtils.readBooleanProperty(TYPE, processorTag, config, "trace_match", false); boolean ignoreMissing = ConfigurationUtils.readBooleanProperty(TYPE, processorTag, config, "ignore_missing", false); + String ecsCompatibility = + ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "ecs_compatibility", DEFAULT_ECS_COMPATIBILITY_MODE); + if (Grok.isValidEcsCompatibilityMode(ecsCompatibility) == false) { + throw newConfigurationException(TYPE, processorTag, "ecs_compatibility", "unsupported mode '" + ecsCompatibility + "'"); + } if (matchPatterns.isEmpty()) { throw newConfigurationException(TYPE, processorTag, "patterns", "List of patterns must not be empty"); } Map customPatternBank = ConfigurationUtils.readOptionalMap(TYPE, processorTag, config, "pattern_definitions"); - Map patternBank = new HashMap<>(builtinPatterns); + Map patternBank = new HashMap<>( + Grok.getBuiltinPatterns(ecsCompatibility) + ); if (customPatternBank != null) { patternBank.putAll(customPatternBank); } diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/GrokProcessorGetAction.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/GrokProcessorGetAction.java index 0ec7b237dea7e..b8e0aea827a29 100644 --- a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/GrokProcessorGetAction.java +++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/GrokProcessorGetAction.java @@ -7,6 +7,7 @@ */ package org.elasticsearch.ingest.common; +import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestValidationException; @@ -46,14 +47,19 @@ private GrokProcessorGetAction() { public static class Request extends ActionRequest { private final boolean sorted; + private final String ecsCompatibility; - public Request(boolean sorted) { + public Request(boolean sorted, String ecsCompatibility) { this.sorted = sorted; + this.ecsCompatibility = ecsCompatibility; } Request(StreamInput in) throws IOException { super(in); this.sorted = in.readBoolean(); + this.ecsCompatibility = in.getVersion().onOrAfter(Version.V_8_0_0) + ? 
in.readString() + : GrokProcessor.DEFAULT_ECS_COMPATIBILITY_MODE; } @Override @@ -65,11 +71,18 @@ public ActionRequestValidationException validate() { public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); out.writeBoolean(sorted); + if (out.getVersion().onOrAfter(Version.V_8_0_0)) { + out.writeString(ecsCompatibility); + } } public boolean sorted() { return sorted; } + + public String getEcsCompatibility() { + return ecsCompatibility; + } } public static class Response extends ActionResponse implements ToXContentObject { @@ -105,25 +118,38 @@ public void writeTo(StreamOutput out) throws IOException { public static class TransportAction extends HandledTransportAction { - private final Map grokPatterns; - private final Map sortedGrokPatterns; + private final Map legacyGrokPatterns; + private final Map sortedLegacyGrokPatterns; + private final Map ecsV1GrokPatterns; + private final Map sortedEcsV1GrokPatterns; @Inject public TransportAction(TransportService transportService, ActionFilters actionFilters) { - this(transportService, actionFilters, Grok.BUILTIN_PATTERNS); + this(transportService, actionFilters, Grok.getBuiltinPatterns(false), Grok.getBuiltinPatterns(true)); } // visible for testing - TransportAction(TransportService transportService, ActionFilters actionFilters, Map grokPatterns) { + TransportAction( + TransportService transportService, + ActionFilters actionFilters, + Map legacyGrokPatterns, + Map ecsV1GrokPatterns) { super(NAME, transportService, actionFilters, Request::new); - this.grokPatterns = grokPatterns; - this.sortedGrokPatterns = new TreeMap<>(this.grokPatterns); + this.legacyGrokPatterns = legacyGrokPatterns; + this.sortedLegacyGrokPatterns = new TreeMap<>(this.legacyGrokPatterns); + this.ecsV1GrokPatterns = ecsV1GrokPatterns; + this.sortedEcsV1GrokPatterns = new TreeMap<>(this.ecsV1GrokPatterns); } @Override protected void doExecute(Task task, Request request, ActionListener listener) { try { - listener.onResponse(new Response(request.sorted() ? sortedGrokPatterns : grokPatterns)); + listener.onResponse(new Response( + request.getEcsCompatibility().equals(Grok.ECS_COMPATIBILITY_MODES[0]) + ? request.sorted() ? sortedLegacyGrokPatterns : legacyGrokPatterns + : request.sorted() ? 
sortedEcsV1GrokPatterns : ecsV1GrokPatterns + ) + ); } catch (Exception e) { listener.onFailure(e); } @@ -145,7 +171,11 @@ public String getName() { @Override protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) { boolean sorted = request.paramAsBoolean("s", false); - Request grokPatternsRequest = new Request(sorted); + String ecsCompatibility = request.param("ecs_compatibility", GrokProcessor.DEFAULT_ECS_COMPATIBILITY_MODE); + if (Grok.isValidEcsCompatibilityMode(ecsCompatibility) == false) { + throw new IllegalArgumentException("unsupported ECS compatibility mode [" + ecsCompatibility + "]"); + } + Request grokPatternsRequest = new Request(sorted, ecsCompatibility); return channel -> client.executeLocally(INSTANCE, grokPatternsRequest, new RestToXContentListener<>(channel)); } } diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/IngestCommonPlugin.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/IngestCommonPlugin.java index 9bb5a0c3fa430..945ce13957124 100644 --- a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/IngestCommonPlugin.java +++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/IngestCommonPlugin.java @@ -18,7 +18,6 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsFilter; import org.elasticsearch.core.TimeValue; -import org.elasticsearch.grok.Grok; import org.elasticsearch.grok.MatcherWatchdog; import org.elasticsearch.ingest.DropProcessor; import org.elasticsearch.ingest.PipelineProcessor; @@ -66,7 +65,7 @@ public Map getProcessors(Processor.Parameters paramet entry(ForEachProcessor.TYPE, new ForEachProcessor.Factory(parameters.scriptService)), entry(DateIndexNameProcessor.TYPE, new DateIndexNameProcessor.Factory(parameters.scriptService)), entry(SortProcessor.TYPE, new SortProcessor.Factory()), - entry(GrokProcessor.TYPE, new GrokProcessor.Factory(Grok.BUILTIN_PATTERNS, createGrokThreadWatchdog(parameters))), + entry(GrokProcessor.TYPE, new GrokProcessor.Factory(createGrokThreadWatchdog(parameters))), entry(ScriptProcessor.TYPE, new ScriptProcessor.Factory(parameters.scriptService)), entry(DotExpanderProcessor.TYPE, new DotExpanderProcessor.Factory()), entry(JsonProcessor.TYPE, new JsonProcessor.Factory()), diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/GrokProcessorFactoryTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/GrokProcessorFactoryTests.java index 1bd85c2aca913..c9d0c0f49e6ee 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/GrokProcessorFactoryTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/GrokProcessorFactoryTests.java @@ -23,7 +23,7 @@ public class GrokProcessorFactoryTests extends ESTestCase { public void testBuild() throws Exception { - GrokProcessor.Factory factory = new GrokProcessor.Factory(Collections.emptyMap(), MatcherWatchdog.noop()); + GrokProcessor.Factory factory = new GrokProcessor.Factory(MatcherWatchdog.noop()); Map config = new HashMap<>(); config.put("field", "_field"); @@ -37,7 +37,7 @@ public void testBuild() throws Exception { } public void testBuildWithIgnoreMissing() throws Exception { - GrokProcessor.Factory factory = new GrokProcessor.Factory(Collections.emptyMap(), MatcherWatchdog.noop()); + GrokProcessor.Factory factory = new GrokProcessor.Factory(MatcherWatchdog.noop()); Map config = new HashMap<>(); config.put("field", 
"_field"); @@ -52,7 +52,7 @@ public void testBuildWithIgnoreMissing() throws Exception { } public void testBuildMissingField() throws Exception { - GrokProcessor.Factory factory = new GrokProcessor.Factory(Collections.emptyMap(), MatcherWatchdog.noop()); + GrokProcessor.Factory factory = new GrokProcessor.Factory(MatcherWatchdog.noop()); Map config = new HashMap<>(); config.put("patterns", Collections.singletonList("(?\\w+)")); ElasticsearchParseException e = expectThrows(ElasticsearchParseException.class, () -> factory.create(null, null, null, config)); @@ -60,7 +60,7 @@ public void testBuildMissingField() throws Exception { } public void testBuildMissingPatterns() throws Exception { - GrokProcessor.Factory factory = new GrokProcessor.Factory(Collections.emptyMap(), MatcherWatchdog.noop()); + GrokProcessor.Factory factory = new GrokProcessor.Factory(MatcherWatchdog.noop()); Map config = new HashMap<>(); config.put("field", "foo"); ElasticsearchParseException e = expectThrows(ElasticsearchParseException.class, () -> factory.create(null, null, null, config)); @@ -68,7 +68,7 @@ public void testBuildMissingPatterns() throws Exception { } public void testBuildEmptyPatternsList() throws Exception { - GrokProcessor.Factory factory = new GrokProcessor.Factory(Collections.emptyMap(), MatcherWatchdog.noop()); + GrokProcessor.Factory factory = new GrokProcessor.Factory(MatcherWatchdog.noop()); Map config = new HashMap<>(); config.put("field", "foo"); config.put("patterns", Collections.emptyList()); @@ -77,7 +77,7 @@ public void testBuildEmptyPatternsList() throws Exception { } public void testCreateWithCustomPatterns() throws Exception { - GrokProcessor.Factory factory = new GrokProcessor.Factory(Collections.emptyMap(), MatcherWatchdog.noop()); + GrokProcessor.Factory factory = new GrokProcessor.Factory(MatcherWatchdog.noop()); Map config = new HashMap<>(); config.put("field", "_field"); @@ -90,7 +90,7 @@ public void testCreateWithCustomPatterns() throws Exception { } public void testCreateWithInvalidPattern() throws Exception { - GrokProcessor.Factory factory = new GrokProcessor.Factory(Collections.emptyMap(), MatcherWatchdog.noop()); + GrokProcessor.Factory factory = new GrokProcessor.Factory(MatcherWatchdog.noop()); Map config = new HashMap<>(); config.put("field", "_field"); config.put("patterns", Collections.singletonList("[")); @@ -99,7 +99,7 @@ public void testCreateWithInvalidPattern() throws Exception { } public void testCreateWithInvalidPatternDefinition() throws Exception { - GrokProcessor.Factory factory = new GrokProcessor.Factory(Collections.emptyMap(), MatcherWatchdog.noop()); + GrokProcessor.Factory factory = new GrokProcessor.Factory(MatcherWatchdog.noop()); Map config = new HashMap<>(); config.put("field", "_field"); config.put("patterns", Collections.singletonList("%{MY_PATTERN:name}!")); @@ -108,4 +108,15 @@ public void testCreateWithInvalidPatternDefinition() throws Exception { assertThat(e.getMessage(), equalTo("[patterns] Invalid regex pattern found in: [%{MY_PATTERN:name}!]. 
premature end of char-class")); } + + public void testCreateWithInvalidEcsCompatibilityMode() throws Exception { + GrokProcessor.Factory factory = new GrokProcessor.Factory(MatcherWatchdog.noop()); + Map config = new HashMap<>(); + config.put("field", "_field"); + config.put("patterns", Collections.singletonList("(?\\w+)")); + String invalidEcsMode = randomAlphaOfLength(3); + config.put("ecs_compatibility", invalidEcsMode); + ElasticsearchParseException e = expectThrows(ElasticsearchParseException.class, () -> factory.create(null, null, null, config)); + assertThat(e.getMessage(), equalTo("[ecs_compatibility] unsupported mode '" + invalidEcsMode + "'")); + } } diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/GrokProcessorGetActionTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/GrokProcessorGetActionTests.java index cec1f08270df4..efae348adea33 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/GrokProcessorGetActionTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/GrokProcessorGetActionTests.java @@ -17,6 +17,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.json.JsonXContent; +import org.elasticsearch.grok.Grok; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.transport.TransportService; @@ -33,10 +34,11 @@ import static org.mockito.Mockito.mock; public class GrokProcessorGetActionTests extends ESTestCase { - private static final Map TEST_PATTERNS = Map.of("PATTERN2", "foo2", "PATTERN1", "foo1"); + private static final Map LEGACY_TEST_PATTERNS = Map.of("PATTERN2", "foo2", "PATTERN1", "foo1"); + private static final Map ECS_TEST_PATTERNS = Map.of("ECS_PATTERN2", "foo2", "ECS_PATTERN1", "foo1"); public void testRequest() throws Exception { - GrokProcessorGetAction.Request request = new GrokProcessorGetAction.Request(false); + GrokProcessorGetAction.Request request = new GrokProcessorGetAction.Request(false, GrokProcessor.DEFAULT_ECS_COMPATIBILITY_MODE); BytesStreamOutput out = new BytesStreamOutput(); request.writeTo(out); StreamInput streamInput = out.bytes().streamInput(); @@ -45,55 +47,96 @@ public void testRequest() throws Exception { } public void testResponseSerialization() throws Exception { - GrokProcessorGetAction.Response response = new GrokProcessorGetAction.Response(TEST_PATTERNS); + GrokProcessorGetAction.Response response = new GrokProcessorGetAction.Response(LEGACY_TEST_PATTERNS); BytesStreamOutput out = new BytesStreamOutput(); response.writeTo(out); StreamInput streamInput = out.bytes().streamInput(); GrokProcessorGetAction.Response otherResponse = new GrokProcessorGetAction.Response(streamInput); - assertThat(response.getGrokPatterns(), equalTo(TEST_PATTERNS)); + assertThat(response.getGrokPatterns(), equalTo(LEGACY_TEST_PATTERNS)); assertThat(response.getGrokPatterns(), equalTo(otherResponse.getGrokPatterns())); } public void testResponseSorting() { - List sortedKeys = new ArrayList<>(TEST_PATTERNS.keySet()); + List sortedKeys = new ArrayList<>(LEGACY_TEST_PATTERNS.keySet()); Collections.sort(sortedKeys); - GrokProcessorGetAction.TransportAction transportAction = - new GrokProcessorGetAction.TransportAction(mock(TransportService.class), mock(ActionFilters.class), TEST_PATTERNS); + GrokProcessorGetAction.TransportAction transportAction = new GrokProcessorGetAction.TransportAction( + mock(TransportService.class), + 
mock(ActionFilters.class), + LEGACY_TEST_PATTERNS, + ECS_TEST_PATTERNS + ); GrokProcessorGetAction.Response[] receivedResponse = new GrokProcessorGetAction.Response[1]; - transportAction.doExecute(null, new GrokProcessorGetAction.Request(true), new ActionListener<>() { - @Override - public void onResponse(GrokProcessorGetAction.Response response) { + transportAction.doExecute( + null, + new GrokProcessorGetAction.Request(true, GrokProcessor.DEFAULT_ECS_COMPATIBILITY_MODE), + new ActionListener<>() { + @Override + public void onResponse(GrokProcessorGetAction.Response response) { receivedResponse[0] = response; } - @Override - public void onFailure(Exception e) { + @Override + public void onFailure(Exception e) { fail(); } - }); + } + ); assertThat(receivedResponse[0], notNullValue()); assertThat(receivedResponse[0].getGrokPatterns().keySet().toArray(), equalTo(sortedKeys.toArray())); GrokProcessorGetAction.Response firstResponse = receivedResponse[0]; - transportAction.doExecute(null, new GrokProcessorGetAction.Request(true), new ActionListener<>() { - @Override - public void onResponse(GrokProcessorGetAction.Response response) { + transportAction.doExecute( + null, + new GrokProcessorGetAction.Request(true, GrokProcessor.DEFAULT_ECS_COMPATIBILITY_MODE), + new ActionListener<>() { + @Override + public void onResponse(GrokProcessorGetAction.Response response) { receivedResponse[0] = response; } - @Override - public void onFailure(Exception e) { + @Override + public void onFailure(Exception e) { fail(); } - }); + } + ); assertThat(receivedResponse[0], notNullValue()); assertThat(receivedResponse[0], not(sameInstance(firstResponse))); assertThat(receivedResponse[0].getGrokPatterns(), sameInstance(firstResponse.getGrokPatterns())); } + public void testEcsCompatibilityMode() { + List sortedKeys = new ArrayList<>(ECS_TEST_PATTERNS.keySet()); + Collections.sort(sortedKeys); + GrokProcessorGetAction.TransportAction transportAction = new GrokProcessorGetAction.TransportAction( + mock(TransportService.class), + mock(ActionFilters.class), + LEGACY_TEST_PATTERNS, + ECS_TEST_PATTERNS + ); + GrokProcessorGetAction.Response[] receivedResponse = new GrokProcessorGetAction.Response[1]; + transportAction.doExecute( + null, + new GrokProcessorGetAction.Request(true, Grok.ECS_COMPATIBILITY_MODES[1]), + new ActionListener<>() { + @Override + public void onResponse(GrokProcessorGetAction.Response response) { + receivedResponse[0] = response; + } + + @Override + public void onFailure(Exception e) { + fail(); + } + } + ); + assertThat(receivedResponse[0], notNullValue()); + assertThat(receivedResponse[0].getGrokPatterns().keySet().toArray(), equalTo(sortedKeys.toArray())); + } + @SuppressWarnings("unchecked") public void testResponseToXContent() throws Exception { - GrokProcessorGetAction.Response response = new GrokProcessorGetAction.Response(TEST_PATTERNS); + GrokProcessorGetAction.Response response = new GrokProcessorGetAction.Response(LEGACY_TEST_PATTERNS); try (XContentBuilder builder = JsonXContent.contentBuilder()) { response.toXContent(builder, ToXContent.EMPTY_PARAMS); Map converted = XContentHelper.convertToMap(BytesReference.bytes(builder), false, builder.contentType()).v2(); diff --git a/modules/runtime-fields-common/src/main/java/org/elasticsearch/runtimefields/NamedGroupExtractor.java b/modules/runtime-fields-common/src/main/java/org/elasticsearch/runtimefields/NamedGroupExtractor.java index 90a166d34f64f..26c1dc3178d5c 100644 --- 
a/modules/runtime-fields-common/src/main/java/org/elasticsearch/runtimefields/NamedGroupExtractor.java +++ b/modules/runtime-fields-common/src/main/java/org/elasticsearch/runtimefields/NamedGroupExtractor.java @@ -99,13 +99,13 @@ public Grok run() { try { // Try to collect warnings up front and refuse to compile the expression if there are any List warnings = new ArrayList<>(); - new Grok(Grok.BUILTIN_PATTERNS, pattern, watchdog, warnings::add).match("__nomatch__"); + new Grok(Grok.getBuiltinPatterns(false), pattern, watchdog, warnings::add).match("__nomatch__"); if (false == warnings.isEmpty()) { throw new IllegalArgumentException("emitted warnings: " + warnings); } return new Grok( - Grok.BUILTIN_PATTERNS, + Grok.getBuiltinPatterns(false), pattern, watchdog, w -> { throw new IllegalArgumentException("grok [" + pattern + "] emitted a warning: " + w); } diff --git a/qa/smoke-test-ingest-with-all-dependencies/src/test/resources/rest-api-spec/test/ingest/20_combine_processors.yml b/qa/smoke-test-ingest-with-all-dependencies/src/test/resources/rest-api-spec/test/ingest/20_combine_processors.yml index 27f7f804ead1c..d748a2388bdd4 100644 --- a/qa/smoke-test-ingest-with-all-dependencies/src/test/resources/rest-api-spec/test/ingest/20_combine_processors.yml +++ b/qa/smoke-test-ingest-with-all-dependencies/src/test/resources/rest-api-spec/test/ingest/20_combine_processors.yml @@ -67,6 +67,63 @@ - match: { _source.geoip.continent_name: "North America" } - match: { _source.geoip.country_iso_code: "US" } +--- +"Test with date processor and ECS-v1": + - do: + ingest.put_pipeline: + id: "_id" + body: > + { + "processors": [ + { + "grok" : { + "field" : "log", + "ecs_compatibility": "v1", + "patterns": ["%{COMBINEDAPACHELOG}"] + } + }, + { + "date" : { + "field" : "timestamp", + "target_field" : "timestamp", + "formats" : ["dd/MMM/yyyy:HH:mm:ss xx"] + } + }, + { + "geoip" : { + "field" : "source.address" + } + } + ] + } + - match: { acknowledged: true } + + - do: + index: + index: test + id: 1 + pipeline: "_id" + body: { + log: "70.193.17.92 - - [08/Sep/2014:02:54:42 +0000] \"GET /presentations/logstash-scale11x/images/ahhh___rage_face_by_samusmmx-d5g5zap.png HTTP/1.1\" 200 175208 \"http://mobile.rivals.com/board_posts.asp?SID=880&mid=198829575&fid=2208&tid=198829575&Team=&TeamId=&SiteId=\" \"Mozilla/5.0 (Linux; Android 4.2.2; VS980 4G Build/JDQ39B) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/33.0.1750.135 Mobile Safari/537.36\"" + } + + - do: + get: + index: test + id: 1 + - length: { _source: 7 } + - match: { _source.url.original: "/presentations/logstash-scale11x/images/ahhh___rage_face_by_samusmmx-d5g5zap.png" } + - match: { _source.user_agent.original: "Mozilla/5.0 (Linux; Android 4.2.2; VS980 4G Build/JDQ39B) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/33.0.1750.135 Mobile Safari/537.36" } + - match: { _source.http.request.method: "GET" } + - match: { _source.http.request.referrer: "http://mobile.rivals.com/board_posts.asp?SID=880&mid=198829575&fid=2208&tid=198829575&Team=&TeamId=&SiteId=" } + - match: { _source.http.response.status_code: 200 } + - match: { _source.http.response.body.bytes: 175208 } + - match: { _source.source.address: "70.193.17.92" } + - match: { _source.http.version: "1.1" } + - match: { _source.timestamp: "2014-09-08T02:54:42.000Z" } + - match: { _source.geoip.continent_name: "North America" } + - match: { _source.geoip.country_iso_code: "US" } + --- "Test mutate": - do: diff --git 
a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/categorization/GrokPatternCreator.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/categorization/GrokPatternCreator.java index 13528a3294ffd..b97d5b4215374 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/categorization/GrokPatternCreator.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/categorization/GrokPatternCreator.java @@ -280,7 +280,7 @@ static class GrokPatternCandidate { GrokPatternCandidate(String grokPatternName, String fieldName, String preBreak, String postBreak) { this.grokPatternName = grokPatternName; this.fieldName = fieldName; - this.grok = new Grok(Grok.BUILTIN_PATTERNS, "%{DATA:" + PREFACE + "}" + preBreak + "%{" + grokPatternName + ":this}" + + this.grok = new Grok(Grok.getBuiltinPatterns(false), "%{DATA:" + PREFACE + "}" + preBreak + "%{" + grokPatternName + ":this}" + postBreak + "%{GREEDYDATA:" + EPILOGUE + "}", logger::warn); } } diff --git a/x-pack/plugin/text-structure/src/main/java/org/elasticsearch/xpack/textstructure/structurefinder/GrokPatternCreator.java b/x-pack/plugin/text-structure/src/main/java/org/elasticsearch/xpack/textstructure/structurefinder/GrokPatternCreator.java index 537b732c65cd6..6e0b1f8eac7f6 100644 --- a/x-pack/plugin/text-structure/src/main/java/org/elasticsearch/xpack/textstructure/structurefinder/GrokPatternCreator.java +++ b/x-pack/plugin/text-structure/src/main/java/org/elasticsearch/xpack/textstructure/structurefinder/GrokPatternCreator.java @@ -156,9 +156,9 @@ public GrokPatternCreator( this.mappings = mappings; this.fieldStats = fieldStats; if (customGrokPatternDefinitions.isEmpty()) { - grokPatternDefinitions = Grok.BUILTIN_PATTERNS; + grokPatternDefinitions = Grok.getBuiltinPatterns(false); } else { - grokPatternDefinitions = new HashMap<>(Grok.BUILTIN_PATTERNS); + grokPatternDefinitions = new HashMap<>(Grok.getBuiltinPatterns(false)); grokPatternDefinitions.putAll(customGrokPatternDefinitions); } this.timeoutChecker = Objects.requireNonNull(timeoutChecker); @@ -509,7 +509,7 @@ static class ValueOnlyGrokPatternCandidate implements GrokPatternCandidate { fieldName, "\\b", "\\b", - Grok.BUILTIN_PATTERNS + Grok.getBuiltinPatterns(false) ); } @@ -549,7 +549,7 @@ static class ValueOnlyGrokPatternCandidate implements GrokPatternCandidate { fieldName, preBreak, postBreak, - Grok.BUILTIN_PATTERNS + Grok.getBuiltinPatterns(false) ); } @@ -699,7 +699,7 @@ public String processCaptures( throw new IllegalStateException("Cannot process KV matches until a field name has been determined"); } Grok grok = new Grok( - Grok.BUILTIN_PATTERNS, + Grok.getBuiltinPatterns(false), "(?m)%{DATA:" + PREFACE + "}\\b" + fieldName + "=%{USER:" + VALUE + "}%{GREEDYDATA:" + EPILOGUE + "}", TimeoutChecker.watchdog, logger::warn @@ -768,7 +768,7 @@ static class FullMatchGrokPatternCandidate { private final Grok grok; static FullMatchGrokPatternCandidate fromGrokPatternName(String grokPatternName, String timeField) { - return new FullMatchGrokPatternCandidate("%{" + grokPatternName + "}", timeField, Grok.BUILTIN_PATTERNS); + return new FullMatchGrokPatternCandidate("%{" + grokPatternName + "}", timeField, Grok.getBuiltinPatterns(false)); } static FullMatchGrokPatternCandidate fromGrokPatternName( @@ -780,7 +780,7 @@ static FullMatchGrokPatternCandidate fromGrokPatternName( } static FullMatchGrokPatternCandidate fromGrokPattern(String grokPattern, String timeField) { - return new FullMatchGrokPatternCandidate(grokPattern, timeField, 
Grok.BUILTIN_PATTERNS); + return new FullMatchGrokPatternCandidate(grokPattern, timeField, Grok.getBuiltinPatterns(false)); } static FullMatchGrokPatternCandidate fromGrokPattern( diff --git a/x-pack/plugin/text-structure/src/main/java/org/elasticsearch/xpack/textstructure/structurefinder/TextStructureUtils.java b/x-pack/plugin/text-structure/src/main/java/org/elasticsearch/xpack/textstructure/structurefinder/TextStructureUtils.java index 5878823e1d2c5..3e77d57001f3b 100644 --- a/x-pack/plugin/text-structure/src/main/java/org/elasticsearch/xpack/textstructure/structurefinder/TextStructureUtils.java +++ b/x-pack/plugin/text-structure/src/main/java/org/elasticsearch/xpack/textstructure/structurefinder/TextStructureUtils.java @@ -59,19 +59,19 @@ public final class TextStructureUtils { "(?:%{WKT_POINT}|%{WKT_LINESTRING}|%{WKT_MULTIPOINT}|%{WKT_POLYGON}|%{WKT_MULTILINESTRING}|%{WKT_MULTIPOLYGON}|%{WKT_BBOX})" ); patterns.put("WKT_GEOMETRYCOLLECTION", "GEOMETRYCOLLECTION \\(%{WKT_ANY}(?:, %{WKT_ANY})\\)"); - patterns.putAll(Grok.BUILTIN_PATTERNS); + patterns.putAll(Grok.getBuiltinPatterns(false)); EXTENDED_PATTERNS = Collections.unmodifiableMap(patterns); } private static final int NUM_TOP_HITS = 10; // NUMBER Grok pattern doesn't support scientific notation, so we extend it private static final Grok NUMBER_GROK = new Grok( - Grok.BUILTIN_PATTERNS, + Grok.getBuiltinPatterns(false), "^%{NUMBER}(?:[eE][+-]?[0-3]?[0-9]{1,2})?$", TimeoutChecker.watchdog, logger::warn ); - private static final Grok IP_GROK = new Grok(Grok.BUILTIN_PATTERNS, "^%{IP}$", TimeoutChecker.watchdog, logger::warn); + private static final Grok IP_GROK = new Grok(Grok.getBuiltinPatterns(false), "^%{IP}$", TimeoutChecker.watchdog, logger::warn); private static final Grok GEO_POINT_WKT = new Grok(EXTENDED_PATTERNS, "^%{WKT_POINT}$", TimeoutChecker.watchdog, logger::warn); private static final Grok GEO_WKT = new Grok( EXTENDED_PATTERNS, diff --git a/x-pack/plugin/text-structure/src/main/java/org/elasticsearch/xpack/textstructure/structurefinder/TimestampFormatFinder.java b/x-pack/plugin/text-structure/src/main/java/org/elasticsearch/xpack/textstructure/structurefinder/TimestampFormatFinder.java index 22c8f4442ca7f..794762094e2ab 100644 --- a/x-pack/plugin/text-structure/src/main/java/org/elasticsearch/xpack/textstructure/structurefinder/TimestampFormatFinder.java +++ b/x-pack/plugin/text-structure/src/main/java/org/elasticsearch/xpack/textstructure/structurefinder/TimestampFormatFinder.java @@ -1706,13 +1706,13 @@ static final class CandidateTimestampFormat { this.strictGrokPattern = Objects.requireNonNull(strictGrokPattern); // The (?m) here has the Ruby meaning, which is equivalent to (?s) in Java this.strictSearchGrok = new Grok( - Grok.BUILTIN_PATTERNS, + Grok.getBuiltinPatterns(false), "(?m)%{DATA:" + PREFACE + "}" + strictGrokPattern + "%{GREEDYDATA:" + EPILOGUE + "}", TimeoutChecker.watchdog, logger::warn ); this.strictFullMatchGrok = new Grok( - Grok.BUILTIN_PATTERNS, + Grok.getBuiltinPatterns(false), "^" + strictGrokPattern + "$", TimeoutChecker.watchdog, logger::warn diff --git a/x-pack/plugin/text-structure/src/test/java/org/elasticsearch/xpack/textstructure/structurefinder/TimeoutCheckerTests.java b/x-pack/plugin/text-structure/src/test/java/org/elasticsearch/xpack/textstructure/structurefinder/TimeoutCheckerTests.java index b7c7e2362a250..611ac92bbe0eb 100644 --- a/x-pack/plugin/text-structure/src/test/java/org/elasticsearch/xpack/textstructure/structurefinder/TimeoutCheckerTests.java +++ 
b/x-pack/plugin/text-structure/src/test/java/org/elasticsearch/xpack/textstructure/structurefinder/TimeoutCheckerTests.java @@ -81,7 +81,7 @@ public void testWatchdog() throws Exception { } public void testGrokCaptures() throws Exception { - Grok grok = new Grok(Grok.BUILTIN_PATTERNS, "{%DATA:data}{%GREEDYDATA:greedydata}", TimeoutChecker.watchdog, logger::warn); + Grok grok = new Grok(Grok.getBuiltinPatterns(false), "{%DATA:data}{%GREEDYDATA:greedydata}", TimeoutChecker.watchdog, logger::warn); TimeValue timeout = TimeValue.timeValueMillis(1); try (TimeoutChecker timeoutChecker = new TimeoutChecker("grok captures test", timeout, scheduler)) { From f89eda5f9d89fa6b197dd00cb1dd700b78880887 Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Tue, 31 Aug 2021 15:28:57 +0200 Subject: [PATCH 039/128] Fix Snapshot BwC Version Randomization Behavior (#77057) The randomization of the repo version often wasn't used because of the repository cache. Force re-creating the repository every time we manually mess with the versions. --- .../repositories/IndexSnapshotsServiceIT.java | 3 --- .../org/elasticsearch/snapshots/CloneSnapshotIT.java | 3 --- .../snapshots/CorruptedBlobStoreRepositoryIT.java | 12 +----------- .../snapshots/AbstractSnapshotIntegTestCase.java | 11 +++++++++-- 4 files changed, 10 insertions(+), 19 deletions(-) diff --git a/server/src/internalClusterTest/java/org/elasticsearch/repositories/IndexSnapshotsServiceIT.java b/server/src/internalClusterTest/java/org/elasticsearch/repositories/IndexSnapshotsServiceIT.java index d69d0b562abcd..a936e04f2d7bb 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/repositories/IndexSnapshotsServiceIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/repositories/IndexSnapshotsServiceIT.java @@ -115,9 +115,6 @@ public void testGetShardSnapshotReturnsTheLatestSuccessfulSnapshot() throws Exce if (useBwCFormat) { final Version version = randomVersionBetween(random(), Version.V_7_5_0, Version.CURRENT); initWithSnapshotVersion(repoName, repoPath, version); - // Re-create repo to clear repository data cache - assertAcked(clusterAdmin().prepareDeleteRepository(repoName).get()); - createRepository(repoName, "fs", repoPath); } createSnapshot(repoName, "empty-snap", Collections.emptyList()); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/CloneSnapshotIT.java b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/CloneSnapshotIT.java index 3ccb6d20d66f6..d756056f1ba77 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/CloneSnapshotIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/CloneSnapshotIT.java @@ -58,9 +58,6 @@ public void testShardClone() throws Exception { final boolean useBwCFormat = randomBoolean(); if (useBwCFormat) { initWithSnapshotVersion(repoName, repoPath, SnapshotsService.OLD_SNAPSHOT_FORMAT); - // Re-create repo to clear repository data cache - assertAcked(clusterAdmin().prepareDeleteRepository(repoName).get()); - createRepository(repoName, "fs", repoPath); } final String indexName = "test-index"; diff --git a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/CorruptedBlobStoreRepositoryIT.java b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/CorruptedBlobStoreRepositoryIT.java index 219b72516493f..5f497ebf7b125 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/CorruptedBlobStoreRepositoryIT.java +++ 
b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/CorruptedBlobStoreRepositoryIT.java @@ -33,7 +33,6 @@ import org.elasticsearch.repositories.blobstore.BlobStoreRepository; import org.elasticsearch.threadpool.ThreadPool; -import java.io.IOException; import java.nio.channels.SeekableByteChannel; import java.nio.file.Files; import java.nio.file.Path; @@ -42,7 +41,6 @@ import java.util.List; import java.util.Locale; import java.util.Map; -import java.util.concurrent.ExecutionException; import java.util.function.Function; import java.util.stream.Collectors; import java.util.stream.Stream; @@ -368,16 +366,12 @@ public void testMountCorruptedRepositoryData() throws Exception { expectThrows(RepositoryException.class, () -> getRepositoryData(otherRepo)); } - public void testHandleSnapshotErrorWithBwCFormat() throws IOException, ExecutionException, InterruptedException { + public void testHandleSnapshotErrorWithBwCFormat() throws Exception { final String repoName = "test-repo"; final Path repoPath = randomRepoPath(); createRepository(repoName, "fs", repoPath); final String oldVersionSnapshot = initWithSnapshotVersion(repoName, repoPath, SnapshotsService.OLD_SNAPSHOT_FORMAT); - logger.info("--> recreating repository to clear caches"); - client().admin().cluster().prepareDeleteRepository(repoName).get(); - createRepository(repoName, "fs", repoPath); - final String indexName = "test-index"; createIndex(indexName); @@ -403,10 +397,6 @@ public void testRepairBrokenShardGenerations() throws Exception { createRepository(repoName, "fs", repoPath); final String oldVersionSnapshot = initWithSnapshotVersion(repoName, repoPath, SnapshotsService.OLD_SNAPSHOT_FORMAT); - logger.info("--> recreating repository to clear caches"); - client().admin().cluster().prepareDeleteRepository(repoName).get(); - createRepository(repoName, "fs", repoPath); - final String indexName = "test-index"; createIndex(indexName); diff --git a/test/framework/src/main/java/org/elasticsearch/snapshots/AbstractSnapshotIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/snapshots/AbstractSnapshotIntegTestCase.java index de42a76eef945..6d2d3ff103fed 100644 --- a/test/framework/src/main/java/org/elasticsearch/snapshots/AbstractSnapshotIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/snapshots/AbstractSnapshotIntegTestCase.java @@ -349,7 +349,7 @@ protected static Settings.Builder indexSettingsNoReplicas(int shards) { /** * Randomly write an empty snapshot of an older version to an empty repository to simulate an older repository metadata format. 
*/ - protected void maybeInitWithOldSnapshotVersion(String repoName, Path repoPath) throws IOException { + protected void maybeInitWithOldSnapshotVersion(String repoName, Path repoPath) throws Exception { if (randomBoolean() && randomBoolean()) { initWithSnapshotVersion(repoName, repoPath, VersionUtils.randomIndexCompatibleVersion(random())); } @@ -359,7 +359,7 @@ protected void maybeInitWithOldSnapshotVersion(String repoName, Path repoPath) t * Workaround to simulate BwC situation: taking a snapshot without indices here so that we don't create any new version shard * generations (the existence of which would short-circuit checks for the repo containing old version snapshots) */ - protected String initWithSnapshotVersion(String repoName, Path repoPath, Version version) throws IOException { + protected String initWithSnapshotVersion(String repoName, Path repoPath, Version version) throws Exception { assertThat("This hack only works on an empty repository", getRepositoryData(repoName).getSnapshotIds(), empty()); final String oldVersionSnapshot = OLD_VERSION_SNAPSHOT_PREFIX + version.id; final CreateSnapshotResponse createSnapshotResponse = clusterAdmin() @@ -393,6 +393,13 @@ protected String initWithSnapshotVersion(String repoName, Path repoPath, Version BlobStoreRepository.SNAPSHOT_FORMAT.write(downgradedSnapshotInfo, blobStoreRepository.blobStore().blobContainer(blobStoreRepository.basePath()), snapshotInfo.snapshotId().getUUID(), randomBoolean())))); + + final RepositoryMetadata repoMetadata = blobStoreRepository.getMetadata(); + if (BlobStoreRepository.CACHE_REPOSITORY_DATA.get(repoMetadata.settings())) { + logger.info("--> recreating repository to clear caches"); + assertAcked(client().admin().cluster().prepareDeleteRepository(repoName)); + createRepository(repoName, repoMetadata.type(), Settings.builder().put(repoMetadata.settings())); + } return oldVersionSnapshot; } From 5aa0dd7f339bf39f862fb3515f31863dd9f0d36d Mon Sep 17 00:00:00 2001 From: Mark Tozzi Date: Tue, 31 Aug 2021 10:55:52 -0400 Subject: [PATCH 040/128] fix git blame after autoformatting (#77040) * fix git blame after autoformatting --- .git-blame-ignore-revs | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.git-blame-ignore-revs b/.git-blame-ignore-revs index f5b6ecb9d52f9..9ffc1c8b92a1c 100644 --- a/.git-blame-ignore-revs +++ b/.git-blame-ignore-revs @@ -14,3 +14,6 @@ # Format more snapshot / restore relate projects 559c4e6ef4f9173bbb59043bacd0ac36c7281040 + +# Format aggregations and related code (server and x-pack) +d71544976608bdb53fa4d29521fb328e1033ee2f From 9e9a6d5601f8ac9a322a57d1c005f65104f4e45b Mon Sep 17 00:00:00 2001 From: Jack Conradson Date: Tue, 31 Aug 2021 08:12:04 -0700 Subject: [PATCH 041/128] Adds a lookup method to Painless for finding methods of all sub classes (#77044) This change adds a method to the PainlessLookup used to find methods of all allow-listed sub classes. A method is specified for a specific super class, and then a list is built with all matching methods from all the allow-listed sub classes. If no matches are found, null is returned, which is consistent with the behavior of the other lookup methods. It is up to the caller to check.
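As an aside for readers of this patch (not part of the change itself), here is a minimal sketch of how a caller might consume the new lookup. The helper class and method below are hypothetical; only the PainlessLookup#lookupPainlessSubClassesMethod signature and the PainlessMethod type come from this commit. Because the lookup returns null when no allow-listed sub class declares a matching method, the caller has to handle that case explicitly:

import org.elasticsearch.painless.lookup.PainlessLookup;
import org.elasticsearch.painless.lookup.PainlessMethod;

import java.util.Collections;
import java.util.List;

class SubClassMethodLookupSketch {
    // Collects the matching methods declared by allow-listed sub classes of targetClass.
    // lookupPainlessSubClassesMethod returns null when nothing matches, so normalize that to an empty list.
    static List<PainlessMethod> findSubClassMethods(PainlessLookup lookup, Class<?> targetClass, String methodName, int arity) {
        List<PainlessMethod> subMethods = lookup.lookupPainlessSubClassesMethod(targetClass, methodName, arity);
        return subMethods == null ? Collections.emptyList() : subMethods;
    }
}

The new LookupTests cases below exercise the same contract directly against the lookup, asserting null for missing matches and the expected PainlessMethod instances otherwise.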
--- .../painless/lookup/PainlessLookup.java | 67 +++++++ .../elasticsearch/painless/LookupTests.java | 184 ++++++++++++++++-- .../org.elasticsearch.painless.lookup | 17 ++ 3 files changed, 256 insertions(+), 12 deletions(-) diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookup.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookup.java index 464ae43c8072f..bb166eabd662b 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookup.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookup.java @@ -150,8 +150,16 @@ public PainlessMethod lookupPainlessMethod(Class targetClass, boolean isStati Objects.requireNonNull(targetClass); Objects.requireNonNull(methodName); + if (classesToPainlessClasses.containsKey(targetClass) == false) { + return null; + } + if (targetClass.isPrimitive()) { targetClass = typeToBoxedType(targetClass); + + if (classesToPainlessClasses.containsKey(targetClass) == false) { + return null; + } } String painlessMethodKey = buildPainlessMethodKey(methodName, methodArity); @@ -162,6 +170,61 @@ public PainlessMethod lookupPainlessMethod(Class targetClass, boolean isStati return lookupPainlessObject(targetClass, objectLookup); } + public List lookupPainlessSubClassesMethod(String targetCanonicalClassName, String methodName, int methodArity) { + Objects.requireNonNull(targetCanonicalClassName); + + Class targetClass = canonicalTypeNameToType(targetCanonicalClassName); + + if (targetClass == null) { + return null; + } + + return lookupPainlessSubClassesMethod(targetClass, methodName, methodArity); + } + + public List lookupPainlessSubClassesMethod(Class targetClass, String methodName, int methodArity) { + Objects.requireNonNull(targetClass); + Objects.requireNonNull(methodName); + + if (classesToPainlessClasses.containsKey(targetClass) == false) { + return null; + } + + if (targetClass.isPrimitive()) { + targetClass = typeToBoxedType(targetClass); + + if (classesToPainlessClasses.containsKey(targetClass) == false) { + return null; + } + } + + String painlessMethodKey = buildPainlessMethodKey(methodName, methodArity); + List> subClasses = new ArrayList<>(classesToDirectSubClasses.get(targetClass)); + Set> resolvedSubClasses = new HashSet<>(); + List subMethods = null; + + while (subClasses.isEmpty() == false) { + Class subClass = subClasses.remove(0); + + if (resolvedSubClasses.add(subClass)) { + subClasses.addAll(classesToDirectSubClasses.get(subClass)); + + PainlessClass painlessClass = classesToPainlessClasses.get(subClass); + PainlessMethod painlessMethod = painlessClass.methods.get(painlessMethodKey); + + if (painlessMethod != null) { + if (subMethods == null) { + subMethods = new ArrayList<>(); + } + + subMethods.add(painlessMethod); + } + } + } + + return subMethods; + } + public PainlessField lookupPainlessField(String targetCanonicalClassName, boolean isStatic, String fieldName) { Objects.requireNonNull(targetCanonicalClassName); @@ -178,6 +241,10 @@ public PainlessField lookupPainlessField(Class targetClass, boolean isStatic, Objects.requireNonNull(targetClass); Objects.requireNonNull(fieldName); + if (classesToPainlessClasses.containsKey(targetClass) == false) { + return null; + } + String painlessFieldKey = buildPainlessFieldKey(fieldName); Function objectLookup = isStatic ? 
targetPainlessClass -> targetPainlessClass.staticFields.get(painlessFieldKey) : diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/LookupTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/LookupTests.java index a0ebbb5b25024..09dd970adfb6d 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/LookupTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/LookupTests.java @@ -10,11 +10,13 @@ import org.elasticsearch.painless.lookup.PainlessLookup; import org.elasticsearch.painless.lookup.PainlessLookupBuilder; +import org.elasticsearch.painless.lookup.PainlessMethod; import org.elasticsearch.painless.spi.WhitelistLoader; import org.elasticsearch.test.ESTestCase; import org.junit.Before; import java.util.Collections; +import java.util.List; import java.util.Set; public class LookupTests extends ESTestCase { @@ -28,31 +30,54 @@ public void setup() { )); } - public static class A { } // in whitelist - public static class B extends A { } // not in whitelist - public static class C extends B { } // in whitelist - public static class D extends B { } // in whitelist + public static class A { } // in whitelist + public static class B extends A { } // not in whitelist + public static class C extends B { // in whitelist + public String getString0() { return "C/0"; } // in whitelist + } + public static class D extends B { // in whitelist + public String getString0() { return "D/0"; } // in whitelist + public String getString1(int param0) { return "D/1 (" + param0 + ")"; } // in whitelist + } public interface Z { } // in whitelist public interface Y { } // not in whitelist public interface X extends Y, Z { } // not in whitelist public interface V extends Y, Z { } // in whitelist - public interface U extends X { } // in whitelist - public interface T extends V { } // in whitelist + public interface U extends X { // in whitelist + String getString2(int x, int y); // in whitelist + String getString1(int param0); // in whitelist + String getString0(); // not in whitelist + } + public interface T extends V { // in whitelist + String getString1(int param0); // in whitelist + int getInt0(); // in whitelist + } public interface S extends U, X { } // in whitelist - public static class AA implements X { } // in whitelist - public static class AB extends AA implements S { } // not in whitelist - public static class AC extends AB implements V { } // in whitelist - public static class AD implements X, S, T { } // in whitelist + public static class AA implements X { } // in whitelist + public static class AB extends AA implements S { // not in whitelist + public String getString2(int x, int y) { return "" + x + y; } // not in whitelist + public String getString1(int param0) { return "" + param0; } // not in whitelist + public String getString0() { return ""; } // not in whitelist + } + public static class AC extends AB implements V { // in whitelist + public String getString2(int x, int y) { return "" + x + y; } // in whitelist + } + public static class AD extends AA implements X, S, T { // in whitelist + public String getString2(int x, int y) { return "" + x + y; } // in whitelist + public String getString1(int param0) { return "" + param0; } // in whitelist + public String getString0() { return ""; } // not in whitelist + public int getInt0() { return 0; } // in whitelist + } public void testDirectSubClasses() { Set> directSubClasses = painlessLookup.getDirectSubClasses(Object.class); assertEquals(4, 
directSubClasses.size()); + assertTrue(directSubClasses.contains(String.class)); assertTrue(directSubClasses.contains(A.class)); assertTrue(directSubClasses.contains(Z.class)); assertTrue(directSubClasses.contains(AA.class)); - assertTrue(directSubClasses.contains(AD.class)); directSubClasses = painlessLookup.getDirectSubClasses(A.class); assertEquals(2, directSubClasses.size()); @@ -101,8 +126,9 @@ public void testDirectSubClasses() { assertTrue(directSubClasses.contains(AD.class)); directSubClasses = painlessLookup.getDirectSubClasses(AA.class); - assertEquals(1, directSubClasses.size()); + assertEquals(2, directSubClasses.size()); assertTrue(directSubClasses.contains(AC.class)); + assertTrue(directSubClasses.contains(AD.class)); directSubClasses = painlessLookup.getDirectSubClasses(AB.class); assertNull(directSubClasses); @@ -113,4 +139,138 @@ public void testDirectSubClasses() { directSubClasses = painlessLookup.getDirectSubClasses(AD.class); assertTrue(directSubClasses.isEmpty()); } + + public void testDirectSubClassMethods() { + PainlessMethod CgetString0 = painlessLookup.lookupPainlessMethod(C.class, false, "getString0", 0); + PainlessMethod DgetString0 = painlessLookup.lookupPainlessMethod(D.class, false, "getString0", 0); + List subMethods = painlessLookup.lookupPainlessSubClassesMethod(A.class, "getString0", 0); + assertNotNull(subMethods); + assertEquals(2, subMethods.size()); + assertTrue(subMethods.contains(CgetString0)); + assertTrue(subMethods.contains(DgetString0)); + + PainlessMethod DgetString1 = painlessLookup.lookupPainlessMethod(D.class, false, "getString1", 1); + subMethods = painlessLookup.lookupPainlessSubClassesMethod(A.class, "getString1", 1); + assertNotNull(subMethods); + assertEquals(1, subMethods.size()); + assertTrue(subMethods.contains(DgetString1)); + + subMethods = painlessLookup.lookupPainlessSubClassesMethod(A.class, "getString2", 0); + assertNull(subMethods); + + PainlessMethod ACgetString2 = painlessLookup.lookupPainlessMethod(AC.class, false, "getString2", 2); + PainlessMethod ADgetString2 = painlessLookup.lookupPainlessMethod(AD.class, false, "getString2", 2); + subMethods = painlessLookup.lookupPainlessSubClassesMethod(AA.class, "getString2", 2); + assertNotNull(subMethods); + assertEquals(2, subMethods.size()); + assertTrue(subMethods.contains(ACgetString2)); + assertTrue(subMethods.contains(ADgetString2)); + + PainlessMethod ADgetString1 = painlessLookup.lookupPainlessMethod(AD.class, false, "getString1", 1); + subMethods = painlessLookup.lookupPainlessSubClassesMethod(AA.class, "getString1", 1); + assertNotNull(subMethods); + assertEquals(1, subMethods.size()); + assertTrue(subMethods.contains(ADgetString1)); + + subMethods = painlessLookup.lookupPainlessSubClassesMethod(AA.class, "getString0", 0); + assertNull(subMethods); + + PainlessMethod ADgetInt0 = painlessLookup.lookupPainlessMethod(AD.class, false, "getInt0", 0); + subMethods = painlessLookup.lookupPainlessSubClassesMethod(AA.class, "getInt0", 0); + assertNotNull(subMethods); + assertEquals(1, subMethods.size()); + assertTrue(subMethods.contains(ADgetInt0)); + + PainlessMethod UgetString2 = painlessLookup.lookupPainlessMethod(U.class, false, "getString2", 2); + subMethods = painlessLookup.lookupPainlessSubClassesMethod(Z.class, "getString2", 2); + assertNotNull(subMethods); + assertEquals(3, subMethods.size()); + assertTrue(subMethods.contains(UgetString2)); + assertTrue(subMethods.contains(ACgetString2)); + assertTrue(subMethods.contains(ADgetString2)); + + PainlessMethod UgetString1 = 
painlessLookup.lookupPainlessMethod(U.class, false, "getString1", 1); + PainlessMethod TgetString1 = painlessLookup.lookupPainlessMethod(T.class, false, "getString1", 1); + subMethods = painlessLookup.lookupPainlessSubClassesMethod(Z.class, "getString1", 1); + assertNotNull(subMethods); + assertEquals(3, subMethods.size()); + assertTrue(subMethods.contains(UgetString1)); + assertTrue(subMethods.contains(TgetString1)); + assertTrue(subMethods.contains(ADgetString1)); + + subMethods = painlessLookup.lookupPainlessSubClassesMethod(Z.class, "getString0", 0); + assertNull(subMethods); + + PainlessMethod TgetInt0 = painlessLookup.lookupPainlessMethod(T.class, false, "getInt0", 0); + subMethods = painlessLookup.lookupPainlessSubClassesMethod(Z.class, "getInt0", 0); + assertNotNull(subMethods); + assertEquals(2, subMethods.size()); + assertTrue(subMethods.contains(TgetInt0)); + assertTrue(subMethods.contains(ADgetInt0)); + + subMethods = painlessLookup.lookupPainlessSubClassesMethod(V.class, "getString2", 2); + assertNotNull(subMethods); + assertEquals(2, subMethods.size()); + assertTrue(subMethods.contains(ACgetString2)); + assertTrue(subMethods.contains(ADgetString2)); + + subMethods = painlessLookup.lookupPainlessSubClassesMethod(V.class, "getString1", 1); + assertNotNull(subMethods); + assertEquals(2, subMethods.size()); + assertTrue(subMethods.contains(TgetString1)); + assertTrue(subMethods.contains(ADgetString1)); + + subMethods = painlessLookup.lookupPainlessSubClassesMethod(V.class, "getString0", 0); + assertNull(subMethods); + + subMethods = painlessLookup.lookupPainlessSubClassesMethod(V.class, "getInt0", 0); + assertNotNull(subMethods); + assertEquals(2, subMethods.size()); + assertTrue(subMethods.contains(TgetInt0)); + assertTrue(subMethods.contains(ADgetInt0)); + + subMethods = painlessLookup.lookupPainlessSubClassesMethod(U.class, "getString2", 2); + assertNotNull(subMethods); + assertEquals(2, subMethods.size()); + assertTrue(subMethods.contains(ACgetString2)); + assertTrue(subMethods.contains(ADgetString2)); + + subMethods = painlessLookup.lookupPainlessSubClassesMethod(U.class, "getString1", 1); + assertNotNull(subMethods); + assertEquals(1, subMethods.size()); + assertTrue(subMethods.contains(ADgetString1)); + + subMethods = painlessLookup.lookupPainlessSubClassesMethod(U.class, "getString0", 0); + assertNull(subMethods); + + subMethods = painlessLookup.lookupPainlessSubClassesMethod(U.class, "getInt0", 0); + assertNotNull(subMethods); + assertEquals(1, subMethods.size()); + assertTrue(subMethods.contains(ADgetInt0)); + + subMethods = painlessLookup.lookupPainlessSubClassesMethod(V.class, "getInt0", 0); + assertNotNull(subMethods); + assertEquals(2, subMethods.size()); + assertTrue(subMethods.contains(TgetInt0)); + assertTrue(subMethods.contains(ADgetInt0)); + + subMethods = painlessLookup.lookupPainlessSubClassesMethod(S.class, "getString2", 2); + assertNotNull(subMethods); + assertEquals(2, subMethods.size()); + assertTrue(subMethods.contains(ACgetString2)); + assertTrue(subMethods.contains(ADgetString2)); + + subMethods = painlessLookup.lookupPainlessSubClassesMethod(S.class, "getString1", 1); + assertNotNull(subMethods); + assertEquals(1, subMethods.size()); + assertTrue(subMethods.contains(ADgetString1)); + + subMethods = painlessLookup.lookupPainlessSubClassesMethod(S.class, "getString0", 0); + assertNull(subMethods); + + subMethods = painlessLookup.lookupPainlessSubClassesMethod(S.class, "getInt0", 0); + assertNotNull(subMethods); + assertEquals(1, subMethods.size()); + 
assertTrue(subMethods.contains(ADgetInt0)); + } } diff --git a/modules/lang-painless/src/test/resources/org/elasticsearch/painless/org.elasticsearch.painless.lookup b/modules/lang-painless/src/test/resources/org/elasticsearch/painless/org.elasticsearch.painless.lookup index b6a5adc6208b7..39ff66776103f 100644 --- a/modules/lang-painless/src/test/resources/org/elasticsearch/painless/org.elasticsearch.painless.lookup +++ b/modules/lang-painless/src/test/resources/org/elasticsearch/painless/org.elasticsearch.painless.lookup @@ -1,13 +1,22 @@ +class int @no_import { +} + class java.lang.Object { } +class java.lang.String { +} + class org.elasticsearch.painless.LookupTests$A { } class org.elasticsearch.painless.LookupTests$C { + String getString0() } class org.elasticsearch.painless.LookupTests$D { + String getString0() + String getString1(int) } class org.elasticsearch.painless.LookupTests$Z { @@ -17,9 +26,13 @@ class org.elasticsearch.painless.LookupTests$V { } class org.elasticsearch.painless.LookupTests$U { + String getString2(int, int); + String getString1(int); } class org.elasticsearch.painless.LookupTests$T { + String getString1(int); + int getInt0(); } class org.elasticsearch.painless.LookupTests$S { @@ -29,7 +42,11 @@ class org.elasticsearch.painless.LookupTests$AA { } class org.elasticsearch.painless.LookupTests$AC { + String getString2(int, int); } class org.elasticsearch.painless.LookupTests$AD { + String getString2(int, int) + String getString1(int) + int getInt0() } \ No newline at end of file From 37516daa561282df5a2ca2ce45db8c9aa65b82a3 Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Tue, 31 Aug 2021 17:25:05 +0200 Subject: [PATCH 042/128] Make some Mappers Singletons (#77067) Just some obvious singletons we can use to save a little memory/cache that I found during other experiments. 
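As a reader's note (not part of the change), the shape of the refactoring below can be sketched with a tiny stand-alone class. The class name is made up for illustration, but it mirrors the new RoutingFieldMapper.get(boolean) and FieldNamesFieldType.get(boolean) accessors in the diff: immutable objects with no per-index state are created once and shared rather than re-allocated on every mapping parse.

final class RequiredFlagSingleton {
    private static final RequiredFlagSingleton REQUIRED = new RequiredFlagSingleton(true);
    private static final RequiredFlagSingleton NOT_REQUIRED = new RequiredFlagSingleton(false);

    private final boolean required;

    private RequiredFlagSingleton(boolean required) {
        this.required = required;
    }

    // Hand out one of two shared immutable instances instead of allocating a new object per call.
    static RequiredFlagSingleton get(boolean required) {
        return required ? REQUIRED : NOT_REQUIRED;
    }

    boolean required() {
        return required;
    }
}

This works because the shared mappers and field types are immutable and only the default or flag-selected configurations are cached; anything carrying non-default per-field settings still gets its own instance, as the SourceFieldMapper builder below shows.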
--- .../index/mapper/DocCountFieldMapper.java | 4 +++- .../index/mapper/FieldMapper.java | 4 +++- .../index/mapper/FieldNamesFieldMapper.java | 19 +++++++++++++------ .../index/mapper/IgnoredFieldMapper.java | 4 +++- .../index/mapper/IndexFieldMapper.java | 4 +++- .../index/mapper/KeywordFieldMapper.java | 4 ++-- .../index/mapper/NestedPathFieldMapper.java | 15 ++++++++++----- .../index/mapper/RoutingFieldMapper.java | 11 +++++++++-- .../index/mapper/SeqNoFieldMapper.java | 2 +- .../index/mapper/SourceFieldMapper.java | 11 ++++++----- .../mapper/FieldNamesFieldTypeTests.java | 4 ++-- .../bucket/filter/FiltersAggregatorTests.java | 4 ++-- .../DateHistogramAggregatorTests.java | 2 +- 13 files changed, 58 insertions(+), 30 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DocCountFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/DocCountFieldMapper.java index 14d1a0ee9336b..b379729d50012 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/DocCountFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/DocCountFieldMapper.java @@ -22,7 +22,9 @@ public class DocCountFieldMapper extends MetadataFieldMapper { public static final String NAME = "_doc_count"; public static final String CONTENT_TYPE = "_doc_count"; - public static final TypeParser PARSER = new FixedTypeParser(c -> new DocCountFieldMapper()); + private static final DocCountFieldMapper INSTANCE = new DocCountFieldMapper(); + + public static final TypeParser PARSER = new FixedTypeParser(c -> INSTANCE); public static final class DocCountFieldType extends MappedFieldType { diff --git a/server/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java index 57d42c43d0c70..9f5794b224d73 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java @@ -385,8 +385,10 @@ public final Map indexAnalyzers() { public static class MultiFields implements Iterable, ToXContent { + private static final MultiFields EMPTY = new MultiFields(Collections.emptyMap()); + public static MultiFields empty() { - return new MultiFields(Collections.emptyMap()); + return EMPTY; } public static class Builder { diff --git a/server/src/main/java/org/elasticsearch/index/mapper/FieldNamesFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/FieldNamesFieldMapper.java index bde79e6e9fc60..4a41f16aa12b6 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/FieldNamesFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/FieldNamesFieldMapper.java @@ -93,21 +93,28 @@ public FieldNamesFieldMapper build() { "field_names_enabled_parameter", ENABLED_DEPRECATION_MESSAGE); } } - FieldNamesFieldType fieldNamesFieldType = new FieldNamesFieldType(enabled.getValue().value()); - return new FieldNamesFieldMapper(enabled.getValue(), indexVersionCreated, fieldNamesFieldType); + return new FieldNamesFieldMapper(enabled.getValue(), indexVersionCreated); } } public static final TypeParser PARSER = new ConfigurableTypeParser( - c -> new FieldNamesFieldMapper(Defaults.ENABLED, c.indexVersionCreated(), new FieldNamesFieldType(Defaults.ENABLED.value())), + c -> new FieldNamesFieldMapper(Defaults.ENABLED, c.indexVersionCreated()), c -> new Builder(c.indexVersionCreated()) ); public static final class FieldNamesFieldType extends TermBasedFieldType { + private static final FieldNamesFieldType ENABLED = 
new FieldNamesFieldType(true); + + private static final FieldNamesFieldType DISABLED = new FieldNamesFieldType(false); + private final boolean enabled; - public FieldNamesFieldType(boolean enabled) { + public static FieldNamesFieldType get(boolean enabled) { + return enabled ? ENABLED : DISABLED; + } + + private FieldNamesFieldType(boolean enabled) { super(Defaults.NAME, true, false, false, TextSearchInfo.SIMPLE_MATCH_ONLY, Collections.emptyMap()); this.enabled = enabled; } @@ -145,8 +152,8 @@ public Query termQuery(Object value, SearchExecutionContext context) { private final Explicit enabled; private final Version indexVersionCreated; - private FieldNamesFieldMapper(Explicit enabled, Version indexVersionCreated, FieldNamesFieldType mappedFieldType) { - super(mappedFieldType); + private FieldNamesFieldMapper(Explicit enabled, Version indexVersionCreated) { + super(FieldNamesFieldType.get(enabled.value())); this.enabled = enabled; this.indexVersionCreated = indexVersionCreated; } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/IgnoredFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/IgnoredFieldMapper.java index 7ae84c27a76e3..f4c44d4227f67 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/IgnoredFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/IgnoredFieldMapper.java @@ -40,7 +40,9 @@ public static class Defaults { } } - public static final TypeParser PARSER = new FixedTypeParser(c -> new IgnoredFieldMapper()); + private static final IgnoredFieldMapper INSTANCE = new IgnoredFieldMapper(); + + public static final TypeParser PARSER = new FixedTypeParser(c -> INSTANCE); public static final class IgnoredFieldType extends StringFieldType { diff --git a/server/src/main/java/org/elasticsearch/index/mapper/IndexFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/IndexFieldMapper.java index 001313f4db545..70382856c2e80 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/IndexFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/IndexFieldMapper.java @@ -26,7 +26,9 @@ public class IndexFieldMapper extends MetadataFieldMapper { public static final String CONTENT_TYPE = "_index"; - public static final TypeParser PARSER = new FixedTypeParser(c -> new IndexFieldMapper()); + private static final IndexFieldMapper INSTANCE = new IndexFieldMapper(); + + public static final TypeParser PARSER = new FixedTypeParser(c -> INSTANCE); static final class IndexFieldType extends ConstantFieldType { diff --git a/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java index 1a3db9d4f96d5..530acfa8a917a 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java @@ -452,8 +452,8 @@ public boolean isDimension() { private final IndexAnalyzers indexAnalyzers; - protected KeywordFieldMapper(String simpleName, FieldType fieldType, KeywordFieldType mappedFieldType, - MultiFields multiFields, CopyTo copyTo, Builder builder) { + private KeywordFieldMapper(String simpleName, FieldType fieldType, KeywordFieldType mappedFieldType, + MultiFields multiFields, CopyTo copyTo, Builder builder) { super(simpleName, mappedFieldType, mappedFieldType.normalizer, multiFields, copyTo, builder.script.get() != null, builder.onScriptError.getValue()); assert 
fieldType.indexOptions().compareTo(IndexOptions.DOCS_AND_FREQS) <= 0; diff --git a/server/src/main/java/org/elasticsearch/index/mapper/NestedPathFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/NestedPathFieldMapper.java index d097d3de2309f..249fbae5ca36d 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/NestedPathFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/NestedPathFieldMapper.java @@ -23,8 +23,12 @@ public class NestedPathFieldMapper extends MetadataFieldMapper { public static final String NAME_PRE_V8 = "_type"; + public static final String NAME = "_nested_path"; + private static final NestedPathFieldMapper INSTANCE = new NestedPathFieldMapper(NAME); + private static final NestedPathFieldMapper INSTANCE_PRE_V8 = new NestedPathFieldMapper(NAME_PRE_V8); + public static String name(Version version) { if (version.before(Version.V_8_0_0)) { return NAME_PRE_V8; @@ -53,12 +57,13 @@ public static class Defaults { } } - public static final TypeParser PARSER = new FixedTypeParser(c -> new NestedPathFieldMapper(c.indexVersionCreated())); + public static final TypeParser PARSER = + new FixedTypeParser(c -> c.indexVersionCreated().before(Version.V_8_0_0) ? INSTANCE_PRE_V8 : INSTANCE); public static final class NestedPathFieldType extends StringFieldType { - private NestedPathFieldType(Version version) { - super(NestedPathFieldMapper.name(version), true, false, false, TextSearchInfo.SIMPLE_MATCH_ONLY, Collections.emptyMap()); + private NestedPathFieldType(String name) { + super(name, true, false, false, TextSearchInfo.SIMPLE_MATCH_ONLY, Collections.emptyMap()); } @Override @@ -77,8 +82,8 @@ public ValueFetcher valueFetcher(SearchExecutionContext context, String format) } } - private NestedPathFieldMapper(Version version) { - super(new NestedPathFieldType(version)); + private NestedPathFieldMapper(String name) { + super(new NestedPathFieldType(name)); } @Override diff --git a/server/src/main/java/org/elasticsearch/index/mapper/RoutingFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/RoutingFieldMapper.java index 905861db18469..e7a676098d9e4 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/RoutingFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/RoutingFieldMapper.java @@ -60,12 +60,12 @@ protected List> getParameters() { @Override public RoutingFieldMapper build() { - return new RoutingFieldMapper(required.getValue()); + return RoutingFieldMapper.get(required.getValue()); } } public static final TypeParser PARSER = new ConfigurableTypeParser( - c -> new RoutingFieldMapper(Defaults.REQUIRED), + c -> RoutingFieldMapper.get(Defaults.REQUIRED), c -> new Builder() ); @@ -90,6 +90,13 @@ public ValueFetcher valueFetcher(SearchExecutionContext context, String format) private final boolean required; + private static final RoutingFieldMapper REQUIRED = new RoutingFieldMapper(true); + private static final RoutingFieldMapper NOT_REQUIRED = new RoutingFieldMapper(false); + + public static RoutingFieldMapper get(boolean required) { + return required ? 
REQUIRED : NOT_REQUIRED; + } + private RoutingFieldMapper(boolean required) { super(RoutingFieldType.INSTANCE, Lucene.KEYWORD_ANALYZER); this.required = required; diff --git a/server/src/main/java/org/elasticsearch/index/mapper/SeqNoFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/SeqNoFieldMapper.java index f6ae8101577c1..76cb57665e4e6 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/SeqNoFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/SeqNoFieldMapper.java @@ -178,7 +178,7 @@ public IndexFieldData.Builder fielddataBuilder(String fullyQualifiedIndexName, S } } - public SeqNoFieldMapper() { + private SeqNoFieldMapper() { super(SeqNoFieldType.INSTANCE); } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java index 1cec8c8019ea3..88fd543071e9e 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java @@ -43,6 +43,8 @@ public class SourceFieldMapper extends MetadataFieldMapper { public static final String CONTENT_TYPE = "_source"; private final Function, Map> filter; + private static final SourceFieldMapper DEFAULT = new SourceFieldMapper(Defaults.ENABLED, Strings.EMPTY_ARRAY, Strings.EMPTY_ARRAY); + public static class Defaults { public static final String NAME = SourceFieldMapper.NAME; public static final boolean ENABLED = true; @@ -80,13 +82,16 @@ protected List> getParameters() { @Override public SourceFieldMapper build() { + if (enabled.getValue() == Defaults.ENABLED && includes.getValue().isEmpty() && excludes.getValue().isEmpty()) { + return DEFAULT; + } return new SourceFieldMapper(enabled.getValue(), includes.getValue().toArray(String[]::new), excludes.getValue().toArray(String[]::new)); } } - public static final TypeParser PARSER = new ConfigurableTypeParser(c -> new SourceFieldMapper(), c -> new Builder()); + public static final TypeParser PARSER = new ConfigurableTypeParser(c -> DEFAULT, c -> new Builder()); static final class SourceFieldType extends MappedFieldType { @@ -122,10 +127,6 @@ public Query termQuery(Object value, SearchExecutionContext context) { private final String[] includes; private final String[] excludes; - private SourceFieldMapper() { - this(Defaults.ENABLED, Strings.EMPTY_ARRAY, Strings.EMPTY_ARRAY); - } - private SourceFieldMapper(boolean enabled, String[] includes, String[] excludes) { super(new SourceFieldType(enabled)); this.enabled = enabled; diff --git a/server/src/test/java/org/elasticsearch/index/mapper/FieldNamesFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/FieldNamesFieldTypeTests.java index 4024525d48a3f..6b9395d6cf276 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/FieldNamesFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/FieldNamesFieldTypeTests.java @@ -27,7 +27,7 @@ public class FieldNamesFieldTypeTests extends ESTestCase { public void testTermQuery() { - FieldNamesFieldMapper.FieldNamesFieldType fieldNamesFieldType = new FieldNamesFieldMapper.FieldNamesFieldType(true); + FieldNamesFieldMapper.FieldNamesFieldType fieldNamesFieldType = FieldNamesFieldMapper.FieldNamesFieldType.get(true); KeywordFieldMapper.KeywordFieldType fieldType = new KeywordFieldMapper.KeywordFieldType("field_name"); Settings settings = settings(Version.CURRENT).build(); @@ -42,7 +42,7 @@ public void testTermQuery() { 
assertEquals(new TermQuery(new Term(FieldNamesFieldMapper.CONTENT_TYPE, "field_name")), termQuery); assertWarnings("terms query on the _field_names field is deprecated and will be removed, use exists query instead"); - FieldNamesFieldMapper.FieldNamesFieldType unsearchable = new FieldNamesFieldMapper.FieldNamesFieldType(false); + FieldNamesFieldMapper.FieldNamesFieldType unsearchable = FieldNamesFieldMapper.FieldNamesFieldType.get(false); IllegalStateException e = expectThrows(IllegalStateException.class, () -> unsearchable.termQuery("field_name", null)); assertEquals("Cannot run [exists] queries if the [_field_names] field is disabled", e.getMessage()); } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/filter/FiltersAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/filter/FiltersAggregatorTests.java index 9e3b53e3ef004..062aaa7276168 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/filter/FiltersAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/filter/FiltersAggregatorTests.java @@ -1101,7 +1101,7 @@ private void docValuesFieldExistsTestCase( ) throws IOException { AggregationBuilder builder = new FiltersAggregationBuilder("test", new KeyedFilter("q1", exists)); // Exists queries convert to MatchNone if this isn't defined - FieldNamesFieldMapper.FieldNamesFieldType fnft = new FieldNamesFieldMapper.FieldNamesFieldType(true); + FieldNamesFieldMapper.FieldNamesFieldType fnft = FieldNamesFieldMapper.FieldNamesFieldType.get(true); debugTestCase(builder, new MatchAllDocsQuery(), iw -> { for (int i = 0; i < 10; i++) { iw.addDocument(buildDocWithField.apply(i)); @@ -1127,7 +1127,7 @@ private void docValuesFieldExistsNoDataTestCase(MappedFieldType fieldType) throw } }; // Exists queries convert to MatchNone if this isn't defined - FieldNamesFieldMapper.FieldNamesFieldType fnft = new FieldNamesFieldMapper.FieldNamesFieldType(true); + FieldNamesFieldMapper.FieldNamesFieldType fnft = FieldNamesFieldMapper.FieldNamesFieldType.get(true); withAggregator(builder, new MatchAllDocsQuery(), buildIndex, (searcher, aggregator) -> { assertThat(aggregator, instanceOf(FilterByFilterAggregator.class)); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregatorTests.java index 9d96c8fbcf177..f1e96913618e6 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregatorTests.java @@ -844,7 +844,7 @@ public void testOneBucketOptimized() throws IOException { }; DateFieldMapper.DateFieldType ft = new DateFieldMapper.DateFieldType("f"); // Exists queries convert to MatchNone if this isn't defined - FieldNamesFieldMapper.FieldNamesFieldType fnft = new FieldNamesFieldMapper.FieldNamesFieldType(true); + FieldNamesFieldMapper.FieldNamesFieldType fnft = FieldNamesFieldMapper.FieldNamesFieldType.get(true); debugTestCase( builder, new MatchAllDocsQuery(), From 627c0ee9c67fc9dbc90a660d37096446aa2e0791 Mon Sep 17 00:00:00 2001 From: Lukas Wegmann Date: Tue, 31 Aug 2021 17:30:49 +0200 Subject: [PATCH 043/128] SQL: Fix disjunctions (and `IN`) with multiple date math expressions (#76424) * SQL: Fix disjunctions with multiple date math expressions * review comments * 
remove redundant FieldAttribute parameter * address comments and add more specs * fix typo --- .../sql/functions/date-time.asciidoc | 14 ++ .../xpack/eql/planner/QueryTranslator.java | 13 +- .../xpack/ql/optimizer/OptimizerRules.java | 3 +- .../ql/planner/ExpressionTranslator.java | 6 + .../ql/planner/ExpressionTranslators.java | 100 +++++------ .../ql/optimizer/OptimizerRulesTests.java | 13 ++ .../server/src/main/resources/date.csv-spec | 162 ++++++++++++++++-- .../src/main/resources/docs/docs.csv-spec | 25 +++ .../server/src/main/resources/filter.sql-spec | 2 + .../sql/planner/QueryTranslatorTests.java | 17 +- 10 files changed, 259 insertions(+), 96 deletions(-) diff --git a/docs/reference/sql/functions/date-time.asciidoc b/docs/reference/sql/functions/date-time.asciidoc index c43d926514fe7..f4727640d8a07 100644 --- a/docs/reference/sql/functions/date-time.asciidoc +++ b/docs/reference/sql/functions/date-time.asciidoc @@ -53,6 +53,20 @@ s|Description | `INTERVAL '45:01.23' MINUTES TO SECONDS` | 45 minutes, 1 second and 230000000 nanoseconds |=== +==== Comparison + +Date/time fields can be compared to <> expressions with the equality (`=`) and `IN` operators: + +[source, sql] +-------------------------------------------------- +include-tagged::{sql-specs}/docs/docs.csv-spec[dtDateMathEquals] +-------------------------------------------------- + +[source, sql] +-------------------------------------------------- +include-tagged::{sql-specs}/docs/docs.csv-spec[dtDateMathIn] +-------------------------------------------------- + ==== Operators Basic arithmetic operators (`+`, `-`, `*`) support date/time parameters as indicated below: diff --git a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/planner/QueryTranslator.java b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/planner/QueryTranslator.java index afd1de6da05d6..606cc15d25a11 100644 --- a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/planner/QueryTranslator.java +++ b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/planner/QueryTranslator.java @@ -112,25 +112,22 @@ public static void checkInsensitiveComparison(InsensitiveBinaryComparison bc) { } private static Query translate(InsensitiveBinaryComparison bc, TranslatorHandler handler) { + FieldAttribute field = checkIsFieldAttribute(bc.left()); Source source = bc.source(); - String name = handler.nameOf(bc.left()); Object value = valueOf(bc.right()); if (bc instanceof InsensitiveEquals || bc instanceof InsensitiveNotEquals) { - if (bc.left() instanceof FieldAttribute) { - // equality should always be against an exact match - // (which is important for strings) - name = ((FieldAttribute) bc.left()).exactAttribute().name(); - } + // equality should always be against an exact match + // (which is important for strings) + String name = field.exactAttribute().name(); + Query query = new TermQuery(source, name, value, true); if (bc instanceof InsensitiveNotEquals) { query = new NotQuery(source, query); } - return query; } - throw new QlIllegalArgumentException("Don't know how to translate binary comparison [{}] in [{}]", bc.right().nodeString(), bc); } } diff --git a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/optimizer/OptimizerRules.java b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/optimizer/OptimizerRules.java index acc9d74e2d3d5..e88b10eb8fcd7 100644 --- a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/optimizer/OptimizerRules.java +++ 
b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/optimizer/OptimizerRules.java @@ -356,7 +356,8 @@ private Expression propagate(And and) { } else if (ex instanceof Equals || ex instanceof NullEquals) { BinaryComparison otherEq = (BinaryComparison) ex; // equals on different values evaluate to FALSE - if (otherEq.right().foldable()) { + // ignore date/time fields as equality comparison might actually be a range check + if (otherEq.right().foldable() && DataTypes.isDateTime(otherEq.left().dataType()) == false) { for (BinaryComparison eq : equals) { if (otherEq.left().semanticEquals(eq.left())) { Integer comp = BinaryComparison.compare(eq.right().fold(), otherEq.right().fold()); diff --git a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/planner/ExpressionTranslator.java b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/planner/ExpressionTranslator.java index a96a25b2dca12..9024c0ceead1e 100644 --- a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/planner/ExpressionTranslator.java +++ b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/planner/ExpressionTranslator.java @@ -11,6 +11,7 @@ import org.elasticsearch.xpack.ql.expression.FieldAttribute; import org.elasticsearch.xpack.ql.querydsl.query.NestedQuery; import org.elasticsearch.xpack.ql.querydsl.query.Query; +import org.elasticsearch.xpack.ql.util.Check; import org.elasticsearch.xpack.ql.util.ReflectionUtils; public abstract class ExpressionTranslator { @@ -33,4 +34,9 @@ public static Query wrapIfNested(Query query, Expression exp) { } return query; } + + public static FieldAttribute checkIsFieldAttribute(Expression e) { + Check.isTrue(e instanceof FieldAttribute, "Expected a FieldAttribute but received [{}]", e); + return (FieldAttribute) e; + } } diff --git a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/planner/ExpressionTranslators.java b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/planner/ExpressionTranslators.java index e301619dc0fda..da902e26d3f8c 100644 --- a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/planner/ExpressionTranslators.java +++ b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/planner/ExpressionTranslators.java @@ -50,15 +50,14 @@ import org.elasticsearch.xpack.ql.querydsl.query.TermsQuery; import org.elasticsearch.xpack.ql.querydsl.query.WildcardQuery; import org.elasticsearch.xpack.ql.tree.Source; -import org.elasticsearch.xpack.ql.type.DataType; import org.elasticsearch.xpack.ql.type.DataTypes; import org.elasticsearch.xpack.ql.util.Check; -import org.elasticsearch.xpack.ql.util.CollectionUtils; import java.time.OffsetTime; import java.time.ZoneId; import java.time.ZonedDateTime; import java.time.temporal.TemporalAccessor; +import java.util.ArrayList; import java.util.LinkedHashSet; import java.util.List; import java.util.Set; @@ -220,13 +219,7 @@ public static Query doTranslate(IsNotNull isNotNull, TranslatorHandler handler) } private static Query translate(IsNotNull isNotNull, TranslatorHandler handler) { - Query query = null; - if (isNotNull.field() instanceof FieldAttribute) { - query = new ExistsQuery(isNotNull.source(), handler.nameOf(isNotNull.field())); - } else { - query = new ScriptQuery(isNotNull.source(), isNotNull.asScript()); - } - return query; + return new ExistsQuery(isNotNull.source(), handler.nameOf(isNotNull.field())); } } @@ -242,15 +235,7 @@ public static Query doTranslate(IsNull isNull, TranslatorHandler handler) { } private static Query translate(IsNull isNull, TranslatorHandler handler) { - 
Query query = null; - - if (isNull.field() instanceof FieldAttribute) { - query = new NotQuery(isNull.source(), new ExistsQuery(isNull.source(), handler.nameOf(isNull.field()))); - } else { - query = new ScriptQuery(isNull.source(), isNull.asScript()); - } - - return query; + return new NotQuery(isNull.source(), new ExistsQuery(isNull.source(), handler.nameOf(isNull.field()))); } } @@ -274,9 +259,10 @@ public static Query doTranslate(BinaryComparison bc, TranslatorHandler handler) return handler.wrapFunctionQuery(bc, bc.left(), () -> translate(bc, handler)); } - private static Query translate(BinaryComparison bc, TranslatorHandler handler) { + static Query translate(BinaryComparison bc, TranslatorHandler handler) { + FieldAttribute field = checkIsFieldAttribute(bc.left()); Source source = bc.source(); - String name = handler.nameOf(bc.left()); + String name = handler.nameOf(field); Object value = valueOf(bc.right()); String format = null; boolean isDateLiteralComparison = false; @@ -300,7 +286,7 @@ private static Query translate(BinaryComparison bc, TranslatorHandler handler) { } ZoneId zoneId = null; - if (DataTypes.isDateTime(bc.left().dataType())) { + if (DataTypes.isDateTime(field.dataType())) { zoneId = bc.zoneId(); } if (bc instanceof GreaterThan) { @@ -316,11 +302,10 @@ private static Query translate(BinaryComparison bc, TranslatorHandler handler) { return new RangeQuery(source, name, null, false, value, true, format, zoneId); } if (bc instanceof Equals || bc instanceof NullEquals || bc instanceof NotEquals) { - if (bc.left() instanceof FieldAttribute) { - // equality should always be against an exact match - // (which is important for strings) - name = ((FieldAttribute) bc.left()).exactAttribute().name(); - } + // equality should always be against an exact match + // (which is important for strings) + name = field.exactAttribute().name(); + Query query; if (isDateLiteralComparison) { // dates equality uses a range query because it's the one that has a "format" parameter @@ -346,11 +331,11 @@ protected Query asQuery(Range r, TranslatorHandler handler) { return doTranslate(r, handler); } - public static Query doTranslate(Range r, TranslatorHandler handler) { + public static Query doTranslate(Range r, TranslatorHandler handler) { return handler.wrapFunctionQuery(r, r.value(), () -> translate(r, handler)); } - private static RangeQuery translate(Range r, TranslatorHandler handler) { + private static RangeQuery translate(Range r, TranslatorHandler handler) { Object lower = valueOf(r.lower()); Object upper = valueOf(r.upper()); String format = null; @@ -392,41 +377,36 @@ public static Query doTranslate(In in, TranslatorHandler handler) { } private static Query translate(In in, TranslatorHandler handler) { - Query q; - if (in.value() instanceof FieldAttribute) { - // equality should always be against an exact match (which is important for strings) - FieldAttribute fa = (FieldAttribute) in.value(); - DataType dt = fa.dataType(); - - List list = in.list(); - Set set = new LinkedHashSet<>(CollectionUtils.mapSize(list.size())); - list.forEach(e -> { - // TODO: this needs to be handled inside the optimizer - if (DataTypes.isNull(e.dataType()) == false) { - set.add(handler.convert(valueOf(e), dt)); - } - }); - - if (DataTypes.isDateTime(dt)) { - DateFormatter formatter = DateFormatter.forPattern(DATE_FORMAT); - - q = null; - for (Object o : set) { - assert o instanceof ZonedDateTime : "expected a ZonedDateTime, but got: " + o.getClass().getName(); - // see comment in Ranges#doTranslate() as to 
why formatting as String is required - String zdt = formatter.format((ZonedDateTime) o); - RangeQuery right = new RangeQuery( - in.source(), fa.exactAttribute().name(), - zdt, true, zdt, true, formatter.pattern(), in.zoneId()); - q = q == null ? right : new BoolQuery(in.source(), false, q, right); + FieldAttribute field = checkIsFieldAttribute(in.value()); + boolean isDateTimeComparison = DataTypes.isDateTime(field.dataType()); + + Set terms = new LinkedHashSet<>(); + List queries = new ArrayList<>(); + + for (Expression rhs : in.list()) { + if (DataTypes.isNull(rhs.dataType()) == false) { + if (isDateTimeComparison) { + // delegates to BinaryComparisons translator to ensure consistent handling of date and time values + Query query = BinaryComparisons.translate(new Equals(in.source(), in.value(), rhs, in.zoneId()), handler); + + if (query instanceof TermQuery) { + terms.add(((TermQuery) query).value()); + } else { + queries.add(query); + } + } else { + terms.add(valueOf(rhs)); } - } else { - q = new TermsQuery(in.source(), fa.exactAttribute().name(), set); } - } else { - q = new ScriptQuery(in.source(), in.asScript()); } - return q; + + if (terms.isEmpty() == false) { + String fieldName = field.exactAttribute().name(); + queries.add(new TermsQuery(in.source(), fieldName, terms)); + } + + return queries.stream() + .reduce((q1, q2) -> or(in.source(), q1, q2)).get(); } } diff --git a/x-pack/plugin/ql/src/test/java/org/elasticsearch/xpack/ql/optimizer/OptimizerRulesTests.java b/x-pack/plugin/ql/src/test/java/org/elasticsearch/xpack/ql/optimizer/OptimizerRulesTests.java index 510302da76c31..ee3429eb5cbd7 100644 --- a/x-pack/plugin/ql/src/test/java/org/elasticsearch/xpack/ql/optimizer/OptimizerRulesTests.java +++ b/x-pack/plugin/ql/src/test/java/org/elasticsearch/xpack/ql/optimizer/OptimizerRulesTests.java @@ -55,6 +55,7 @@ import org.elasticsearch.xpack.ql.tree.NodeInfo; import org.elasticsearch.xpack.ql.tree.Source; import org.elasticsearch.xpack.ql.type.DataType; +import org.elasticsearch.xpack.ql.type.DataTypes; import org.elasticsearch.xpack.ql.type.EsField; import org.elasticsearch.xpack.ql.util.StringUtils; @@ -1413,6 +1414,18 @@ public void testPropagateEquals_VarEq2OrVarRangeGt3Lt4OrVarGt2OrVarNe2() { assertEquals(TRUE, exp); } + // a == 1 AND a == 2 -> nop for date/time fields + public void testPropagateEquals_ignoreDateTimeFields() { + FieldAttribute fa = getFieldAttribute("a", DataTypes.DATETIME); + Equals eq1 = equalsOf(fa, ONE); + Equals eq2 = equalsOf(fa, TWO); + And and = new And(EMPTY, eq1, eq2); + + PropagateEquals rule = new PropagateEquals(); + Expression exp = rule.rule(and); + assertEquals(and, exp); + } + // // Like / Regex // diff --git a/x-pack/plugin/sql/qa/server/src/main/resources/date.csv-spec b/x-pack/plugin/sql/qa/server/src/main/resources/date.csv-spec index 06a14e45b9df5..438a90ecef426 100644 --- a/x-pack/plugin/sql/qa/server/src/main/resources/date.csv-spec +++ b/x-pack/plugin/sql/qa/server/src/main/resources/date.csv-spec @@ -42,16 +42,16 @@ SELECT first_name FROM test_emp WHERE hire_date > CURRENT_DATE() - INTERVAL 45 Y first_name ----------------- -Alejandro -Amabile -Anneke -Anoosh -Arumugam -Basil -Berhard -Berni -Bezalel -Bojan +Alejandro +Amabile +Anneke +Anoosh +Arumugam +Basil +Berhard +Berni +Bezalel +Bojan ; currentDateFilterScript @@ -169,9 +169,9 @@ selectDateParse schema::date1:date SELECT DATE_PARSE('07/04/2020', 'dd/MM/uuuu') AS date1; - date1 + date1 ------------ -2020-04-07 +2020-04-07 ; @@ -266,3 +266,141 @@ HAVING 
DATE_PARSE(DATETIME_FORMAT(MAX(birth_date), 'dd/MM/uuuu'), 'dd/MM/uuuu') 1962-12-29 00:00:00.000Z | 12 null | null ; + +filterDateMath +SELECT emp_no FROM test_emp WHERE hire_date = '2021-02-03||-27y/y'; + + emp_no +--------------- +10008 +10030 +10044 +10085 +; + +filterDateMathIn +SELECT emp_no FROM test_emp WHERE hire_date IN ('2021-02-03||-27y/y'); + + emp_no +--------------- +10008 +10030 +10044 +10085 +; + +filterDateMathDisjunction +SELECT emp_no FROM test_emp WHERE hire_date = '2021-02-03||-27y/y' OR hire_date = '2021-02-03||-28y/y'; + + emp_no +--------------- +10008 +10017 +10030 +10040 +10042 +10044 +10085 +; + +filterInWithMultipleDateMath +SELECT emp_no FROM test_emp WHERE hire_date IN ('2021-02-03||-27y/y', '2021-02-03||-28y/y'); + + emp_no +--------------- +10008 +10017 +10030 +10040 +10042 +10044 +10085 +; + +filterDateMathDisjunctionWithExactDate +SELECT emp_no FROM test_emp WHERE hire_date = '2021-02-03||-27y/y' OR hire_date = '1993-08-03'::datetime; + + emp_no +--------------- +10008 +10017 +10030 +10044 +10085 +; + +filterInWithDateMathAndDateTime +SELECT emp_no FROM test_emp WHERE hire_date IN ('2021-02-03||-27y/y', '1993-08-03'::datetime); + + emp_no +--------------- +10008 +10017 +10030 +10044 +10085 +; + +filterMixedDatetimeDisjunction +SELECT emp_no FROM test_emp +WHERE hire_date = '2000-02-18||-1y/y' + OR hire_date = '1997-05-19T00:00:00.000Z'::datetime + OR hire_date = '1996-11-05T00:00:00.000Z' + OR hire_date = '1995-12-15'; + + emp_no +--------------- +10019 +10024 +10084 +10093 +; + +filterMixedDatetimeIn +SELECT emp_no FROM test_emp +WHERE hire_date IN ('2000-02-18||-1y/y', '1997-05-19T00:00:00.000Z'::datetime, '1996-11-05T00:00:00.000Z', '1995-12-15'); + + emp_no +--------------- +10019 +10024 +10084 +10093 +; + +filterDateMathDisjunctionWithIn +SELECT emp_no FROM test_emp WHERE hire_date = '2021-02-03||-27y/y' OR hire_date = '1992-12-18'::datetime + OR hire_date IN ('2021-02-03||-28y/y', '1992-01-03T00:00:00.000Z'::datetime); + + emp_no +--------------- +10008 +10012 +10017 +10030 +10036 +10040 +10042 +10044 +10085 +; + +filterDateMathConjunction +SELECT emp_no FROM test_emp WHERE hire_date = '2021-09-03||-27y/y' AND hire_date = '2021-09-03||-27y/M'; + + emp_no +--------------- +10008 +; + +// currently not supported +filterDateMathDerivedValue-Ignore +SELECT emp_no FROM test_emp WHERE DATE_ADD('day', 1, hire_date) = '2021-02-03||-27y/y'; + + emp_no +--------------- +10008 +10030 +10044 +10085 +; diff --git a/x-pack/plugin/sql/qa/server/src/main/resources/docs/docs.csv-spec b/x-pack/plugin/sql/qa/server/src/main/resources/docs/docs.csv-spec index d0e68900872f1..c93c0709a6f98 100644 --- a/x-pack/plugin/sql/qa/server/src/main/resources/docs/docs.csv-spec +++ b/x-pack/plugin/sql/qa/server/src/main/resources/docs/docs.csv-spec @@ -883,6 +883,31 @@ null |10 // /////////////////////////////// +dtDateMathEquals +// tag::dtDateMathEquals +SELECT hire_date FROM emp WHERE hire_date = '1987-03-01||+4y/y'; + + hire_date +------------------------ +1991-01-26T00:00:00.000Z +1991-10-22T00:00:00.000Z +1991-09-01T00:00:00.000Z +1991-06-26T00:00:00.000Z +1991-08-30T00:00:00.000Z +1991-12-01T00:00:00.000Z +// end::dtDateMathEquals +; + +dtDateMathIn +// tag::dtDateMathIn +SELECT hire_date FROM emp WHERE hire_date IN ('1987-03-01||+2y/M', '1987-03-01||+3y/M'); + + hire_date +------------------------ +1989-03-31T00:00:00.000Z +1990-03-02T00:00:00.000Z +// end::dtDateMathIn +; dtIntervalPlusInterval // tag::dtIntervalPlusInterval diff --git 
a/x-pack/plugin/sql/qa/server/src/main/resources/filter.sql-spec b/x-pack/plugin/sql/qa/server/src/main/resources/filter.sql-spec index 22a6cb2c139df..388f2280a98e0 100644 --- a/x-pack/plugin/sql/qa/server/src/main/resources/filter.sql-spec +++ b/x-pack/plugin/sql/qa/server/src/main/resources/filter.sql-spec @@ -131,6 +131,8 @@ whereWithInAndNullHandling1 SELECT last_name l FROM "test_emp" WHERE languages in (2, 10)AND (emp_no = 10018 OR emp_no = 10019 OR emp_no = 10020) ORDER BY emp_no; whereWithInAndNullHandling2 SELECT last_name l FROM "test_emp" WHERE languages in (2, null, 10) AND (emp_no = 10018 OR emp_no = 10019 OR emp_no = 10020) ORDER BY emp_no; +whereWithInNull +SELECT last_name l FROM test_emp WHERE languages in (null); whereWithInAndMultipleValueTypes SELECT last_name l FROM test_emp WHERE hire_date in('1986-06-26T00:00:00.000Z'::datetime, '1986-08-28T00:00:00.000Z'); diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/planner/QueryTranslatorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/planner/QueryTranslatorTests.java index 7fa9da72eebc6..6987d2a5277e9 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/planner/QueryTranslatorTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/planner/QueryTranslatorTests.java @@ -6,8 +6,8 @@ */ package org.elasticsearch.xpack.sql.planner; -import org.elasticsearch.core.Tuple; import org.elasticsearch.common.time.DateFormatter; +import org.elasticsearch.core.Tuple; import org.elasticsearch.index.query.ExistsQueryBuilder; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.bucket.filter.FilterAggregationBuilder; @@ -112,7 +112,6 @@ import static org.hamcrest.Matchers.everyItem; import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.instanceOf; -import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.startsWith; public class QueryTranslatorTests extends ESTestCase { @@ -408,18 +407,6 @@ public void testSameExpressionWithoutAlias() { assertNotEquals(eqe.output().get(0).id(), eqe.output().get(1).id()); } - public void testInOutOfRangeValues() { - QlIllegalArgumentException ex = expectThrows(QlIllegalArgumentException.class, - () -> optimizeAndPlan("SELECT int FROM test WHERE int IN (1, 2, 3, " + Long.MAX_VALUE + ", 5, 6, 7)")); - assertThat(ex.getMessage(), is("[" + Long.MAX_VALUE + "] out of [integer] range")); - } - - public void testInInRangeValues() { - TestContext testContext = new TestContext("mapping-numeric.json"); - PhysicalPlan p = testContext.optimizeAndPlan("SELECT long FROM test WHERE long IN (1, 2, 3, " + Long.MAX_VALUE + ", 5, 6, 7)"); - assertEquals(EsQueryExec.class, p.getClass()); - } - // Datetime /////////// public void testTermEqualityForDateWithLiteralDate() { @@ -669,7 +656,7 @@ private void testDateRangeWithCurrentFunctionsAndRangeOptimization( public void testDateRangeWithESDateMath() { ZoneId zoneId = randomZone(); String operator = randomFrom(">", ">=", "<", "<=", "=", "!="); - String dateMath = randomFrom("now", "now/d", "now/h", "now-2h", "now+2h", "now-5d", "now+5d"); + String dateMath = randomFrom("now", "now/d", "now/h", "now-2h", "now+2h", "now-5d", "now+5d", "2021-01-01||/M"); LogicalPlan p = plan("SELECT some.string FROM test WHERE date" + operator + "'" + dateMath + "'", zoneId); assertTrue(p instanceof Project); p = ((Project) p).child(); From ead0020497b1aeb989e940c239b5447f7c571279 Mon Sep 17 00:00:00 
2001 From: David Turner Date: Tue, 31 Aug 2021 17:35:32 +0100 Subject: [PATCH 044/128] Tidy up ClusterApplierService (#76837) This commit cleans up some cruft left over from older versions of the `ClusterApplierService`: - `UpdateTask` doesn't need to implement lots of interfaces and give access to its internals, it can just pass appropriate arguments to `runTasks()`. - No need for the `runOnApplierThread` override with a default priority, just have callers be explicit about the priority. - `submitStateUpdateTask` takes a config which never has a timeout, may as well just pass the priority and remove the dead code - `SafeClusterApplyListener` doesn't need to be a `ClusterApplyListener`, may as well just be an `ActionListener`. - No implementations of `ClusterApplyListener` care about the source argument, may as well drop it. - Adds assertions to prevent `ClusterApplyListener` implementations from throwing exceptions since we just swallow them. - No need to override getting the current time in the `ClusterApplierService`, we can control this from the `ThreadPool`. --- .../store/IndicesStoreIntegrationIT.java | 4 +- .../cluster/ClusterStateTaskListener.java | 18 +- .../cluster/coordination/Coordinator.java | 10 +- .../cluster/service/ClusterApplier.java | 21 +- .../service/ClusterApplierService.java | 230 +++++++++-------- .../indices/store/IndicesStore.java | 13 +- ...rnalClusterInfoServiceSchedulingTests.java | 4 +- .../coordination/NoOpClusterApplier.java | 2 +- .../health/ClusterStateHealthTests.java | 2 +- .../routing/BatchedRerouteServiceTests.java | 13 +- .../service/ClusterApplierServiceTests.java | 238 +++++++++--------- .../InternalSnapshotsInfoServiceTests.java | 6 +- .../AbstractCoordinatorTestCase.java | 4 +- .../test/ClusterServiceUtils.java | 8 +- .../BlockClusterStateProcessing.java | 9 +- .../SlowClusterStateProcessing.java | 9 +- 16 files changed, 324 insertions(+), 267 deletions(-) diff --git a/server/src/internalClusterTest/java/org/elasticsearch/indices/store/IndicesStoreIntegrationIT.java b/server/src/internalClusterTest/java/org/elasticsearch/indices/store/IndicesStoreIntegrationIT.java index 3600aaecb0b78..dc2b7ab0303b7 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/indices/store/IndicesStoreIntegrationIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/indices/store/IndicesStoreIntegrationIT.java @@ -432,12 +432,12 @@ public void testShardActiveElseWhere() throws Exception { CountDownLatch latch = new CountDownLatch(1); clusterApplierService.onNewClusterState("test", () -> newState, new ClusterApplyListener() { @Override - public void onSuccess(String source) { + public void onSuccess() { latch.countDown(); } @Override - public void onFailure(String source, Exception e) { + public void onFailure(Exception e) { latch.countDown(); throw new AssertionError("Expected a proper response", e); } diff --git a/server/src/main/java/org/elasticsearch/cluster/ClusterStateTaskListener.java b/server/src/main/java/org/elasticsearch/cluster/ClusterStateTaskListener.java index a1c265295b3e8..92973f32dbda0 100644 --- a/server/src/main/java/org/elasticsearch/cluster/ClusterStateTaskListener.java +++ b/server/src/main/java/org/elasticsearch/cluster/ClusterStateTaskListener.java @@ -14,18 +14,18 @@ public interface ClusterStateTaskListener { /** * A callback for when task execution fails. 
* - * Implementations of this callback should not throw exceptions: an exception thrown here is logged by the master service at {@code - * ERROR} level and otherwise ignored. If log-and-ignore is the right behaviour then implementations should do so themselves, typically - * using a more specific logger and at a less dramatic log level. + * Implementations of this callback must not throw exceptions: an exception thrown here is logged by the master service at {@code ERROR} + * level and otherwise ignored, except in tests where it raises an {@link AssertionError}. If log-and-ignore is the right behaviour then + * implementations must do so themselves, typically using a more specific logger and at a less dramatic log level. */ void onFailure(String source, Exception e); /** * A callback for when the task was rejected because the processing node is no longer the elected master. * - * Implementations of this callback should not throw exceptions: an exception thrown here is logged by the master service at {@code - * ERROR} level and otherwise ignored. If log-and-ignore is the right behaviour then implementations should do so themselves, typically - * using a more specific logger and at a less dramatic log level. + * Implementations of this callback must not throw exceptions: an exception thrown here is logged by the master service at {@code ERROR} + * level and otherwise ignored, except in tests where it raises an {@link AssertionError}. If log-and-ignore is the right behaviour then + * implementations must do so themselves, typically using a more specific logger and at a less dramatic log level. */ default void onNoLongerMaster(String source) { onFailure(source, new NotMasterException("no longer master. source: [" + source + "]")); @@ -35,9 +35,9 @@ default void onNoLongerMaster(String source) { * Called when the result of the {@link ClusterStateTaskExecutor#execute(ClusterState, List)} have been processed * properly by all listeners. * - * Implementations of this callback should not throw exceptions: an exception thrown here is logged by the master service at {@code - * ERROR} level and otherwise ignored. If log-and-ignore is the right behaviour then implementations should do so themselves, typically - * using a more specific logger and at a less dramatic log level. + * Implementations of this callback must not throw exceptions: an exception thrown here is logged by the master service at {@code ERROR} + * level and otherwise ignored, except in tests where it raises an {@link AssertionError}. If log-and-ignore is the right behaviour then + * implementations must do so themselves, typically using a more specific logger and at a less dramatic log level. 
*/ default void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) { } diff --git a/server/src/main/java/org/elasticsearch/cluster/coordination/Coordinator.java b/server/src/main/java/org/elasticsearch/cluster/coordination/Coordinator.java index 10132b1808ee7..120105ffd4e42 100644 --- a/server/src/main/java/org/elasticsearch/cluster/coordination/Coordinator.java +++ b/server/src/main/java/org/elasticsearch/cluster/coordination/Coordinator.java @@ -263,12 +263,12 @@ private void handleApplyCommit(ApplyCommitRequest applyCommitRequest, ActionList new ClusterApplyListener() { @Override - public void onFailure(String source, Exception e) { + public void onFailure(Exception e) { applyListener.onFailure(e); } @Override - public void onSuccess(String source) { + public void onSuccess() { applyListener.onResponse(null); } }); @@ -532,7 +532,7 @@ void becomeCandidate(String method) { if (applierState.nodes().getMasterNodeId() != null) { applierState = clusterStateWithNoMasterBlock(applierState); - clusterApplier.onNewClusterState("becoming candidate: " + method, () -> applierState, (source, e) -> { + clusterApplier.onNewClusterState("becoming candidate: " + method, () -> applierState, e -> { }); } } @@ -1382,7 +1382,7 @@ public void onResponse(Void ignore) { clusterApplier.onNewClusterState(CoordinatorPublication.this.toString(), () -> applierState, new ClusterApplyListener() { @Override - public void onFailure(String source, Exception e) { + public void onFailure(Exception e) { synchronized (mutex) { removePublicationAndPossiblyBecomeCandidate("clusterApplier#onNewClusterState"); } @@ -1392,7 +1392,7 @@ public void onFailure(String source, Exception e) { } @Override - public void onSuccess(String source) { + public void onSuccess() { clusterStatePublicationEvent.setMasterApplyElapsedMillis( transportService.getThreadPool().rawRelativeTimeInMillis() - completionTimeMillis); synchronized (mutex) { diff --git a/server/src/main/java/org/elasticsearch/cluster/service/ClusterApplier.java b/server/src/main/java/org/elasticsearch/cluster/service/ClusterApplier.java index be7bdda88dd1a..666738c0d7b87 100644 --- a/server/src/main/java/org/elasticsearch/cluster/service/ClusterApplier.java +++ b/server/src/main/java/org/elasticsearch/cluster/service/ClusterApplier.java @@ -33,17 +33,26 @@ public interface ClusterApplier { */ interface ClusterApplyListener { /** - * Called on successful cluster state application - * @param source information where the cluster state came from + * Called on successful cluster state application. + * + * Implementations of this callback must not throw exceptions: an exception thrown here is logged by the cluster applier service at + * {@code ERROR} level and otherwise ignored, except in tests where it raises an {@link AssertionError}. If log-and-ignore is the + * right behaviour then implementations must do so themselves, typically using a more specific logger and at a less dramatic log + * level. */ - default void onSuccess(String source) { + default void onSuccess() { } /** - * Called on failure during cluster state application - * @param source information where the cluster state came from + * Called on failure during cluster state application. + * + * Implementations of this callback must not throw exceptions: an exception thrown here is logged by the cluster applier service at + * {@code ERROR} level and otherwise ignored, except in tests where it raises an {@link AssertionError}. 
If log-and-ignore is the + * right behaviour then implementations must do so themselves, typically using a more specific logger and at a less dramatic log + * level. + * * @param e exception that occurred */ - void onFailure(String source, Exception e); + void onFailure(Exception e); } } diff --git a/server/src/main/java/org/elasticsearch/cluster/service/ClusterApplierService.java b/server/src/main/java/org/elasticsearch/cluster/service/ClusterApplierService.java index 21743d0a9462a..69ee6fff5830d 100644 --- a/server/src/main/java/org/elasticsearch/cluster/service/ClusterApplierService.java +++ b/server/src/main/java/org/elasticsearch/cluster/service/ClusterApplierService.java @@ -11,30 +11,30 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.message.ParameterizedMessage; +import org.elasticsearch.action.ActionListener; import org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateApplier; import org.elasticsearch.cluster.ClusterStateListener; import org.elasticsearch.cluster.ClusterStateObserver; -import org.elasticsearch.cluster.ClusterStateTaskConfig; import org.elasticsearch.cluster.LocalNodeMasterListener; import org.elasticsearch.cluster.NodeConnectionsService; import org.elasticsearch.cluster.TimeoutClusterStateListener; -import org.elasticsearch.cluster.metadata.ProcessClusterEventTimeoutException; import org.elasticsearch.cluster.node.DiscoveryNodes; -import org.elasticsearch.core.Nullable; import org.elasticsearch.common.Priority; import org.elasticsearch.common.StopWatch; import org.elasticsearch.common.component.AbstractLifecycleComponent; -import org.elasticsearch.core.Releasable; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.core.TimeValue; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; import org.elasticsearch.common.util.concurrent.PrioritizedEsThreadPoolExecutor; import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.core.Releasable; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.indices.store.IndicesStore; import org.elasticsearch.threadpool.Scheduler; import org.elasticsearch.threadpool.ThreadPool; @@ -64,7 +64,7 @@ public class ClusterApplierService extends AbstractLifecycleComponent implements public static final String CLUSTER_UPDATE_THREAD_NAME = "clusterApplierService#updateTask"; private final ClusterSettings clusterSettings; - protected final ThreadPool threadPool; + private final ThreadPool threadPool; private volatile TimeValue slowTaskLoggingThreshold; @@ -131,25 +131,24 @@ protected PrioritizedEsThreadPoolExecutor createThreadPoolExecutor() { PrioritizedEsThreadPoolExecutor.StarvationWatcher.NOOP_STARVATION_WATCHER); } - class UpdateTask extends SourcePrioritizedRunnable implements Function { - final ClusterApplyListener listener; - final Function updateFunction; + class UpdateTask extends SourcePrioritizedRunnable { + private final ActionListener listener; + private final Function updateFunction; - UpdateTask(Priority priority, String source, ClusterApplyListener listener, - Function updateFunction) { + UpdateTask( + Priority priority, + String source, + ActionListener listener, + 
Function updateFunction + ) { super(priority, source); this.listener = listener; this.updateFunction = updateFunction; } - @Override - public ClusterState apply(ClusterState clusterState) { - return updateFunction.apply(clusterState); - } - @Override public void run() { - runTask(this); + runTask(source(), updateFunction, listener); } } @@ -175,7 +174,7 @@ protected synchronized void doClose() { * Should be renamed to appliedClusterState */ public ClusterState state() { - assert assertNotCalledFromClusterStateApplier("the applied cluster state is not yet available"); + assert assertNotCalledFromClusterStateApplier(); ClusterState clusterState = this.state.get(); assert clusterState != null : "initial cluster state not set yet"; return clusterState; @@ -280,9 +279,22 @@ public void run() { } } - public void runOnApplierThread(final String source, Consumer clusterStateConsumer, - final ClusterApplyListener listener, Priority priority) { - submitStateUpdateTask(source, ClusterStateTaskConfig.build(priority), + /** + * Run the given clusterStateConsumer on the applier thread. Should only be used in tests and by {@link IndicesStore} when it's deleting + * the data behind a shard that moved away from a node. + * + * @param priority {@link Priority#HIGH} unless in tests. + */ + // TODO get rid of this, make it so that shard data can be deleted without blocking the applier thread. + public void runOnApplierThread( + String source, + Priority priority, + Consumer clusterStateConsumer, + ClusterApplyListener listener + ) { + submitStateUpdateTask( + source, + priority, (clusterState) -> { clusterStateConsumer.accept(clusterState); return clusterState; @@ -290,51 +302,52 @@ public void runOnApplierThread(final String source, Consumer clust listener); } - public void runOnApplierThread(final String source, Consumer clusterStateConsumer, - final ClusterApplyListener listener) { - runOnApplierThread(source, clusterStateConsumer, listener, Priority.HIGH); - } - public ThreadPool threadPool() { return threadPool; } @Override - public void onNewClusterState(final String source, final Supplier clusterStateSupplier, - final ClusterApplyListener listener) { - Function applyFunction = currentState -> { - ClusterState nextState = clusterStateSupplier.get(); - if (nextState != null) { - return nextState; - } else { - return currentState; - } - }; - submitStateUpdateTask(source, ClusterStateTaskConfig.build(Priority.HIGH), applyFunction, listener); + public void onNewClusterState( + final String source, + final Supplier clusterStateSupplier, + final ClusterApplyListener listener + ) { + submitStateUpdateTask( + source, + Priority.HIGH, + currentState -> { + ClusterState nextState = clusterStateSupplier.get(); + if (nextState != null) { + return nextState; + } else { + return currentState; + } + }, listener); } - private void submitStateUpdateTask(final String source, final ClusterStateTaskConfig config, - final Function executor, - final ClusterApplyListener listener) { + private void submitStateUpdateTask( + final String source, + final Priority priority, + final Function clusterStateUpdate, + final ClusterApplyListener listener + ) { if (lifecycle.started() == false) { return; } + final ThreadContext threadContext = threadPool.getThreadContext(); - final Supplier supplier = threadContext.newRestorableContext(true); + final Supplier storedContextSupplier = threadContext.newRestorableContext(true); + try (ThreadContext.StoredContext ignore = threadContext.stashContext()) { threadContext.markAsSystemContext(); - 
final UpdateTask updateTask = new UpdateTask(config.priority(), source, - new SafeClusterApplyListener(listener, supplier, logger), executor); - if (config.timeout() != null) { - threadPoolExecutor.execute(updateTask, config.timeout(), - () -> threadPool.generic().execute( - () -> listener.onFailure(source, new ProcessClusterEventTimeoutException(config.timeout(), source)))); - } else { - threadPoolExecutor.execute(updateTask); - } + threadPoolExecutor.execute(new UpdateTask( + priority, + source, + new ClusterApplyActionListener(source, listener, storedContextSupplier), + clusterStateUpdate)); } catch (EsRejectedExecutionException e) { - // ignore cases where we are shutting down..., there is really nothing interesting - // to be done here... + assert lifecycle.stoppedOrClosed() : e; + // ignore cases where we are shutting down..., there is really nothing interesting to be done here... if (lifecycle.stoppedOrClosed() == false) { throw e; } @@ -349,7 +362,7 @@ public static boolean assertNotClusterStateUpdateThread(String reason) { } /** asserts that the current stack trace does NOT involve a cluster state applier */ - private static boolean assertNotCalledFromClusterStateApplier(String reason) { + private static boolean assertNotCalledFromClusterStateApplier() { if (Thread.currentThread().getName().contains(CLUSTER_UPDATE_THREAD_NAME)) { for (StackTraceElement element : Thread.currentThread().getStackTrace()) { final String className = element.getClassName(); @@ -359,87 +372,95 @@ private static boolean assertNotCalledFromClusterStateApplier(String reason) { return true; } else if (className.equals(ClusterApplierService.class.getName()) && methodName.equals("callClusterStateAppliers")) { - throw new AssertionError("should not be called by a cluster state applier. 
reason [" + reason + "]"); + throw new AssertionError("should not be called by a cluster state applier: the applied state is not yet available"); } } } return true; } - private void runTask(UpdateTask task) { + private void runTask(String source, Function updateFunction, ActionListener clusterApplyListener) { if (lifecycle.started() == false) { - logger.debug("processing [{}]: ignoring, cluster applier service not started", task.source); + logger.debug("processing [{}]: ignoring, cluster applier service not started", source); return; } - logger.debug("processing [{}]: execute", task.source); + logger.debug("processing [{}]: execute", source); final ClusterState previousClusterState = state.get(); - long startTimeMS = currentTimeInMillis(); + final long startTimeMillis = threadPool.relativeTimeInMillis(); final StopWatch stopWatch = new StopWatch(); final ClusterState newClusterState; try { - try (Releasable ignored = stopWatch.timing("running task [" + task.source + ']')) { - newClusterState = task.apply(previousClusterState); + try (Releasable ignored = stopWatch.timing("running task [" + source + ']')) { + newClusterState = updateFunction.apply(previousClusterState); } } catch (Exception e) { - TimeValue executionTime = TimeValue.timeValueMillis(Math.max(0, currentTimeInMillis() - startTimeMS)); + TimeValue executionTime = getTimeSince(startTimeMillis); logger.trace(() -> new ParameterizedMessage( "failed to execute cluster state applier in [{}], state:\nversion [{}], source [{}]\n{}", - executionTime, previousClusterState.version(), task.source, previousClusterState), e); - warnAboutSlowTaskIfNeeded(executionTime, task.source, stopWatch); - task.listener.onFailure(task.source, e); + executionTime, previousClusterState.version(), source, previousClusterState), e); + warnAboutSlowTaskIfNeeded(executionTime, source, stopWatch); + clusterApplyListener.onFailure(e); return; } if (previousClusterState == newClusterState) { - TimeValue executionTime = TimeValue.timeValueMillis(Math.max(0, currentTimeInMillis() - startTimeMS)); - logger.debug("processing [{}]: took [{}] no change in cluster state", task.source, executionTime); - warnAboutSlowTaskIfNeeded(executionTime, task.source, stopWatch); - task.listener.onSuccess(task.source); + TimeValue executionTime = getTimeSince(startTimeMillis); + logger.debug("processing [{}]: took [{}] no change in cluster state", source, executionTime); + warnAboutSlowTaskIfNeeded(executionTime, source, stopWatch); + clusterApplyListener.onResponse(null); } else { if (logger.isTraceEnabled()) { - logger.debug("cluster state updated, version [{}], source [{}]\n{}", newClusterState.version(), task.source, + logger.debug("cluster state updated, version [{}], source [{}]\n{}", newClusterState.version(), source, newClusterState); } else { - logger.debug("cluster state updated, version [{}], source [{}]", newClusterState.version(), task.source); + logger.debug("cluster state updated, version [{}], source [{}]", newClusterState.version(), source); } try { - applyChanges(task, previousClusterState, newClusterState, stopWatch); - TimeValue executionTime = TimeValue.timeValueMillis(Math.max(0, currentTimeInMillis() - startTimeMS)); - logger.debug("processing [{}]: took [{}] done applying updated cluster state (version: {}, uuid: {})", task.source, + applyChanges(previousClusterState, newClusterState, source, stopWatch); + TimeValue executionTime = getTimeSince(startTimeMillis); + logger.debug("processing [{}]: took [{}] done applying updated cluster state (version: {}, 
uuid: {})", source, executionTime, newClusterState.version(), newClusterState.stateUUID()); - warnAboutSlowTaskIfNeeded(executionTime, task.source, stopWatch); - task.listener.onSuccess(task.source); + warnAboutSlowTaskIfNeeded(executionTime, source, stopWatch); + clusterApplyListener.onResponse(null); } catch (Exception e) { - TimeValue executionTime = TimeValue.timeValueMillis(Math.max(0, currentTimeInMillis() - startTimeMS)); + TimeValue executionTime = getTimeSince(startTimeMillis); if (logger.isTraceEnabled()) { logger.warn(new ParameterizedMessage( - "failed to apply updated cluster state in [{}]:\nversion [{}], uuid [{}], source [{}]\n{}", - executionTime, newClusterState.version(), newClusterState.stateUUID(), task.source, newClusterState), e); + "failed to apply updated cluster state in [{}]:\nversion [{}], uuid [{}], source [{}]\n{}", + executionTime, newClusterState.version(), newClusterState.stateUUID(), source, newClusterState), e); } else { logger.warn(new ParameterizedMessage( - "failed to apply updated cluster state in [{}]:\nversion [{}], uuid [{}], source [{}]", - executionTime, newClusterState.version(), newClusterState.stateUUID(), task.source), e); + "failed to apply updated cluster state in [{}]:\nversion [{}], uuid [{}], source [{}]", + executionTime, newClusterState.version(), newClusterState.stateUUID(), source), e); } // failing to apply a cluster state with an exception indicates a bug in validation or in one of the appliers; if we // continue we will retry with the same cluster state but that might not help. assert applicationMayFail(); - task.listener.onFailure(task.source, e); + clusterApplyListener.onFailure(e); } } } - private void applyChanges(UpdateTask task, ClusterState previousClusterState, ClusterState newClusterState, StopWatch stopWatch) { - ClusterChangedEvent clusterChangedEvent = new ClusterChangedEvent(task.source, newClusterState, previousClusterState); + private TimeValue getTimeSince(long startTimeMillis) { + return TimeValue.timeValueMillis(Math.max(0, threadPool.relativeTimeInMillis() - startTimeMillis)); + } + + private void applyChanges(ClusterState previousClusterState, ClusterState newClusterState, String source, StopWatch stopWatch) { + ClusterChangedEvent clusterChangedEvent = new ClusterChangedEvent(source, newClusterState, previousClusterState); // new cluster state, notify all listeners final DiscoveryNodes.Delta nodesDelta = clusterChangedEvent.nodesDelta(); if (nodesDelta.hasChanges() && logger.isInfoEnabled()) { String summary = nodesDelta.shortSummary(); if (summary.length() > 0) { - logger.info("{}, term: {}, version: {}, reason: {}", - summary, newClusterState.term(), newClusterState.version(), task.source); + logger.info( + "{}, term: {}, version: {}, reason: {}", + summary, + newClusterState.term(), + newClusterState.version(), + source); } } @@ -515,33 +536,39 @@ private void callClusterStateListener(ClusterChangedEvent clusterChangedEvent, S } } - private static class SafeClusterApplyListener implements ClusterApplyListener { + private static class ClusterApplyActionListener implements ActionListener { + private final String source; private final ClusterApplyListener listener; - protected final Supplier context; - private final Logger logger; - - SafeClusterApplyListener(ClusterApplyListener listener, Supplier context, Logger logger) { + private final Supplier storedContextSupplier; + + ClusterApplyActionListener( + String source, + ClusterApplyListener listener, + Supplier storedContextSupplier + ) { + this.source = 
source; this.listener = listener; - this.context = context; - this.logger = logger; + this.storedContextSupplier = storedContextSupplier; } @Override - public void onFailure(String source, Exception e) { - try (ThreadContext.StoredContext ignore = context.get()) { - listener.onFailure(source, e); + public void onFailure(Exception e) { + try (ThreadContext.StoredContext ignored = storedContextSupplier.get()) { + listener.onFailure(e); } catch (Exception inner) { inner.addSuppressed(e); + assert false : inner; logger.error(new ParameterizedMessage( "exception thrown by listener notifying of failure from [{}]", source), inner); } } @Override - public void onSuccess(String source) { - try (ThreadContext.StoredContext ignore = context.get()) { - listener.onSuccess(source); + public void onResponse(Void unused) { + try (ThreadContext.StoredContext ignored = storedContextSupplier.get()) { + listener.onSuccess(); } catch (Exception e) { + assert false : e; logger.error(new ParameterizedMessage( "exception thrown by listener while notifying of cluster state processed from [{}]", source), e); } @@ -588,12 +615,7 @@ public void run() { } } - // this one is overridden in tests so we can control time - protected long currentTimeInMillis() { - return threadPool.relativeTimeInMillis(); - } - - // overridden by tests that need to check behaviour in the event of an application failure + // overridden by tests that need to check behaviour in the event of an application failure without tripping assertions protected boolean applicationMayFail() { return false; } diff --git a/server/src/main/java/org/elasticsearch/indices/store/IndicesStore.java b/server/src/main/java/org/elasticsearch/indices/store/IndicesStore.java index 12edb43c1eaed..e06cba657782d 100644 --- a/server/src/main/java/org/elasticsearch/indices/store/IndicesStore.java +++ b/server/src/main/java/org/elasticsearch/indices/store/IndicesStore.java @@ -24,6 +24,7 @@ import org.elasticsearch.cluster.routing.RoutingTable; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.Priority; import org.elasticsearch.core.Tuple; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; @@ -263,7 +264,9 @@ private void allNodesResponded() { return; } - clusterService.getClusterApplierService().runOnApplierThread("indices_store ([" + shardId + "] active fully on other nodes)", + clusterService.getClusterApplierService().runOnApplierThread( + "indices_store ([" + shardId + "] active fully on other nodes)", + Priority.HIGH, currentState -> { if (clusterStateVersion != currentState.getVersion()) { logger.trace("not deleting shard {}, the update task state version[{}] is not equal to cluster state before " + @@ -276,9 +279,11 @@ private void allNodesResponded() { logger.debug(() -> new ParameterizedMessage("{} failed to delete unallocated shard, ignoring", shardId), ex); } }, - (source, e) -> logger.error(() -> new ParameterizedMessage("{} unexpected error during deletion of unallocated shard", - shardId), e) - ); + e -> logger.error( + () -> new ParameterizedMessage( + "{} unexpected error during deletion of unallocated shard", + shardId), + e)); } } diff --git a/server/src/test/java/org/elasticsearch/cluster/InternalClusterInfoServiceSchedulingTests.java b/server/src/test/java/org/elasticsearch/cluster/InternalClusterInfoServiceSchedulingTests.java index 1d9ba7263323d..c1bbf7a9a55f4 100644 --- 
a/server/src/test/java/org/elasticsearch/cluster/InternalClusterInfoServiceSchedulingTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/InternalClusterInfoServiceSchedulingTests.java @@ -141,12 +141,12 @@ private static ClusterApplier.ClusterApplyListener setFlagOnSuccess(AtomicBoolea return new ClusterApplier.ClusterApplyListener() { @Override - public void onSuccess(String source) { + public void onSuccess() { assertTrue(flag.compareAndSet(false, true)); } @Override - public void onFailure(String source, Exception e) { + public void onFailure(Exception e) { throw new AssertionError("unexpected", e); } }; diff --git a/server/src/test/java/org/elasticsearch/cluster/coordination/NoOpClusterApplier.java b/server/src/test/java/org/elasticsearch/cluster/coordination/NoOpClusterApplier.java index 9f315b07cd5df..1485793a9c2cd 100644 --- a/server/src/test/java/org/elasticsearch/cluster/coordination/NoOpClusterApplier.java +++ b/server/src/test/java/org/elasticsearch/cluster/coordination/NoOpClusterApplier.java @@ -20,6 +20,6 @@ public void setInitialState(ClusterState initialState) { @Override public void onNewClusterState(String source, Supplier clusterStateSupplier, ClusterApplyListener listener) { - listener.onSuccess(source); + listener.onSuccess(); } } diff --git a/server/src/test/java/org/elasticsearch/cluster/health/ClusterStateHealthTests.java b/server/src/test/java/org/elasticsearch/cluster/health/ClusterStateHealthTests.java index 4a4dfe1120185..944df1d26fc6d 100644 --- a/server/src/test/java/org/elasticsearch/cluster/health/ClusterStateHealthTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/health/ClusterStateHealthTests.java @@ -127,7 +127,7 @@ public void testClusterHealthWaitsForClusterStateApplication() throws Interrupte clusterService.getClusterApplierService().onNewClusterState("restore master", () -> ClusterState.builder(currentState) .nodes(DiscoveryNodes.builder(currentState.nodes()).masterNodeId(currentState.nodes().getLocalNodeId())).build(), - (source, e) -> {}); + e -> {}); logger.info("--> waiting for listener to be called and cluster state being blocked"); listenerCalled.await(); diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/BatchedRerouteServiceTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/BatchedRerouteServiceTests.java index 88bbbcffb3c1a..07b13296523cc 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/BatchedRerouteServiceTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/BatchedRerouteServiceTests.java @@ -204,11 +204,14 @@ public void testNotifiesOnFailure() throws InterruptedException { } if (rarely()) { - clusterService.getClusterApplierService().onNewClusterState("simulated", () -> { - ClusterState state = clusterService.state(); - return ClusterState.builder(state).nodes(DiscoveryNodes.builder(state.nodes()) - .masterNodeId(randomBoolean() ? null : state.nodes().getLocalNodeId())).build(); - }, (source, e) -> { }); + clusterService.getClusterApplierService().onNewClusterState( + "simulated", + () -> { + ClusterState state = clusterService.state(); + return ClusterState.builder(state).nodes(DiscoveryNodes.builder(state.nodes()) + .masterNodeId(randomBoolean() ? 
null : state.nodes().getLocalNodeId())).build(); + }, + e -> { }); } } diff --git a/server/src/test/java/org/elasticsearch/cluster/service/ClusterApplierServiceTests.java b/server/src/test/java/org/elasticsearch/cluster/service/ClusterApplierServiceTests.java index 4a1f935baae73..3b8d390adb5e0 100644 --- a/server/src/test/java/org/elasticsearch/cluster/service/ClusterApplierServiceTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/service/ClusterApplierServiceTests.java @@ -8,6 +8,7 @@ package org.elasticsearch.cluster.service; + import org.apache.logging.log4j.Level; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; @@ -23,26 +24,25 @@ import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.routing.allocation.decider.EnableAllocationDecider; import org.elasticsearch.cluster.service.ClusterApplier.ClusterApplyListener; +import org.elasticsearch.common.Priority; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.core.TimeValue; import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.core.TimeValue; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.MockLogAppender; import org.elasticsearch.test.junit.annotations.TestLogging; import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; import org.junit.After; -import org.junit.AfterClass; import org.junit.Before; -import org.junit.BeforeClass; import java.util.Collections; import java.util.List; import java.util.Map; -import java.util.Objects; import java.util.concurrent.CountDownLatch; +import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicReference; @@ -55,49 +55,64 @@ public class ClusterApplierServiceTests extends ESTestCase { - private static ThreadPool threadPool; - private TimedClusterApplierService clusterApplierService; - - @BeforeClass - public static void createThreadPool() { - threadPool = new TestThreadPool(ClusterApplierServiceTests.class.getName()); - } - - @AfterClass - public static void stopThreadPool() { - if (threadPool != null) { - threadPool.shutdownNow(); - threadPool = null; - } - } + private ThreadPool threadPool; + private long currentTimeMillis; + private boolean allowClusterStateApplicationFailure = false; + private ClusterApplierService clusterApplierService; + private ClusterSettings clusterSettings; @Before public void setUp() throws Exception { super.setUp(); - clusterApplierService = createTimedClusterService(true); + threadPool = new TestThreadPool(ClusterApplierServiceTests.class.getName()) { + @Override + public long relativeTimeInMillis() { + assertThat(Thread.currentThread().getName(), containsString(ClusterApplierService.CLUSTER_UPDATE_THREAD_NAME)); + return currentTimeMillis; + } + }; + clusterSettings = new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); + allowClusterStateApplicationFailure = false; + clusterApplierService = createClusterApplierService(true); } @After public void tearDown() throws Exception { clusterApplierService.close(); + if (threadPool != null) { + ThreadPool.terminate(threadPool, 30, TimeUnit.SECONDS); + } super.tearDown(); } - private TimedClusterApplierService createTimedClusterService(boolean makeMaster) { - DiscoveryNode localNode = new DiscoveryNode("node1", 
buildNewFakeTransportAddress(), emptyMap(), - emptySet(), Version.CURRENT); - TimedClusterApplierService timedClusterApplierService = new TimedClusterApplierService(Settings.builder().put("cluster.name", - "ClusterApplierServiceTests").build(), new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS), - threadPool); - timedClusterApplierService.setNodeConnectionsService(createNoOpNodeConnectionsService()); - timedClusterApplierService.setInitialState(ClusterState.builder(new ClusterName("ClusterApplierServiceTests")) + private ClusterApplierService createClusterApplierService(boolean makeMaster) { + final DiscoveryNode localNode = new DiscoveryNode("node1", buildNewFakeTransportAddress(), emptyMap(), emptySet(), Version.CURRENT); + final ClusterApplierService clusterApplierService = new ClusterApplierService( + "test_node", + Settings.builder().put("cluster.name", "ClusterApplierServiceTests").build(), + clusterSettings, + threadPool + ) { + @Override + protected boolean applicationMayFail() { + return allowClusterStateApplicationFailure; + } + }; + clusterApplierService.setNodeConnectionsService(createNoOpNodeConnectionsService()); + clusterApplierService.setInitialState(ClusterState.builder(new ClusterName("ClusterApplierServiceTests")) .nodes(DiscoveryNodes.builder() .add(localNode) .localNodeId(localNode.getId()) .masterNodeId(makeMaster ? localNode.getId() : null)) .blocks(ClusterBlocks.EMPTY_CLUSTER_BLOCK).build()); - timedClusterApplierService.start(); - return timedClusterApplierService; + clusterApplierService.start(); + return clusterApplierService; + } + + private void advanceTime(long millis) { + // time is only read/written on applier thread, so no synchronization is needed + assertThat(Thread.currentThread().getName(), containsString(ClusterApplierService.CLUSTER_UPDATE_THREAD_NAME)); + currentTimeMillis += millis; } @TestLogging(value = "org.elasticsearch.cluster.service:TRACE", reason = "to ensure that we log cluster state events on TRACE level") @@ -126,44 +141,54 @@ public void testClusterStateUpdateLogging() throws Exception { Logger clusterLogger = LogManager.getLogger(ClusterApplierService.class); Loggers.addAppender(clusterLogger, mockAppender); try { - clusterApplierService.currentTimeOverride = threadPool.relativeTimeInMillis(); - clusterApplierService.runOnApplierThread("test1", - currentState -> clusterApplierService.currentTimeOverride += TimeValue.timeValueSeconds(1).millis(), + currentTimeMillis = randomLongBetween(0L, Long.MAX_VALUE / 2); + clusterApplierService.runOnApplierThread( + "test1", + Priority.HIGH, + currentState -> advanceTime(TimeValue.timeValueSeconds(1).millis()), new ClusterApplyListener() { @Override - public void onSuccess(String source) { } + public void onSuccess() { + } @Override - public void onFailure(String source, Exception e) { + public void onFailure(Exception e) { fail(); } - }); - clusterApplierService.runOnApplierThread("test2", + } + ); + clusterApplierService.runOnApplierThread( + "test2", + Priority.HIGH, currentState -> { - clusterApplierService.currentTimeOverride += TimeValue.timeValueSeconds(2).millis(); + advanceTime(TimeValue.timeValueSeconds(2).millis()); throw new IllegalArgumentException("Testing handling of exceptions in the cluster state task"); }, new ClusterApplyListener() { @Override - public void onSuccess(String source) { + public void onSuccess() { fail(); } @Override - public void onFailure(String source, Exception e) { } - }); + public void onFailure(Exception e) { } + } + ); // 
Additional update task to make sure all previous logging made it to the loggerName - clusterApplierService.runOnApplierThread("test3", + clusterApplierService.runOnApplierThread( + "test3", + Priority.HIGH, currentState -> {}, new ClusterApplyListener() { @Override - public void onSuccess(String source) { } + public void onSuccess() { } @Override - public void onFailure(String source, Exception e) { + public void onFailure(Exception e) { fail(); } - }); + } + ); assertBusy(mockAppender::assertAllExpectationsMatched); } finally { Loggers.removeAppender(clusterLogger, mockAppender); @@ -201,66 +226,78 @@ public void testLongClusterStateUpdateLogging() throws Exception { try { final CountDownLatch latch = new CountDownLatch(4); final CountDownLatch processedFirstTask = new CountDownLatch(1); - clusterApplierService.currentTimeOverride = threadPool.relativeTimeInMillis(); - clusterApplierService.runOnApplierThread("test1", - currentState -> clusterApplierService.currentTimeOverride += TimeValue.timeValueSeconds(1).millis(), + currentTimeMillis = randomLongBetween(0L, Long.MAX_VALUE / 2); + clusterApplierService.runOnApplierThread( + "test1", + Priority.HIGH, + currentState -> advanceTime(TimeValue.timeValueSeconds(1).millis()), new ClusterApplyListener() { @Override - public void onSuccess(String source) { + public void onSuccess() { latch.countDown(); processedFirstTask.countDown(); } @Override - public void onFailure(String source, Exception e) { + public void onFailure(Exception e) { fail(); } - }); + } + ); processedFirstTask.await(); - clusterApplierService.runOnApplierThread("test2", + clusterApplierService.runOnApplierThread( + "test2", + Priority.HIGH, currentState -> { - clusterApplierService.currentTimeOverride += TimeValue.timeValueSeconds(32).millis(); + advanceTime(TimeValue.timeValueSeconds(32).millis()); throw new IllegalArgumentException("Testing handling of exceptions in the cluster state task"); }, new ClusterApplyListener() { @Override - public void onSuccess(String source) { + public void onSuccess() { fail(); } @Override - public void onFailure(String source, Exception e) { + public void onFailure(Exception e) { latch.countDown(); } - }); - clusterApplierService.runOnApplierThread("test3", - currentState -> clusterApplierService.currentTimeOverride += TimeValue.timeValueSeconds(34).millis(), + } + ); + clusterApplierService.runOnApplierThread( + "test3", + Priority.HIGH, + currentState -> advanceTime(TimeValue.timeValueSeconds(34).millis()), new ClusterApplyListener() { @Override - public void onSuccess(String source) { + public void onSuccess() { latch.countDown(); } @Override - public void onFailure(String source, Exception e) { + public void onFailure(Exception e) { fail(); } - }); + } + ); // Additional update task to make sure all previous logging made it to the loggerName // We don't check logging for this on since there is no guarantee that it will occur before our check - clusterApplierService.runOnApplierThread("test4", + clusterApplierService.runOnApplierThread( + "test4", + Priority.HIGH, currentState -> {}, new ClusterApplyListener() { @Override - public void onSuccess(String source) { + public void onSuccess() { latch.countDown(); } @Override - public void onFailure(String source, Exception e) { + public void onFailure(Exception e) { fail(); } - }); + } + ); latch.await(); } finally { Loggers.removeAppender(clusterLogger, mockAppender); @@ -270,10 +307,10 @@ public void onFailure(String source, Exception e) { } public void 
testLocalNodeMasterListenerCallbacks() { - TimedClusterApplierService timedClusterApplierService = createTimedClusterService(false); + ClusterApplierService clusterApplierService = createClusterApplierService(false); AtomicBoolean isMaster = new AtomicBoolean(); - timedClusterApplierService.addLocalNodeMasterListener(new LocalNodeMasterListener() { + clusterApplierService.addLocalNodeMasterListener(new LocalNodeMasterListener() { @Override public void onMaster() { isMaster.set(true); @@ -285,25 +322,25 @@ public void offMaster() { } }); - ClusterState state = timedClusterApplierService.state(); + ClusterState state = clusterApplierService.state(); DiscoveryNodes nodes = state.nodes(); DiscoveryNodes.Builder nodesBuilder = DiscoveryNodes.builder(nodes).masterNodeId(nodes.getLocalNodeId()); state = ClusterState.builder(state).blocks(ClusterBlocks.EMPTY_CLUSTER_BLOCK).nodes(nodesBuilder).build(); - setState(timedClusterApplierService, state); + setState(clusterApplierService, state); assertThat(isMaster.get(), is(true)); nodes = state.nodes(); nodesBuilder = DiscoveryNodes.builder(nodes).masterNodeId(null); state = ClusterState.builder(state).blocks(ClusterBlocks.builder().addGlobalBlock(NoMasterBlockService.NO_MASTER_BLOCK_WRITES)) .nodes(nodesBuilder).build(); - setState(timedClusterApplierService, state); + setState(clusterApplierService, state); assertThat(isMaster.get(), is(false)); nodesBuilder = DiscoveryNodes.builder(nodes).masterNodeId(nodes.getLocalNodeId()); state = ClusterState.builder(state).blocks(ClusterBlocks.EMPTY_CLUSTER_BLOCK).nodes(nodesBuilder).build(); - setState(timedClusterApplierService, state); + setState(clusterApplierService, state); assertThat(isMaster.get(), is(true)); - timedClusterApplierService.close(); + clusterApplierService.close(); } public void testClusterStateApplierCantSampleClusterState() throws InterruptedException { @@ -326,12 +363,12 @@ public void testClusterStateApplierCantSampleClusterState() throws InterruptedEx new ClusterApplyListener() { @Override - public void onSuccess(String source) { + public void onSuccess() { latch.countDown(); } @Override - public void onFailure(String source, Exception e) { + public void onFailure(Exception e) { error.compareAndSet(null, e); } } @@ -347,20 +384,20 @@ public void testClusterStateApplierBubblesUpExceptionsInApplier() throws Interru clusterApplierService.addStateApplier(event -> { throw new RuntimeException("dummy exception"); }); - clusterApplierService.allowClusterStateApplicationFailure(); + allowClusterStateApplicationFailure = true; CountDownLatch latch = new CountDownLatch(1); clusterApplierService.onNewClusterState("test", () -> ClusterState.builder(clusterApplierService.state()).build(), new ClusterApplyListener() { @Override - public void onSuccess(String source) { + public void onSuccess() { latch.countDown(); fail("should not be called"); } @Override - public void onFailure(String source, Exception e) { + public void onFailure(Exception e) { assertTrue(error.compareAndSet(null, e)); latch.countDown(); } @@ -374,9 +411,8 @@ public void onFailure(String source, Exception e) { public void testClusterStateApplierBubblesUpExceptionsInSettingsApplier() throws InterruptedException { AtomicReference error = new AtomicReference<>(); - clusterApplierService.clusterSettings.addSettingsUpdateConsumer(EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE_SETTING, - v -> {}); - clusterApplierService.allowClusterStateApplicationFailure(); + 
clusterSettings.addSettingsUpdateConsumer(EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE_SETTING, v -> {}); + allowClusterStateApplicationFailure = true; CountDownLatch latch = new CountDownLatch(1); clusterApplierService.onNewClusterState("test", () -> ClusterState.builder(clusterApplierService.state()) @@ -388,13 +424,13 @@ public void testClusterStateApplierBubblesUpExceptionsInSettingsApplier() throws new ClusterApplyListener() { @Override - public void onSuccess(String source) { + public void onSuccess() { latch.countDown(); fail("should not be called"); } @Override - public void onFailure(String source, Exception e) { + public void onFailure(Exception e) { assertTrue(error.compareAndSet(null, e)); latch.countDown(); } @@ -419,12 +455,12 @@ public void testClusterStateApplierSwallowsExceptionInListener() throws Interrup new ClusterApplyListener() { @Override - public void onSuccess(String source) { + public void onSuccess() { latch.countDown(); } @Override - public void onFailure(String source, Exception e) { + public void onFailure(Exception e) { error.compareAndSet(null, e); } } @@ -468,12 +504,12 @@ public void onTimeout(TimeValue timeout) { clusterApplierService.onNewClusterState("test", () -> ClusterState.builder(clusterApplierService.state()).build(), new ClusterApplyListener() { @Override - public void onSuccess(String source) { + public void onSuccess() { latch.countDown(); } @Override - public void onFailure(String source, Exception e) { + public void onFailure(Exception e) { error.compareAndSet(null, e); } }); @@ -505,7 +541,7 @@ public void testThreadContext() throws InterruptedException { }, new ClusterApplyListener() { @Override - public void onSuccess(String source) { + public void onSuccess() { assertFalse(threadPool.getThreadContext().isSystemContext()); assertEquals(expectedHeaders, threadPool.getThreadContext().getHeaders()); assertEquals(expectedResponseHeaders, threadPool.getThreadContext().getResponseHeaders()); @@ -513,7 +549,7 @@ public void onSuccess(String source) { } @Override - public void onFailure(String source, Exception e) { + public void onFailure(Exception e) { assertFalse(threadPool.getThreadContext().isSystemContext()); assertEquals(expectedHeaders, threadPool.getThreadContext().getHeaders()); assertEquals(expectedResponseHeaders, threadPool.getThreadContext().getResponseHeaders()); @@ -525,30 +561,4 @@ public void onFailure(String source, Exception e) { latch.await(); } - static class TimedClusterApplierService extends ClusterApplierService { - - final ClusterSettings clusterSettings; - volatile Long currentTimeOverride = null; - boolean applicationMayFail; - - TimedClusterApplierService(Settings settings, ClusterSettings clusterSettings, ThreadPool threadPool) { - super("test_node", settings, clusterSettings, threadPool); - this.clusterSettings = clusterSettings; - } - - @Override - protected long currentTimeInMillis() { - return Objects.requireNonNullElseGet(currentTimeOverride, super::currentTimeInMillis); - } - - @Override - protected boolean applicationMayFail() { - return this.applicationMayFail; - } - - void allowClusterStateApplicationFailure() { - this.applicationMayFail = true; - } - } - } diff --git a/server/src/test/java/org/elasticsearch/snapshots/InternalSnapshotsInfoServiceTests.java b/server/src/test/java/org/elasticsearch/snapshots/InternalSnapshotsInfoServiceTests.java index e9a7642fa96f7..dab51f7284e92 100644 --- a/server/src/test/java/org/elasticsearch/snapshots/InternalSnapshotsInfoServiceTests.java +++ 
b/server/src/test/java/org/elasticsearch/snapshots/InternalSnapshotsInfoServiceTests.java @@ -386,12 +386,12 @@ private void applyClusterState(final String reason, final Function clusterService.getClusterApplierService() .onNewClusterState(reason, () -> applier.apply(clusterService.state()), new ClusterApplier.ClusterApplyListener() { @Override - public void onSuccess(String source) { - future.onResponse(source); + public void onSuccess() { + future.onResponse(null); } @Override - public void onFailure(String source, Exception e) { + public void onFailure(Exception e) { future.onFailure(e); } }) diff --git a/test/framework/src/main/java/org/elasticsearch/cluster/coordination/AbstractCoordinatorTestCase.java b/test/framework/src/main/java/org/elasticsearch/cluster/coordination/AbstractCoordinatorTestCase.java index 72f23c5b43f51..16ac54c1ad4f8 100644 --- a/test/framework/src/main/java/org/elasticsearch/cluster/coordination/AbstractCoordinatorTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/cluster/coordination/AbstractCoordinatorTestCase.java @@ -1322,6 +1322,7 @@ public void onNodeAck(DiscoveryNode node, Exception e) { static class DisruptableClusterApplierService extends ClusterApplierService { private final String nodeName; private final DeterministicTaskQueue deterministicTaskQueue; + private final ThreadPool threadPool; ClusterStateApplyResponse clusterStateApplyResponse = ClusterStateApplyResponse.SUCCEED; private boolean applicationMayFail; @@ -1330,6 +1331,7 @@ static class DisruptableClusterApplierService extends ClusterApplierService { super(nodeName, settings, clusterSettings, threadPool); this.nodeName = nodeName; this.deterministicTaskQueue = deterministicTaskQueue; + this.threadPool = threadPool; addStateApplier(event -> { switch (clusterStateApplyResponse) { case SUCCEED: @@ -1355,7 +1357,7 @@ public void onNewClusterState(String source, Supplier clusterState if (clusterStateApplyResponse == ClusterStateApplyResponse.HANG) { if (randomBoolean()) { // apply cluster state, but don't notify listener - super.onNewClusterState(source, clusterStateSupplier, (source1, e) -> { + super.onNewClusterState(source, clusterStateSupplier, e -> { // ignore result }); } diff --git a/test/framework/src/main/java/org/elasticsearch/test/ClusterServiceUtils.java b/test/framework/src/main/java/org/elasticsearch/test/ClusterServiceUtils.java index 7d5cb754844e8..fa4a6531ba261 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ClusterServiceUtils.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ClusterServiceUtils.java @@ -50,12 +50,12 @@ public static void setState(ClusterApplierService executor, ClusterState cluster executor.onNewClusterState("test setting state", () -> ClusterState.builder(clusterState).version(clusterState.version() + 1).build(), new ClusterApplyListener() { @Override - public void onSuccess(String source) { + public void onSuccess() { latch.countDown(); } @Override - public void onFailure(String source, Exception e) { + public void onFailure(Exception e) { exception.set(e); latch.countDown(); } @@ -150,12 +150,12 @@ public static ClusterStatePublisher createClusterStatePublisher(ClusterApplier c clusterStatePublicationEvent::getNewState, new ClusterApplyListener() { @Override - public void onSuccess(String source) { + public void onSuccess() { publishListener.onResponse(null); } @Override - public void onFailure(String source, Exception e) { + public void onFailure(Exception e) { publishListener.onFailure(e); } }); diff --git 
a/test/framework/src/main/java/org/elasticsearch/test/disruption/BlockClusterStateProcessing.java b/test/framework/src/main/java/org/elasticsearch/test/disruption/BlockClusterStateProcessing.java index 6ffeb82e687f0..2465a87339a49 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/disruption/BlockClusterStateProcessing.java +++ b/test/framework/src/main/java/org/elasticsearch/test/disruption/BlockClusterStateProcessing.java @@ -41,7 +41,9 @@ public void startDisrupting() { boolean success = disruptionLatch.compareAndSet(null, new CountDownLatch(1)); assert success : "startDisrupting called without waiting on stopDisrupting to complete"; final CountDownLatch started = new CountDownLatch(1); - clusterService.getClusterApplierService().runOnApplierThread("service_disruption_block", + clusterService.getClusterApplierService().runOnApplierThread( + "service_disruption_block", + Priority.IMMEDIATE, currentState -> { started.countDown(); CountDownLatch latch = disruptionLatch.get(); @@ -52,8 +54,9 @@ public void startDisrupting() { Throwables.rethrow(e); } } - }, (source, e) -> logger.error("unexpected error during disruption", e), - Priority.IMMEDIATE); + }, + e -> logger.error("unexpected error during disruption", e) + ); try { started.await(); } catch (InterruptedException e) { diff --git a/test/framework/src/main/java/org/elasticsearch/test/disruption/SlowClusterStateProcessing.java b/test/framework/src/main/java/org/elasticsearch/test/disruption/SlowClusterStateProcessing.java index d95a45a49cf4e..af5c81a6e002b 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/disruption/SlowClusterStateProcessing.java +++ b/test/framework/src/main/java/org/elasticsearch/test/disruption/SlowClusterStateProcessing.java @@ -90,7 +90,9 @@ private boolean interruptClusterStateProcessing(final TimeValue duration) throws return false; } final AtomicBoolean stopped = new AtomicBoolean(false); - clusterService.getClusterApplierService().runOnApplierThread("service_disruption_delay", + clusterService.getClusterApplierService().runOnApplierThread( + "service_disruption_delay", + Priority.IMMEDIATE, currentState -> { try { long count = duration.millis() / 200; @@ -105,8 +107,9 @@ private boolean interruptClusterStateProcessing(final TimeValue duration) throws } catch (InterruptedException e) { ExceptionsHelper.reThrowIfNotNull(e); } - }, (source, e) -> countDownLatch.countDown(), - Priority.IMMEDIATE); + }, + e -> countDownLatch.countDown() + ); try { countDownLatch.await(); } catch (InterruptedException e) { From b9ad9860afd10a31aacbb31991cce037c75eabdd Mon Sep 17 00:00:00 2001 From: Adam Locke Date: Tue, 31 Aug 2021 12:37:22 -0400 Subject: [PATCH 045/128] [DOCS] Add ES security principles (#76850) * [DOCS] Add ES security principles * Incorporating review feedback * More changes from review feedback * Fix cross-link to Painless guide * Clarify callout text * Add information about elasticsearch user * Minor wording edits * Consolidate Java Security Manager description, plus other edits * Clarify not running as root Co-authored-by: Elastic Machine --- docs/reference/redirects.asciidoc | 5 + docs/reference/scripting/security.asciidoc | 111 +++++------------- .../security/es-security-principles.asciidoc | 54 +++++++++ x-pack/docs/en/security/index.asciidoc | 16 ++- .../security-manual-configuration.asciidoc | 1 + 5 files changed, 100 insertions(+), 87 deletions(-) create mode 100644 x-pack/docs/en/security/es-security-principles.asciidoc diff --git a/docs/reference/redirects.asciidoc 
b/docs/reference/redirects.asciidoc index 5e253d39e1ef1..4a0f33d4063fc 100644 --- a/docs/reference/redirects.asciidoc +++ b/docs/reference/redirects.asciidoc @@ -3,6 +3,11 @@ The following pages have moved or been deleted. +[role="exclude",id="modules-scripting-other-layers"] +=== Other security layers + +Refer to <>. + [role="exclude",id="grok-basics"] === Grok basics diff --git a/docs/reference/scripting/security.asciidoc b/docs/reference/scripting/security.asciidoc index db81f57a7d754..2726938cb179d 100644 --- a/docs/reference/scripting/security.asciidoc +++ b/docs/reference/scripting/security.asciidoc @@ -1,114 +1,63 @@ [[modules-scripting-security]] == Scripting and security +Painless and {es} implement layers of security to build a defense in depth +strategy for running scripts safely. -While Elasticsearch contributors make every effort to prevent scripts from -running amok, security is something best done in -{wikipedia}/Defense_in_depth_(computing)[layers] because -all software has bugs and it is important to minimize the risk of failure in -any security layer. Find below rules of thumb for how to keep Elasticsearch -from being a vulnerability. +Painless uses a fine-grained allowlist. Anything that is not part of the +allowlist results in a compilation error. This capability is the first layer of +security in a defense in depth strategy for scripting. -[discrete] -=== Do not run as root -First and foremost, never run Elasticsearch as the `root` user as this would -allow any successful effort to circumvent the other security layers to do -*anything* on your server. Elasticsearch will refuse to start if it detects -that it is running as `root` but this is so important that it is worth double -and triple checking. - -[discrete] -=== Do not expose Elasticsearch directly to users -Do not expose Elasticsearch directly to users, instead have an application -make requests on behalf of users. If this is not possible, have an application -to sanitize requests from users. If *that* is not possible then have some -mechanism to track which users did what. Understand that it is quite possible -to write a <> that overwhelms Elasticsearch and brings down -the cluster. All such searches should be considered bugs and the Elasticsearch -contributors make an effort to prevent this but they are still possible. - -[discrete] -=== Do not expose Elasticsearch directly to the Internet -Do not expose Elasticsearch to the Internet, instead have an application -make requests on behalf of the Internet. Do not entertain the thought of having -an application "sanitize" requests to Elasticsearch. Understand that it is -possible for a sufficiently determined malicious user to write searches that -overwhelm the Elasticsearch cluster and bring it down. For example: - -Good: - -* Users type text into a search box and the text is sent directly to a -<>, <>, -<>, or any of the <>. -* Running a script with any of the above queries that was written as part of -the application development process. -* Running a script with `params` provided by users. -* User actions makes documents with a fixed structure. +The second layer of security is the https://www.oracle.com/java/technologies/javase/seccodeguide.html[Java Security Manager]. As part of its startup +sequence, {es} enables the Java Security Manager to limit the actions that +portions of the code can take. <> uses +the Java Security Manager as an additional layer of defense to prevent scripts +from doing things like writing files and listening to sockets. 
-Bad: - -* Users can write arbitrary scripts, queries, `_search` requests. -* User actions make documents with structure defined by users. - -[discrete] -[[modules-scripting-other-layers]] -=== Other security layers -In addition to user privileges and script sandboxing Elasticsearch uses the -https://www.oracle.com/java/technologies/javase/seccodeguide.html[Java Security Manager] -and native security tools as additional layers of security. - -As part of its startup sequence Elasticsearch enables the Java Security Manager -which limits the actions that can be taken by portions of the code. Painless -uses this to limit the actions that generated Painless scripts can take, -preventing them from being able to do things like write files and listen to -sockets. - -Elasticsearch uses +{es} uses {wikipedia}/Seccomp[seccomp] in Linux, https://www.chromium.org/developers/design-documents/sandbox/osx-sandboxing-design[Seatbelt] in macOS, and https://msdn.microsoft.com/en-us/library/windows/desktop/ms684147[ActiveProcessLimit] -on Windows to prevent Elasticsearch from forking or executing other processes. +on Windows as additional security layers to prevent {es} from forking or +running other processes. -Below this we describe the security settings for scripts and how you can -change from the defaults described above. You should be very, very careful -when allowing more than the defaults. Any extra permissions weakens the total -security of the Elasticsearch deployment. +You can modify the following script settings to restrict the type of scripts +that are allowed to run, and control the available +{painless}/painless-contexts.html[contexts] that scripts can run in. To +implement additional layers in your defense in depth strategy, follow the +<>. [[allowed-script-types-setting]] [discrete] === Allowed script types setting -Elasticsearch supports two script types: `inline` and `stored` (<>). -By default, {es} is configured to run both types of scripts. -To limit what type of scripts are run, set `script.allowed_types` to `inline` or `stored`. -To prevent any scripts from running, set `script.allowed_types` to `none`. +{es} supports two script types: `inline` and `stored`. By default, {es} is +configured to run both types of scripts. To limit what type of scripts are run, +set `script.allowed_types` to `inline` or `stored`. To prevent any scripts from +running, set `script.allowed_types` to `none`. IMPORTANT: If you use {kib}, set `script.allowed_types` to `both` or `inline`. Some {kib} features rely on inline scripts and do not function as expected if {es} does not allow inline scripts. -For example, to run `inline` scripts but not `stored` scripts, specify: +For example, to run `inline` scripts but not `stored` scripts: [source,yaml] ---- -script.allowed_types: inline <1> +script.allowed_types: inline ---- -<1> This will allow only inline scripts to be executed but not stored scripts -(or any other types). - [[allowed-script-contexts-setting]] [discrete] === Allowed script contexts setting -By default all script contexts are allowed to be executed. This can be modified using the -setting `script.allowed_contexts`. Only the contexts specified as part of the setting will -be allowed to be executed. To specify no contexts are allowed, set `script.allowed_contexts` -to be `none`. +By default, all script contexts are permitted. Use the `script.allowed_contexts` +setting to specify the contexts that are allowed. To specify that no contexts +are allowed, set `script.allowed_contexts` to `none`. 
+ +For example, to allow scripts to run only in `scoring` and `update` contexts: [source,yaml] ---- -script.allowed_contexts: score, update <1> +script.allowed_contexts: score, update ---- -<1> This will allow only scoring and update scripts to be executed but not -aggs or plugin scripts (or any other contexts). diff --git a/x-pack/docs/en/security/es-security-principles.asciidoc b/x-pack/docs/en/security/es-security-principles.asciidoc new file mode 100644 index 0000000000000..4c6eeaec370af --- /dev/null +++ b/x-pack/docs/en/security/es-security-principles.asciidoc @@ -0,0 +1,54 @@ +[[es-security-principles]] +== {es} security principles +Protecting your {es} cluster and the data it contains is of utmost importance. +Implementing a defense in depth strategy provides multiple layers of security +to help safeguard your system. The following principles provide a foundation +for running {es} in a secure manner that helps to mitigate attacks on your +system at multiple levels. + +[discrete] +[[security-run-with-security]] +=== Run {es} with security enabled +Never run an {es} cluster without security enabled. This principle cannot be +overstated. Running {es} without security leaves your cluster exposed to anyone +who can send network traffic to {es}, permitting these individuals to download, +modify, or delete any data in your cluster. +<> or +<> to prevent +unauthorized access to your clusters and ensure that internode communication +is secure. + +[discrete] +[[security-not-root-user]] +=== Run {es} with a dedicated non-root user +Never try to run {es} as the `root` user, which would invalidate any defense +strategy and permit a malicious user to do *anything* on your server. You must +create a dedicated, unprivileged user to run {es}. By default, the `rpm`, `deb`, +`docker`, and Windows packages of {es} contain an `elasticsearch` user with +this scope. + +[discrete] +[[security-protect-cluster-traffic]] +=== Protect {es} from public internet traffic +Even with security enabled, never expose {es} to public internet traffic. +Using an application to sanitize requests to {es} still poses risks, such as +a malicious user writing <> requests that could overwhelm an +{es} cluster and bring it down. Keep {es} as isolated as possible, preferably +behind a firewall and a VPN. Any internet-facing applications should run +pre-canned aggregations, or not run aggregations at all. + +While you absolutely shouldn't expose {es} directly to the internet, you also +shouldn't expose {es} directly to users. Instead, use an intermediary +application to make requests on behalf of users. This implementation allows you +to track user behaviors, such as which users can submit requests, and to which specific +nodes in the cluster. For example, you can implement an application that accepts +a search term from a user and funnels it through a +<> query. + +[discrete] +[[security-create-appropriate-users]] +=== Implement role-based access control +<> for your users and +<> to ensure that users have +access only to the resources that they need. This process determines whether the +user behind an incoming request is allowed to run that request.
\ No newline at end of file diff --git a/x-pack/docs/en/security/index.asciidoc b/x-pack/docs/en/security/index.asciidoc index 7ee5f233b26ad..d857eee896738 100644 --- a/x-pack/docs/en/security/index.asciidoc +++ b/x-pack/docs/en/security/index.asciidoc @@ -1,19 +1,21 @@ [[secure-cluster]] -= Secure the Elastic Stack += Secure the {stack} [partintro] -- -The Elastic Stack is comprised of many moving parts. There are the {es} +The {stack} is comprised of many moving parts. There are the {es} nodes that form the cluster, plus {ls} instances, {kib} instances, {beats} -agents, and clients all communicating with the cluster. +agents, and clients all communicating with the cluster. To keep your cluster +safe, adhere to the <>. -<> to +<> or +<> to secure {es} clusters and any clients that communicate with your clusters. You can password protect access to your data as well as enable more advanced security by configuring Transport Layer Security (TLS). This additional layer provides confidentiality and integrity protection to your communications with -the Elastic Stack. You can also implement additional security measures, such as +the {stack}. You can also implement additional security measures, such as role-based access control, IP filtering, and auditing. Enabling security protects {es} clusters by: @@ -71,7 +73,7 @@ of your data against tampering, while also providing _confidentiality_ by encrypting communications to, from, and within the cluster. For even greater protection, you can increase the <>. -See <>. +See <>. [discrete] [[maintaining-audit-trail]] @@ -90,6 +92,8 @@ See <>. -- +include::es-security-principles.asciidoc[] + include::configuring-stack-security.asciidoc[] include::security-manual-configuration.asciidoc[] diff --git a/x-pack/docs/en/security/security-manual-configuration.asciidoc b/x-pack/docs/en/security/security-manual-configuration.asciidoc index 8b77a6fe45047..9b5a5684a49a1 100644 --- a/x-pack/docs/en/security/security-manual-configuration.asciidoc +++ b/x-pack/docs/en/security/security-manual-configuration.asciidoc @@ -1,3 +1,4 @@ +[[manually-configure-security]] == Manually configure security ++++ Configure security From ec11f9f931f968b98a12602c37d21119a71b8e0a Mon Sep 17 00:00:00 2001 From: Francois-Clement Brossard Date: Wed, 1 Sep 2021 03:02:24 +0900 Subject: [PATCH 046/128] Execute enrich policy wait_for_completion docfix (#77046) --- .../ingest/apis/enrich/execute-enrich-policy.asciidoc | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/reference/ingest/apis/enrich/execute-enrich-policy.asciidoc b/docs/reference/ingest/apis/enrich/execute-enrich-policy.asciidoc index 35e9b9e69b59c..a213146a49afe 100644 --- a/docs/reference/ingest/apis/enrich/execute-enrich-policy.asciidoc +++ b/docs/reference/ingest/apis/enrich/execute-enrich-policy.asciidoc @@ -102,8 +102,8 @@ it may take a while to return a response. (Required, string) Enrich policy to execute. -[[execute-enrich-policy-api-request-body]] -==== {api-request-body-title} +[[execute-enrich-policy-api-query-params]] +==== {api-query-parms-title} `wait_for_completion`:: (Required, Boolean) From 707dd497e489bb6917dc3d0bd58928ce13a33927 Mon Sep 17 00:00:00 2001 From: Christos Soulios <1561376+csoulios@users.noreply.github.com> Date: Tue, 31 Aug 2021 21:28:14 +0300 Subject: [PATCH 047/128] Add multiple validators to Parameters (#77073) This PR implements support for multiple validators to a FieldMapper.Parameter. 
The Parameter#setValidator method was replaced by Parameter#addValidator, which can be called multiple times to add validation to a parameter. All validators of a parameter are executed in the same order in which they were added, and if any of them fails, the whole validation fails. --- .../index/mapper/ScaledFloatFieldMapper.java | 2 +- .../mapper/SearchAsYouTypeFieldMapper.java | 4 ++-- .../mapper/ICUCollationKeywordFieldMapper.java | 2 +- .../index/mapper/CompletionFieldMapper.java | 2 +- .../index/mapper/CompositeRuntimeField.java | 4 ++-- .../index/mapper/FieldMapper.java | 18 ++++++++++-------- .../index/mapper/IpFieldMapper.java | 2 +- .../index/mapper/KeywordFieldMapper.java | 2 +- .../mapper/LegacyGeoShapeFieldMapper.java | 2 +- .../index/mapper/NumberFieldMapper.java | 2 +- .../elasticsearch/index/mapper/TextParams.java | 8 ++++---- .../mapper/flattened/FlattenedFieldMapper.java | 2 +- .../index/mapper/ParametrizedMapperTests.java | 13 +++++++++++-- .../AggregateDoubleMetricFieldMapper.java | 2 +- .../vectors/mapper/DenseVectorFieldMapper.java | 2 +- .../wildcard/mapper/WildcardFieldMapper.java | 16 ++++++++-------- 16 files changed, 47 insertions(+), 36 deletions(-) diff --git a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/ScaledFloatFieldMapper.java b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/ScaledFloatFieldMapper.java index 4b3a65d91fd60..92531b4cc5c46 100644 --- a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/ScaledFloatFieldMapper.java +++ b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/ScaledFloatFieldMapper.java @@ -70,7 +70,7 @@ public static class Builder extends FieldMapper.Builder { private final Parameter scalingFactor = new Parameter<>("scaling_factor", false, () -> null, (n, c, o) -> XContentMapValues.nodeDoubleValue(o), m -> toType(m).scalingFactor) - .setValidator(v -> { + .addValidator(v -> { if (v == null) { throw new IllegalArgumentException("Field [scaling_factor] is required"); } diff --git a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/SearchAsYouTypeFieldMapper.java b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/SearchAsYouTypeFieldMapper.java index 450b98cc23173..a26308a7db76c 100644 --- a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/SearchAsYouTypeFieldMapper.java +++ b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/SearchAsYouTypeFieldMapper.java @@ -97,7 +97,7 @@ public static class Builder extends FieldMapper.Builder { // `doc_values=false`, even though it cannot be set; and so we need to continue // serializing it forever because of mapper assertions in mixed clusters.
private final Parameter docValues = Parameter.docValuesParam(m -> false, false) - .setValidator(v -> { + .addValidator(v -> { if (v) { throw new MapperParsingException("Cannot set [doc_values] on field of type [search_as_you_type]"); } @@ -106,7 +106,7 @@ public static class Builder extends FieldMapper.Builder { private final Parameter maxShingleSize = Parameter.intParam("max_shingle_size", false, m -> builder(m).maxShingleSize.get(), Defaults.MAX_SHINGLE_SIZE) - .setValidator(v -> { + .addValidator(v -> { if (v < MAX_SHINGLE_SIZE_LOWER_BOUND || v > MAX_SHINGLE_SIZE_UPPER_BOUND) { throw new MapperParsingException("[max_shingle_size] must be at least [" + MAX_SHINGLE_SIZE_LOWER_BOUND + "] and at most " + "[" + MAX_SHINGLE_SIZE_UPPER_BOUND + "], got [" + v + "]"); diff --git a/plugins/analysis-icu/src/main/java/org/elasticsearch/index/mapper/ICUCollationKeywordFieldMapper.java b/plugins/analysis-icu/src/main/java/org/elasticsearch/index/mapper/ICUCollationKeywordFieldMapper.java index b66e56d7e0d56..e4c278f6ddb9e 100644 --- a/plugins/analysis-icu/src/main/java/org/elasticsearch/index/mapper/ICUCollationKeywordFieldMapper.java +++ b/plugins/analysis-icu/src/main/java/org/elasticsearch/index/mapper/ICUCollationKeywordFieldMapper.java @@ -213,7 +213,7 @@ public static class Builder extends FieldMapper.Builder { final Parameter ignoreAbove = Parameter.intParam("ignore_above", true, m -> toType(m).ignoreAbove, Integer.MAX_VALUE) - .setValidator(v -> { + .addValidator(v -> { if (v < 0) { throw new IllegalArgumentException("[ignore_above] must be positive, got [" + v + "]"); } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/CompletionFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/CompletionFieldMapper.java index 3052d860a3a92..5f792661f4f2d 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/CompletionFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/CompletionFieldMapper.java @@ -133,7 +133,7 @@ public static class Builder extends FieldMapper.Builder { private final Parameter maxInputLength = Parameter.intParam("max_input_length", true, m -> builder(m).maxInputLength.get(), Defaults.DEFAULT_MAX_INPUT_LENGTH) .addDeprecatedName("max_input_len") - .setValidator(Builder::validateInputLength) + .addValidator(Builder::validateInputLength) .alwaysSerialize(); private final Parameter> meta = Parameter.metaParam(); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/CompositeRuntimeField.java b/server/src/main/java/org/elasticsearch/index/mapper/CompositeRuntimeField.java index c841eef7768c8..78ab0f9d121a5 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/CompositeRuntimeField.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/CompositeRuntimeField.java @@ -38,7 +38,7 @@ public class CompositeRuntimeField implements RuntimeField { () -> null, RuntimeField::parseScript, RuntimeField.initializerNotSupported() - ).setValidator(s -> { + ).addValidator(s -> { if (s == null) { throw new IllegalArgumentException("composite runtime field [" + name + "] must declare a [script]"); } @@ -50,7 +50,7 @@ public class CompositeRuntimeField implements RuntimeField { Collections::emptyMap, (f, p, o) -> parseFields(f, o), RuntimeField.initializerNotSupported() - ).setValidator(objectMap -> { + ).addValidator(objectMap -> { if (objectMap == null || objectMap.isEmpty()) { throw new IllegalArgumentException("composite runtime field [" + name + "] must declare its [fields]"); } diff --git 
a/server/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java index 9f5794b224d73..86dc9ec80e099 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java @@ -588,7 +588,7 @@ public static final class Parameter implements Supplier { private final TriFunction parser; private final Function initializer; private boolean acceptsNull = false; - private Consumer validator = null; + private List> validators = new ArrayList<>(); private Serializer serializer = XContentBuilder::field; private SerializerCheck serializerCheck = (includeDefaults, isConfigured, value) -> includeDefaults || isConfigured; private Function conflictSerializer = Objects::toString; @@ -683,10 +683,11 @@ public Parameter deprecated() { } /** - * Adds validation to a parameter, called after parsing and merging + * Adds validation to a parameter, called after parsing and merging. Multiple + * validators can be added and all of them will be executed. */ - public Parameter setValidator(Consumer validator) { - this.validator = validator; + public Parameter addValidator(Consumer validator) { + this.validators.add(validator); return this; } @@ -743,8 +744,9 @@ public Parameter precludesParameters(Parameter... ps) { } void validate() { - if (validator != null) { - validator.accept(getValue()); + // Iterate over the list of validators and execute them one by one. + for (Consumer v : validators) { + v.accept(getValue()); } if (this.isConfigured()) { for (Parameter p : requires) { @@ -895,7 +897,7 @@ public static Parameter restrictedStringParam(String name, boolean updat assert values.length > 0; Set acceptedValues = new LinkedHashSet<>(Arrays.asList(values)); return stringParam(name, updateable, initializer, values[0]) - .setValidator(v -> { + .addValidator(v -> { if (acceptedValues.contains(v)) { return; } @@ -1079,7 +1081,7 @@ protected void addScriptValidation( Parameter indexParam, Parameter docValuesParam ) { - scriptParam.setValidator(s -> { + scriptParam.addValidator(s -> { if (s != null && indexParam.get() == false && docValuesParam.get() == false) { throw new MapperParsingException("Cannot define script on field with index:false and doc_values:false"); } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/IpFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/IpFieldMapper.java index a95556a13bf87..a464ed9da9cd3 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/IpFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/IpFieldMapper.java @@ -90,7 +90,7 @@ public Builder(String name, ScriptCompiler scriptCompiler, boolean ignoreMalform this.script.precludesParameters(nullValue, ignoreMalformed); addScriptValidation(script, indexed, hasDocValues); this.dimension = Parameter.boolParam("dimension", false, m -> toType(m).dimension, false) - .setValidator(v -> { + .addValidator(v -> { if (v && (indexed.getValue() == false || hasDocValues.getValue() == false)) { throw new IllegalArgumentException( "Field [dimension] requires that [" + indexed.name + "] and [" + hasDocValues.name + "] are true" diff --git a/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java index 530acfa8a917a..e08ad09eb875a 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java +++ 
b/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java @@ -124,7 +124,7 @@ public Builder(String name, IndexAnalyzers indexAnalyzers, ScriptCompiler script this.script.precludesParameters(nullValue); addScriptValidation(script, indexed, hasDocValues); - this.dimension = Parameter.boolParam("dimension", false, m -> toType(m).dimension, false).setValidator(v -> { + this.dimension = Parameter.boolParam("dimension", false, m -> toType(m).dimension, false).addValidator(v -> { if (v && (indexed.getValue() == false || hasDocValues.getValue() == false)) { throw new IllegalArgumentException( "Field [dimension] requires that [" + indexed.name + "] and [" + hasDocValues.name + "] are true" diff --git a/server/src/main/java/org/elasticsearch/index/mapper/LegacyGeoShapeFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/LegacyGeoShapeFieldMapper.java index 08cf9308533d8..edf921fd5b72f 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/LegacyGeoShapeFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/LegacyGeoShapeFieldMapper.java @@ -179,7 +179,7 @@ public Builder(String name, Version version, boolean ignoreMalformedByDefault, b this.ignoreMalformed = ignoreMalformedParam(m -> builder(m).ignoreMalformed.get(), ignoreMalformedByDefault); this.coerce = coerceParam(m -> builder(m).coerce.get(), coerceByDefault); - this.pointsOnly.setValidator(v -> { + this.pointsOnly.addValidator(v -> { if (v == null) { return; } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java index e66f829c9cfc9..e0967504bbf80 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java @@ -115,7 +115,7 @@ public Builder(String name, NumberType type, ScriptCompiler compiler, boolean ig (n, c, o) -> o == null ? 
null : type.parse(o, false), m -> toType(m).nullValue).acceptsNull(); this.dimension = Parameter.boolParam("dimension", false, m -> toType(m).dimension, false) - .setValidator(v -> { + .addValidator(v -> { if (v && EnumSet.of(NumberType.INTEGER, NumberType.LONG, NumberType.BYTE, NumberType.SHORT).contains(type) == false) { throw new IllegalArgumentException("Parameter [dimension] cannot be set to numeric type [" + type.name + "]"); } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/TextParams.java b/server/src/main/java/org/elasticsearch/index/mapper/TextParams.java index 2c82ccfcafc0f..3061c67cf694c 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/TextParams.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/TextParams.java @@ -41,7 +41,7 @@ public Analyzers(IndexAnalyzers indexAnalyzers, m -> analyzerInitFunction.apply(m).indexAnalyzer.get(), indexAnalyzers::getDefaultIndexAnalyzer) .setSerializerCheck((id, ic, a) -> id || ic || Objects.equals(a, getSearchAnalyzer()) == false || Objects.equals(a, getSearchQuoteAnalyzer()) == false) - .setValidator(a -> a.checkAllowedInMode(AnalysisMode.INDEX_TIME)); + .addValidator(a -> a.checkAllowedInMode(AnalysisMode.INDEX_TIME)); this.searchAnalyzer = Parameter.analyzerParam("search_analyzer", true, m -> m.fieldType().getTextSearchInfo().getSearchAnalyzer(), () -> { @@ -54,7 +54,7 @@ public Analyzers(IndexAnalyzers indexAnalyzers, return indexAnalyzer.get(); }) .setSerializerCheck((id, ic, a) -> id || ic || Objects.equals(a, getSearchQuoteAnalyzer()) == false) - .setValidator(a -> a.checkAllowedInMode(AnalysisMode.SEARCH_TIME)); + .addValidator(a -> a.checkAllowedInMode(AnalysisMode.SEARCH_TIME)); this.searchQuoteAnalyzer = Parameter.analyzerParam("search_quote_analyzer", true, m -> m.fieldType().getTextSearchInfo().getSearchQuoteAnalyzer(), () -> { @@ -66,10 +66,10 @@ public Analyzers(IndexAnalyzers indexAnalyzers, } return searchAnalyzer.get(); }) - .setValidator(a -> a.checkAllowedInMode(AnalysisMode.SEARCH_TIME)); + .addValidator(a -> a.checkAllowedInMode(AnalysisMode.SEARCH_TIME)); this.positionIncrementGap = Parameter.intParam("position_increment_gap", false, m -> analyzerInitFunction.apply(m).positionIncrementGap.get(), TextFieldMapper.Defaults.POSITION_INCREMENT_GAP) - .setValidator(v -> { + .addValidator(v -> { if (v < 0) { throw new MapperParsingException("[position_increment_gap] must be positive, got [" + v + "]"); } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/flattened/FlattenedFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/flattened/FlattenedFieldMapper.java index a67719a8f79f5..ab01c7225a1aa 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/flattened/FlattenedFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/flattened/FlattenedFieldMapper.java @@ -113,7 +113,7 @@ public static class Builder extends FieldMapper.Builder { final Parameter depthLimit = Parameter.intParam("depth_limit", true, m -> builder(m).depthLimit.get(), Defaults.DEPTH_LIMIT) - .setValidator(v -> { + .addValidator(v -> { if (v < 0) { throw new IllegalArgumentException("[depth_limit] must be positive, got [" + v + "]"); } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/ParametrizedMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/ParametrizedMapperTests.java index 68b2ef1fe0917..de08ff84cf013 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/ParametrizedMapperTests.java +++ 
b/server/src/test/java/org/elasticsearch/index/mapper/ParametrizedMapperTests.java @@ -94,11 +94,16 @@ public static class Builder extends FieldMapper.Builder { }, m -> toType(m).wrapper).setSerializer((b, n, v) -> b.field(n, v.name), v -> "wrapper_" + v.name); final Parameter intValue = Parameter.intParam("int_value", true, m -> toType(m).intValue, 5) - .setValidator(n -> { + .addValidator(n -> { if (n > 50) { throw new IllegalArgumentException("Value of [n] cannot be greater than 50"); } }) + .addValidator(n -> { + if (n < 0) { + throw new IllegalArgumentException("Value of [n] cannot be less than 0"); + } + }) .setMergeValidator((o, n, c) -> n >= o); final Parameter analyzer = Parameter.analyzerParam("analyzer", false, m -> toType(m).analyzer, () -> Lucene.KEYWORD_ANALYZER); @@ -106,7 +111,7 @@ public static class Builder extends FieldMapper.Builder { = Parameter.analyzerParam("search_analyzer", true, m -> toType(m).searchAnalyzer, analyzer::getValue); final Parameter index = Parameter.boolParam("index", false, m -> toType(m).index, true); final Parameter required = Parameter.stringParam("required", true, m -> toType(m).required, null) - .setValidator(value -> { + .addValidator(value -> { if (value == null) { throw new IllegalArgumentException("field [required] must be specified"); } @@ -369,6 +374,10 @@ public void testParameterValidation() { IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> fromMapping("{\"type\":\"test_mapper\",\"int_value\":60,\"required\":\"value\"}")); assertEquals("Value of [n] cannot be greater than 50", e.getMessage()); + + IllegalArgumentException e2 = expectThrows(IllegalArgumentException.class, + () -> fromMapping("{\"type\":\"test_mapper\",\"int_value\":-60,\"required\":\"value\"}")); + assertEquals("Value of [n] cannot be less than 0", e2.getMessage()); } // test deprecations diff --git a/x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/mapper/AggregateDoubleMetricFieldMapper.java b/x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/mapper/AggregateDoubleMetricFieldMapper.java index 8c428794d5d41..6f699ded7eb8b 100644 --- a/x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/mapper/AggregateDoubleMetricFieldMapper.java +++ b/x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/mapper/AggregateDoubleMetricFieldMapper.java @@ -124,7 +124,7 @@ public static class Builder extends FieldMapper.Builder { } } return parsedMetrics; - }, m -> toType(m).metrics).setValidator(v -> { + }, m -> toType(m).metrics).addValidator(v -> { if (v == null || v.isEmpty()) { throw new IllegalArgumentException("Property [" + Names.METRICS + "] is required for field [" + name() + "]."); } diff --git a/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/mapper/DenseVectorFieldMapper.java b/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/mapper/DenseVectorFieldMapper.java index 3b0c5585ebc2a..fd85b32b0b489 100644 --- a/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/mapper/DenseVectorFieldMapper.java +++ b/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/mapper/DenseVectorFieldMapper.java @@ -55,7 +55,7 @@ public static class Builder extends FieldMapper.Builder { Parameter dims = new Parameter<>("dims", false, () -> null, (n, c, o) -> XContentMapValues.nodeIntegerValue(o), m -> toType(m).dims) - .setValidator(dims -> { + 
.addValidator(dims -> { if (dims == null) { throw new MapperParsingException("Missing required parameter [dims] for field [" + name + "]"); } diff --git a/x-pack/plugin/wildcard/src/main/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapper.java b/x-pack/plugin/wildcard/src/main/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapper.java index 4cf488c790e2b..46d477cc54635 100644 --- a/x-pack/plugin/wildcard/src/main/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapper.java +++ b/x-pack/plugin/wildcard/src/main/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapper.java @@ -193,7 +193,7 @@ public static class Builder extends FieldMapper.Builder { final Parameter ignoreAbove = Parameter.intParam("ignore_above", true, m -> toType(m).ignoreAbove, Defaults.IGNORE_ABOVE) - .setValidator(v -> { + .addValidator(v -> { if (v < 0) { throw new IllegalArgumentException("[ignore_above] must be positive, got [" + v + "]"); } @@ -333,7 +333,7 @@ public Query wildcardQuery(String wildcardPattern, RewriteMethod method, boolean if (clauseCount > 0) { // We can accelerate execution with the ngram query BooleanQuery approxQuery = rewritten.build(); - return new BinaryDvConfirmedAutomatonQuery(approxQuery, name(), wildcardPattern, automaton); + return new BinaryDvConfirmedAutomatonQuery(approxQuery, name(), wildcardPattern, automaton); } else if (numWildcardChars == 0 || numWildcardStrings > 0) { // We have no concrete characters and we're not a pure length query e.g. ??? return new DocValuesFieldExistsQuery(name()); @@ -365,11 +365,11 @@ public Query regexpQuery(String value, int syntaxFlags, int matchFlags, int maxD // MatchAllButRequireVerificationQuery is a special case meaning the regex is reduced to a single // clause which we can't accelerate at all and needs verification. Example would be ".." 
if (approxNgramQuery instanceof MatchAllButRequireVerificationQuery) { - return new BinaryDvConfirmedAutomatonQuery(new MatchAllDocsQuery(), name(), value, automaton); + return new BinaryDvConfirmedAutomatonQuery(new MatchAllDocsQuery(), name(), value, automaton); } // We can accelerate execution with the ngram query - return new BinaryDvConfirmedAutomatonQuery(approxNgramQuery, name(), value, automaton); + return new BinaryDvConfirmedAutomatonQuery(approxNgramQuery, name(), value, automaton); } // Convert a regular expression to a simplified query consisting of BooleanQuery and TermQuery objects @@ -740,9 +740,9 @@ public Query rangeQuery( if (accelerationQuery == null) { return new BinaryDvConfirmedAutomatonQuery(new MatchAllDocsQuery(), - name(), lower + "-" + upper, automaton); + name(), lower + "-" + upper, automaton); } - return new BinaryDvConfirmedAutomatonQuery(accelerationQuery, name(), lower + "-" + upper, automaton); + return new BinaryDvConfirmedAutomatonQuery(accelerationQuery, name(), lower + "-" + upper, automaton); } @Override @@ -822,10 +822,10 @@ public Query fuzzyQuery( ); if (ngramQ.clauses().size() == 0) { return new BinaryDvConfirmedAutomatonQuery(new MatchAllDocsQuery(), - name(), searchTerm, fq.getAutomata().automaton); + name(), searchTerm, fq.getAutomata().automaton); } - return new BinaryDvConfirmedAutomatonQuery(ngramQ, name(), searchTerm, fq.getAutomata().automaton); + return new BinaryDvConfirmedAutomatonQuery(ngramQ, name(), searchTerm, fq.getAutomata().automaton); } catch (IOException ioe) { throw new ElasticsearchParseException("Error parsing wildcard field fuzzy string [" + searchTerm + "]"); } From 648a7aefbeda34d2e5b98583037aa52ae3c086f0 Mon Sep 17 00:00:00 2001 From: Ignacio Vera Date: Wed, 1 Sep 2021 07:05:56 +0200 Subject: [PATCH 048/128] Replace Lucene DataInput/DataOutput with Elasticsearch StreamInput/StreamOutput when reading/writing geo_shape doc values (#76162) This commit introduces a new ByteArrayStreamInput that allows reusability and random reads. 
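As a rough usage sketch (not part of this commit), the reuse and random-access behaviour the message refers to might look like the following; the example class name and sample values are made up for illustration, and the snippet only calls methods that the new ByteArrayStreamInput defines in the diff below.

import org.elasticsearch.common.io.stream.ByteArrayStreamInput;

public class ByteArrayStreamInputSketch {
    public static void main(String[] args) {
        // One instance can be reset over different byte arrays instead of
        // allocating a new stream wrapper per payload.
        ByteArrayStreamInput in = new ByteArrayStreamInput();

        byte[] first = new byte[] { 1, 2, 3, 4 };
        in.reset(first);
        byte b = in.readByte();          // sequential read -> 1
        in.skipBytes(2);                 // skip over 2 and 3
        int remaining = in.available();  // 1 byte left

        // Random access: rewind and read the whole payload again.
        in.setPosition(0);
        byte[] copy = new byte[4];
        in.readBytes(copy, 0, 4);

        // Reuse the same instance for a slice of a different array.
        byte[] second = new byte[] { 7, 8, 9 };
        in.reset(second, 1, 2);          // view over bytes 8 and 9
        System.out.println(b + " " + remaining + " " + in.getPosition());
    }
}

Resetting a single instance in this way is what the GeometryDocValueReader change further down appears to rely on when it re-reads doc values per document.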
--- .../io/stream/ByteArrayStreamInput.java | 88 ++++++++++++ ...eamTests.java => AbstractStreamTests.java} | 132 ++++++++---------- .../io/stream/ByteArrayStreamInputTests.java | 22 +++ .../io/stream/ByteBufferStreamInputTests.java | 23 +++ .../BytesReferenceStreamInputTests.java | 20 +++ .../stream/InputStreamStreamInputTests.java | 20 +++ .../index/fielddata/DimensionalShapeType.java | 8 +- .../xpack/spatial/index/fielddata/Extent.java | 8 +- .../index/fielddata/GeoShapeValues.java | 10 +- .../fielddata/GeometryDocValueReader.java | 20 +-- .../fielddata/GeometryDocValueWriter.java | 6 +- .../index/fielddata/TriangleTreeReader.java | 19 +-- .../index/fielddata/TriangleTreeWriter.java | 21 +-- .../geogrid/AbstractGeoHashGridTiler.java | 13 +- .../geogrid/AbstractGeoTileGridTiler.java | 10 +- .../bucket/geogrid/GeoGridTiler.java | 4 +- .../index/fielddata/Tile2DVisitorTests.java | 2 +- .../index/fielddata/TriangleTreeTests.java | 11 +- .../bucket/geogrid/GeoHashTilerTests.java | 3 +- .../xpack/spatial/util/GeoTestUtils.java | 2 +- 20 files changed, 310 insertions(+), 132 deletions(-) create mode 100644 server/src/main/java/org/elasticsearch/common/io/stream/ByteArrayStreamInput.java rename server/src/test/java/org/elasticsearch/common/io/stream/{StreamTests.java => AbstractStreamTests.java} (85%) create mode 100644 server/src/test/java/org/elasticsearch/common/io/stream/ByteArrayStreamInputTests.java create mode 100644 server/src/test/java/org/elasticsearch/common/io/stream/ByteBufferStreamInputTests.java create mode 100644 server/src/test/java/org/elasticsearch/common/io/stream/BytesReferenceStreamInputTests.java create mode 100644 server/src/test/java/org/elasticsearch/common/io/stream/InputStreamStreamInputTests.java diff --git a/server/src/main/java/org/elasticsearch/common/io/stream/ByteArrayStreamInput.java b/server/src/main/java/org/elasticsearch/common/io/stream/ByteArrayStreamInput.java new file mode 100644 index 0000000000000..eb33a0b11fdbd --- /dev/null +++ b/server/src/main/java/org/elasticsearch/common/io/stream/ByteArrayStreamInput.java @@ -0,0 +1,88 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.common.io.stream; + +import org.apache.lucene.util.BytesRef; + +import java.io.EOFException; +import java.io.IOException; + +/** + * Resettable {@link StreamInput} that wraps a byte array. It is heavily inspired in Lucene's + * {@link org.apache.lucene.store.ByteArrayDataInput}. 
+ */ +public class ByteArrayStreamInput extends StreamInput { + + private byte[] bytes; + private int pos; + private int limit; + + public ByteArrayStreamInput() { + reset(BytesRef.EMPTY_BYTES); + } + + @Override + public int read() throws IOException { + return readByte() & 0xFF; + } + + public void reset(byte[] bytes) { + reset(bytes, 0, bytes.length); + } + + public int getPosition() { + return pos; + } + + public void setPosition(int pos) { + this.pos = pos; + } + + public void reset(byte[] bytes, int offset, int len) { + this.bytes = bytes; + pos = offset; + limit = offset + len; + } + + public int length() { + return limit; + } + + public void skipBytes(long count) { + pos += count; + } + + @Override + public void close() { + // No-op + } + + @Override + public int available() { + return limit - pos; + } + + @Override + protected void ensureCanReadBytes(int length) throws EOFException { + if (pos + length > limit) { + throw new EOFException("tried to read: " + length + " bytes but only " + available() + " remaining"); + } + } + + @Override + public byte readByte() { + return bytes[pos++]; + } + + @Override + public void readBytes(byte[] b, int offset, int len) { + System.arraycopy(bytes, pos, b, offset, len); + pos += len; + } +} diff --git a/server/src/test/java/org/elasticsearch/common/io/stream/StreamTests.java b/server/src/test/java/org/elasticsearch/common/io/stream/AbstractStreamTests.java similarity index 85% rename from server/src/test/java/org/elasticsearch/common/io/stream/StreamTests.java rename to server/src/test/java/org/elasticsearch/common/io/stream/AbstractStreamTests.java index 95d27214c0517..c5d5c41820d83 100644 --- a/server/src/test/java/org/elasticsearch/common/io/stream/StreamTests.java +++ b/server/src/test/java/org/elasticsearch/common/io/stream/AbstractStreamTests.java @@ -10,17 +10,17 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.CheckedBiConsumer; -import org.elasticsearch.core.CheckedConsumer; -import org.elasticsearch.core.CheckedFunction; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.core.Tuple; import org.elasticsearch.common.settings.SecureString; +import org.elasticsearch.core.CheckedConsumer; +import org.elasticsearch.core.CheckedFunction; +import org.elasticsearch.core.Tuple; import org.elasticsearch.test.ESTestCase; -import java.io.ByteArrayInputStream; import java.io.EOFException; import java.io.IOException; +import java.nio.ByteBuffer; import java.time.Instant; import java.util.ArrayList; import java.util.Arrays; @@ -45,7 +45,9 @@ import static org.hamcrest.Matchers.iterableWithSize; import static org.hamcrest.Matchers.nullValue; -public class StreamTests extends ESTestCase { +public abstract class AbstractStreamTests extends ESTestCase { + + protected abstract StreamInput getStreamInput(BytesReference bytesReference) throws IOException; public void testBooleanSerialization() throws IOException { final BytesStreamOutput output = new BytesStreamOutput(); @@ -60,7 +62,7 @@ public void testBooleanSerialization() throws IOException { assertThat(bytes[0], equalTo((byte) 0)); assertThat(bytes[1], equalTo((byte) 1)); - final StreamInput input = bytesReference.streamInput(); + final StreamInput input = getStreamInput(bytesReference); assertFalse(input.readBoolean()); assertTrue(input.readBoolean()); @@ -69,7 +71,7 @@ public void testBooleanSerialization() throws IOException { set.remove((byte) 1); final byte[] corruptBytes = new 
byte[]{randomFrom(set)}; final BytesReference corrupt = new BytesArray(corruptBytes); - final IllegalStateException e = expectThrows(IllegalStateException.class, () -> corrupt.streamInput().readBoolean()); + final IllegalStateException e = expectThrows(IllegalStateException.class, () -> getStreamInput(corrupt).readBoolean()); final String message = String.format(Locale.ROOT, "unexpected byte [0x%02x]", corruptBytes[0]); assertThat(e, hasToString(containsString(message))); } @@ -89,7 +91,7 @@ public void testOptionalBooleanSerialization() throws IOException { assertThat(bytes[1], equalTo((byte) 1)); assertThat(bytes[2], equalTo((byte) 2)); - final StreamInput input = bytesReference.streamInput(); + final StreamInput input = getStreamInput(bytesReference); final Boolean maybeFalse = input.readOptionalBoolean(); assertNotNull(maybeFalse); assertFalse(maybeFalse); @@ -104,7 +106,7 @@ public void testOptionalBooleanSerialization() throws IOException { set.remove((byte) 2); final byte[] corruptBytes = new byte[]{randomFrom(set)}; final BytesReference corrupt = new BytesArray(corruptBytes); - final IllegalStateException e = expectThrows(IllegalStateException.class, () -> corrupt.streamInput().readOptionalBoolean()); + final IllegalStateException e = expectThrows(IllegalStateException.class, () -> getStreamInput(corrupt).readOptionalBoolean()); final String message = String.format(Locale.ROOT, "unexpected byte [0x%02x]", corruptBytes[0]); assertThat(e, hasToString(containsString(message))); } @@ -114,7 +116,7 @@ public void testRandomVLongSerialization() throws IOException { long write = randomLong(); BytesStreamOutput out = new BytesStreamOutput(); out.writeZLong(write); - long read = out.bytes().streamInput().readZLong(); + long read = getStreamInput(out.bytes()).readZLong(); assertEquals(write, read); } } @@ -136,7 +138,7 @@ public void testSpecificVLongSerialization() throws IOException { out.writeZLong(value.v1()); assertArrayEquals(Long.toString(value.v1()), value.v2(), BytesReference.toBytes(out.bytes())); BytesReference bytes = new BytesArray(value.v2()); - assertEquals(Arrays.toString(value.v2()), (long) value.v1(), bytes.streamInput().readZLong()); + assertEquals(Arrays.toString(value.v2()), (long) value.v1(), getStreamInput(bytes).readZLong()); } } @@ -161,7 +163,7 @@ public void testLinkedHashMap() throws IOException { BytesStreamOutput out = new BytesStreamOutput(); out.writeGenericValue(write); @SuppressWarnings("unchecked") - LinkedHashMap read = (LinkedHashMap) out.bytes().streamInput().readGenericValue(); + LinkedHashMap read = (LinkedHashMap) getStreamInput(out.bytes()).readGenericValue(); assertEquals(size, read.size()); int index = 0; for (Map.Entry entry : read.entrySet()) { @@ -171,48 +173,6 @@ public void testLinkedHashMap() throws IOException { } } - public void testFilterStreamInputDelegatesAvailable() throws IOException { - final int length = randomIntBetween(1, 1024); - StreamInput delegate = StreamInput.wrap(new byte[length]); - - FilterStreamInput filterInputStream = new FilterStreamInput(delegate) { - }; - assertEquals(filterInputStream.available(), length); - - // read some bytes - final int bytesToRead = randomIntBetween(1, length); - filterInputStream.readBytes(new byte[bytesToRead], 0, bytesToRead); - assertEquals(filterInputStream.available(), length - bytesToRead); - } - - public void testInputStreamStreamInputDelegatesAvailable() throws IOException { - final int length = randomIntBetween(1, 1024); - ByteArrayInputStream is = new ByteArrayInputStream(new 
byte[length]); - InputStreamStreamInput streamInput = new InputStreamStreamInput(is); - assertEquals(streamInput.available(), length); - - // read some bytes - final int bytesToRead = randomIntBetween(1, length); - streamInput.readBytes(new byte[bytesToRead], 0, bytesToRead); - assertEquals(streamInput.available(), length - bytesToRead); - } - - public void testReadArraySize() throws IOException { - BytesStreamOutput stream = new BytesStreamOutput(); - byte[] array = new byte[randomIntBetween(1, 10)]; - for (int i = 0; i < array.length; i++) { - array[i] = randomByte(); - } - stream.writeByteArray(array); - InputStreamStreamInput streamInput = new InputStreamStreamInput(StreamInput.wrap(BytesReference.toBytes(stream.bytes())), array - .length - 1); - expectThrows(EOFException.class, streamInput::readByteArray); - streamInput = new InputStreamStreamInput(StreamInput.wrap(BytesReference.toBytes(stream.bytes())), BytesReference.toBytes(stream - .bytes()).length); - - assertArrayEquals(array, streamInput.readByteArray()); - } - public void testWritableArrays() throws IOException { final String[] strings = generateRandomStringArray(10, 10, false, true); WriteableString[] sourceArray = Arrays.stream(strings).map(WriteableString::new).toArray(WriteableString[]::new); @@ -224,10 +184,10 @@ public void testWritableArrays() throws IOException { sourceArray = null; } out.writeOptionalArray(sourceArray); - targetArray = out.bytes().streamInput().readOptionalArray(WriteableString::new, WriteableString[]::new); + targetArray = getStreamInput(out.bytes()).readOptionalArray(WriteableString::new, WriteableString[]::new); } else { out.writeArray(sourceArray); - targetArray = out.bytes().streamInput().readArray(WriteableString::new, WriteableString[]::new); + targetArray = getStreamInput(out.bytes()).readArray(WriteableString::new, WriteableString[]::new); } assertThat(targetArray, equalTo(sourceArray)); @@ -246,11 +206,11 @@ public void testArrays() throws IOException { strings = generateRandomStringArray(10, 10, false, true); } out.writeOptionalArray(writer, strings); - deserialized = out.bytes().streamInput().readOptionalArray(reader, String[]::new); + deserialized = getStreamInput(out.bytes()).readOptionalArray(reader, String[]::new); } else { strings = generateRandomStringArray(10, 10, false, true); out.writeArray(writer, strings); - deserialized = out.bytes().streamInput().readArray(reader, String[]::new); + deserialized = getStreamInput(out.bytes()).readArray(reader, String[]::new); } assertThat(deserialized, equalTo(strings)); } @@ -293,7 +253,7 @@ public int hashCode() { } runWriteReadCollectionTest( - () -> new FooBar(randomInt(), randomInt()), StreamOutput::writeCollection, in -> in.readList(FooBar::new)); + () -> new FooBar(randomInt(), randomInt()), StreamOutput::writeCollection, in -> in.readList(FooBar::new)); } public void testStringCollection() throws IOException { @@ -301,9 +261,9 @@ public void testStringCollection() throws IOException { } private void runWriteReadCollectionTest( - final Supplier supplier, - final CheckedBiConsumer, IOException> writer, - final CheckedFunction, IOException> reader) throws IOException { + final Supplier supplier, + final CheckedBiConsumer, IOException> writer, + final CheckedFunction, IOException> reader) throws IOException { final int length = randomIntBetween(0, 10); final Collection collection = new ArrayList<>(length); for (int i = 0; i < length; i++) { @@ -311,7 +271,7 @@ private void runWriteReadCollectionTest( } try (BytesStreamOutput out = new 
BytesStreamOutput()) { writer.accept(out, collection); - try (StreamInput in = out.bytes().streamInput()) { + try (StreamInput in = getStreamInput(out.bytes())) { assertThat(collection, equalTo(reader.apply(in))); } } @@ -328,7 +288,7 @@ public void testSetOfLongs() throws IOException { final BytesStreamOutput out = new BytesStreamOutput(); out.writeCollection(sourceSet, StreamOutput::writeLong); - final Set targetSet = out.bytes().streamInput().readSet(StreamInput::readLong); + final Set targetSet = getStreamInput(out.bytes()).readSet(StreamInput::readLong); assertThat(targetSet, equalTo(sourceSet)); } @@ -336,7 +296,7 @@ public void testInstantSerialization() throws IOException { final Instant instant = Instant.now(); try (BytesStreamOutput out = new BytesStreamOutput()) { out.writeInstant(instant); - try (StreamInput in = out.bytes().streamInput()) { + try (StreamInput in = getStreamInput(out.bytes())) { final Instant serialized = in.readInstant(); assertEquals(instant, serialized); } @@ -347,7 +307,7 @@ public void testOptionalInstantSerialization() throws IOException { final Instant instant = Instant.now(); try (BytesStreamOutput out = new BytesStreamOutput()) { out.writeOptionalInstant(instant); - try (StreamInput in = out.bytes().streamInput()) { + try (StreamInput in = getStreamInput(out.bytes())) { final Instant serialized = in.readOptionalInstant(); assertEquals(instant, serialized); } @@ -356,7 +316,7 @@ public void testOptionalInstantSerialization() throws IOException { final Instant missing = null; try (BytesStreamOutput out = new BytesStreamOutput()) { out.writeOptionalInstant(missing); - try (StreamInput in = out.bytes().streamInput()) { + try (StreamInput in = getStreamInput(out.bytes())) { final Instant serialized = in.readOptionalInstant(); assertEquals(missing, serialized); } @@ -406,7 +366,7 @@ public void testSecureStringSerialization() throws IOException { output.writeSecureString(secureString); final BytesReference bytesReference = output.bytes(); - final StreamInput input = bytesReference.streamInput(); + final StreamInput input = getStreamInput(bytesReference); assertThat(secureString, is(equalTo(input.readSecureString()))); } @@ -416,7 +376,7 @@ public void testSecureStringSerialization() throws IOException { output.writeOptionalSecureString(secureString); final BytesReference bytesReference = output.bytes(); - final StreamInput input = bytesReference.streamInput(); + final StreamInput input = getStreamInput(bytesReference); if (secureString != null) { assertThat(input.readOptionalSecureString(), is(equalTo(secureString))); @@ -435,6 +395,36 @@ public void testGenericSet() throws IOException { assertGenericRoundtrip(new LinkedHashSet<>(list)); } + public void testReadArraySize() throws IOException { + BytesStreamOutput stream = new BytesStreamOutput(); + byte[] array = new byte[randomIntBetween(1, 10)]; + for (int i = 0; i < array.length; i++) { + array[i] = randomByte(); + } + stream.writeByteArray(array); + StreamInput streamInput = new InputStreamStreamInput(getStreamInput(stream.bytes()), array + .length - 1); + expectThrows(EOFException.class, streamInput::readByteArray); + streamInput = new InputStreamStreamInput(getStreamInput(stream.bytes()), BytesReference.toBytes(stream + .bytes()).length); + + assertArrayEquals(array, streamInput.readByteArray()); + } + + public void testFilterStreamInputDelegatesAvailable() throws IOException { + final int length = randomIntBetween(1, 1024); + StreamInput delegate = 
getStreamInput(BytesReference.fromByteBuffer(ByteBuffer.wrap(new byte[length]))); + + FilterStreamInput filterInputStream = new FilterStreamInput(delegate) { + }; + assertEquals(filterInputStream.available(), length); + + // read some bytes + final int bytesToRead = randomIntBetween(1, length); + filterInputStream.readBytes(new byte[bytesToRead], 0, bytesToRead); + assertEquals(filterInputStream.available(), length - bytesToRead); + } + private static class Unwriteable {} private void assertNotWriteable(Object o, Class type) { @@ -470,8 +460,7 @@ private void assertSerialization(CheckedConsumer outp CheckedConsumer inputAssertions) throws IOException { try (BytesStreamOutput output = new BytesStreamOutput()) { outputAssertions.accept(output); - final BytesReference bytesReference = output.bytes(); - final StreamInput input = bytesReference.streamInput(); + final StreamInput input = getStreamInput(output.bytes()); inputAssertions.accept(input); } } @@ -484,5 +473,4 @@ private void assertGenericRoundtrip(Object original) throws IOException { assertThat(read, equalTo(original)); }); } - } diff --git a/server/src/test/java/org/elasticsearch/common/io/stream/ByteArrayStreamInputTests.java b/server/src/test/java/org/elasticsearch/common/io/stream/ByteArrayStreamInputTests.java new file mode 100644 index 0000000000000..13b6312b771bc --- /dev/null +++ b/server/src/test/java/org/elasticsearch/common/io/stream/ByteArrayStreamInputTests.java @@ -0,0 +1,22 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.common.io.stream; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.bytes.BytesReference; + +public class ByteArrayStreamInputTests extends AbstractStreamTests { + @Override + protected StreamInput getStreamInput(BytesReference bytesReference) { + final BytesRef bytesRef = bytesReference.toBytesRef(); + final ByteArrayStreamInput byteArrayStreamInput = new ByteArrayStreamInput(); + byteArrayStreamInput.reset(bytesRef.bytes, bytesRef.offset, bytesRef.length); + return byteArrayStreamInput; + } +} diff --git a/server/src/test/java/org/elasticsearch/common/io/stream/ByteBufferStreamInputTests.java b/server/src/test/java/org/elasticsearch/common/io/stream/ByteBufferStreamInputTests.java new file mode 100644 index 0000000000000..12040f6415064 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/common/io/stream/ByteBufferStreamInputTests.java @@ -0,0 +1,23 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.common.io.stream; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.bytes.BytesReference; + +import java.io.IOException; +import java.nio.ByteBuffer; + +public class ByteBufferStreamInputTests extends AbstractStreamTests { + @Override + protected StreamInput getStreamInput(BytesReference bytesReference) throws IOException { + final BytesRef bytesRef = bytesReference.toBytesRef(); + return new ByteBufferStreamInput(ByteBuffer.wrap(bytesRef.bytes, bytesRef.offset, bytesRef.length)); + } +} diff --git a/server/src/test/java/org/elasticsearch/common/io/stream/BytesReferenceStreamInputTests.java b/server/src/test/java/org/elasticsearch/common/io/stream/BytesReferenceStreamInputTests.java new file mode 100644 index 0000000000000..cd727e247b8f8 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/common/io/stream/BytesReferenceStreamInputTests.java @@ -0,0 +1,20 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.common.io.stream; + +import org.elasticsearch.common.bytes.BytesReference; + +import java.io.IOException; + +public class BytesReferenceStreamInputTests extends AbstractStreamTests { + @Override + protected StreamInput getStreamInput(BytesReference bytesReference) throws IOException { + return bytesReference.streamInput(); + } +} diff --git a/server/src/test/java/org/elasticsearch/common/io/stream/InputStreamStreamInputTests.java b/server/src/test/java/org/elasticsearch/common/io/stream/InputStreamStreamInputTests.java new file mode 100644 index 0000000000000..89503c78a4785 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/common/io/stream/InputStreamStreamInputTests.java @@ -0,0 +1,20 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.common.io.stream; + +import org.elasticsearch.common.bytes.BytesReference; + +import java.io.IOException; + +public class InputStreamStreamInputTests extends AbstractStreamTests { + @Override + protected StreamInput getStreamInput(BytesReference bytesReference) throws IOException { + return new InputStreamStreamInput(StreamInput.wrap(BytesReference.toBytes(bytesReference))); + } +} diff --git a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/fielddata/DimensionalShapeType.java b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/fielddata/DimensionalShapeType.java index f9e1ee39536b5..4cf8895893738 100644 --- a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/fielddata/DimensionalShapeType.java +++ b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/fielddata/DimensionalShapeType.java @@ -7,8 +7,8 @@ package org.elasticsearch.xpack.spatial.index.fielddata; -import org.apache.lucene.store.ByteArrayDataInput; -import org.apache.lucene.store.ByteBuffersDataOutput; +import org.elasticsearch.common.io.stream.ByteArrayStreamInput; +import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.geometry.GeometryCollection; import org.elasticsearch.geometry.ShapeType; @@ -29,11 +29,11 @@ public static DimensionalShapeType fromOrdinalByte(byte ordinal) { return values[Byte.toUnsignedInt(ordinal)]; } - public void writeTo(ByteBuffersDataOutput out) { + public void writeTo(BytesStreamOutput out) { out.writeByte((byte) ordinal()); } - public static DimensionalShapeType readFrom(ByteArrayDataInput in) { + public static DimensionalShapeType readFrom(ByteArrayStreamInput in) { return fromOrdinalByte(in.readByte()); } } diff --git a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/fielddata/Extent.java b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/fielddata/Extent.java index 709aec621ed33..ce6dab1578d91 100644 --- a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/fielddata/Extent.java +++ b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/fielddata/Extent.java @@ -7,8 +7,8 @@ package org.elasticsearch.xpack.spatial.index.fielddata; -import org.apache.lucene.store.ByteArrayDataInput; -import org.apache.lucene.store.ByteBuffersDataOutput; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; import java.io.IOException; import java.util.Objects; @@ -88,7 +88,7 @@ public void addRectangle(int bottomLeftX, int bottomLeftY, int topRightX, int to } } - static void readFromCompressed(ByteArrayDataInput input, Extent extent) { + static void readFromCompressed(StreamInput input, Extent extent) throws IOException { final int top = input.readInt(); final int bottom = Math.toIntExact(top - input.readVLong()); final int negLeft; @@ -133,7 +133,7 @@ static void readFromCompressed(ByteArrayDataInput input, Extent extent) { extent.reset(top, bottom, negLeft, negRight, posLeft, posRight); } - void writeCompressed(ByteBuffersDataOutput output) throws IOException { + void writeCompressed(StreamOutput output) throws IOException { output.writeInt(this.top); output.writeVLong((long) this.top - this.bottom); byte type; diff --git a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/fielddata/GeoShapeValues.java 
b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/fielddata/GeoShapeValues.java index f7a6e7f926060..6e52d1ea3c728 100644 --- a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/fielddata/GeoShapeValues.java +++ b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/fielddata/GeoShapeValues.java @@ -98,7 +98,7 @@ public GeoShapeValue() { /** * reset the geometry. */ - public void reset(BytesRef bytesRef) { + public void reset(BytesRef bytesRef) throws IOException { this.reader.reset(bytesRef); this.boundingBox.reset(reader.getExtent(), CoordinateEncoder.GEO); } @@ -107,7 +107,7 @@ public BoundingBox boundingBox() { return boundingBox; } - public GeoRelation relate(Rectangle rectangle) { + public GeoRelation relate(Rectangle rectangle) throws IOException { int minX = CoordinateEncoder.GEO.encodeX(rectangle.getMinX()); int maxX = CoordinateEncoder.GEO.encodeX(rectangle.getMaxX()); int minY = CoordinateEncoder.GEO.encodeY(rectangle.getMinY()); @@ -121,21 +121,21 @@ public DimensionalShapeType dimensionalShapeType() { return reader.getDimensionalShapeType(); } - public double weight() { + public double weight() throws IOException { return reader.getSumCentroidWeight(); } /** * @return the latitude of the centroid of the shape */ - public double lat() { + public double lat() throws IOException { return CoordinateEncoder.GEO.decodeY(reader.getCentroidY()); } /** * @return the longitude of the centroid of the shape */ - public double lon() { + public double lon() throws IOException { return CoordinateEncoder.GEO.decodeX(reader.getCentroidX()); } diff --git a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/fielddata/GeometryDocValueReader.java b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/fielddata/GeometryDocValueReader.java index 3d91926ab348b..ec19ccb8179cd 100644 --- a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/fielddata/GeometryDocValueReader.java +++ b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/fielddata/GeometryDocValueReader.java @@ -7,8 +7,10 @@ package org.elasticsearch.xpack.spatial.index.fielddata; -import org.apache.lucene.store.ByteArrayDataInput; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.io.stream.ByteArrayStreamInput; + +import java.io.IOException; /** * A reusable Geometry doc value reader for a previous serialized {@link org.elasticsearch.geometry.Geometry} using @@ -34,20 +36,20 @@ * ----------------------------------------- */ public class GeometryDocValueReader { - private final ByteArrayDataInput input; + private final ByteArrayStreamInput input; private final Extent extent; private int treeOffset; private int docValueOffset; public GeometryDocValueReader() { this.extent = new Extent(); - this.input = new ByteArrayDataInput(); + this.input = new ByteArrayStreamInput(); } /** * reset the geometry. */ - public void reset(BytesRef bytesRef) { + public void reset(BytesRef bytesRef) throws IOException { this.input.reset(bytesRef.bytes, bytesRef.offset, bytesRef.length); docValueOffset = bytesRef.offset; treeOffset = 0; @@ -56,7 +58,7 @@ public void reset(BytesRef bytesRef) { /** * returns the {@link Extent} of this geometry. 
*/ - protected Extent getExtent() { + protected Extent getExtent() throws IOException { if (treeOffset == 0) { getSumCentroidWeight(); // skip CENTROID_HEADER + var-long sum-weight Extent.readFromCompressed(input, extent); @@ -70,7 +72,7 @@ protected Extent getExtent() { /** * returns the encoded X coordinate of the centroid. */ - protected int getCentroidX() { + protected int getCentroidX() throws IOException { input.setPosition(docValueOffset + 0); return input.readInt(); } @@ -78,7 +80,7 @@ protected int getCentroidX() { /** * returns the encoded Y coordinate of the centroid. */ - protected int getCentroidY() { + protected int getCentroidY() throws IOException { input.setPosition(docValueOffset + 4); return input.readInt(); } @@ -88,7 +90,7 @@ protected DimensionalShapeType getDimensionalShapeType() { return DimensionalShapeType.readFrom(input); } - protected double getSumCentroidWeight() { + protected double getSumCentroidWeight() throws IOException { input.setPosition(docValueOffset + 9); return Double.longBitsToDouble(input.readVLong()); } @@ -96,7 +98,7 @@ protected double getSumCentroidWeight() { /** * Visit the triangle tree with the provided visitor */ - public void visit(TriangleTreeReader.Visitor visitor) { + public void visit(TriangleTreeReader.Visitor visitor) throws IOException { Extent extent = getExtent(); int thisMaxX = extent.maxX(); int thisMinX = extent.minX(); diff --git a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/fielddata/GeometryDocValueWriter.java b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/fielddata/GeometryDocValueWriter.java index d44f99c3dfd3b..393ec9ccbee06 100644 --- a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/fielddata/GeometryDocValueWriter.java +++ b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/fielddata/GeometryDocValueWriter.java @@ -8,8 +8,8 @@ package org.elasticsearch.xpack.spatial.index.fielddata; import org.apache.lucene.index.IndexableField; -import org.apache.lucene.store.ByteBuffersDataOutput; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.io.stream.BytesStreamOutput; import java.io.IOException; import java.util.List; @@ -27,13 +27,13 @@ private GeometryDocValueWriter() { public static BytesRef write(List fields, CoordinateEncoder coordinateEncoder, CentroidCalculator centroidCalculator) throws IOException { - final ByteBuffersDataOutput out = new ByteBuffersDataOutput(); + final BytesStreamOutput out = new BytesStreamOutput(); // normalization may be required due to floating point precision errors out.writeInt(coordinateEncoder.encodeX(coordinateEncoder.normalizeX(centroidCalculator.getX()))); out.writeInt(coordinateEncoder.encodeY(coordinateEncoder.normalizeY(centroidCalculator.getY()))); centroidCalculator.getDimensionalShapeType().writeTo(out); out.writeVLong(Double.doubleToLongBits(centroidCalculator.sumWeight())); TriangleTreeWriter.writeTo(out, fields); - return new BytesRef(out.toArrayCopy(), 0, Math.toIntExact(out.size())); + return out.bytes().toBytesRef(); } } diff --git a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/fielddata/TriangleTreeReader.java b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/fielddata/TriangleTreeReader.java index 530b1345672e1..24d791f513fe3 100644 --- a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/fielddata/TriangleTreeReader.java +++ 
b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/fielddata/TriangleTreeReader.java @@ -7,7 +7,9 @@ package org.elasticsearch.xpack.spatial.index.fielddata; -import org.apache.lucene.store.ByteArrayDataInput; +import org.elasticsearch.common.io.stream.ByteArrayStreamInput; + +import java.io.IOException; /** * A tree reader for a previous serialized {@link org.elasticsearch.geometry.Geometry} using @@ -24,12 +26,13 @@ private TriangleTreeReader() { /** * Visit the Triangle tree using the {@link Visitor} provided. */ - public static void visit(ByteArrayDataInput input, TriangleTreeReader.Visitor visitor, int thisMaxX, int thisMaxY) { + public static void visit(ByteArrayStreamInput input, TriangleTreeReader.Visitor visitor, int thisMaxX, int thisMaxY) + throws IOException { visit(input, visitor, true, thisMaxX, thisMaxY, true); } - private static boolean visit(ByteArrayDataInput input, TriangleTreeReader.Visitor visitor, - boolean splitX, int thisMaxX, int thisMaxY, boolean isRoot) { + private static boolean visit(ByteArrayStreamInput input, TriangleTreeReader.Visitor visitor, + boolean splitX, int thisMaxX, int thisMaxY, boolean isRoot) throws IOException { byte metadata = input.readByte(); int thisMinX; int thisMinY; @@ -82,8 +85,8 @@ private static boolean visit(ByteArrayDataInput input, TriangleTreeReader.Visito return visitor.push(); } - private static boolean pushLeft(ByteArrayDataInput input, TriangleTreeReader.Visitor visitor, - int thisMaxX, int thisMaxY, boolean splitX) { + private static boolean pushLeft(ByteArrayStreamInput input, TriangleTreeReader.Visitor visitor, + int thisMaxX, int thisMaxY, boolean splitX) throws IOException { int nextMaxX = Math.toIntExact(thisMaxX - input.readVLong()); int nextMaxY = Math.toIntExact(thisMaxY - input.readVLong()); int size = input.readVInt(); @@ -95,8 +98,8 @@ private static boolean pushLeft(ByteArrayDataInput input, TriangleTreeReader.Vis } } - private static boolean pushRight(ByteArrayDataInput input, TriangleTreeReader.Visitor visitor, int thisMaxX, - int thisMaxY, int thisMinX, int thisMinY, boolean splitX, int rightSize) { + private static boolean pushRight(ByteArrayStreamInput input, TriangleTreeReader.Visitor visitor, int thisMaxX, + int thisMaxY, int thisMinX, int thisMinY, boolean splitX, int rightSize) throws IOException { if ((splitX == false && visitor.pushY(thisMinY)) || (splitX && visitor.pushX(thisMinX))) { int nextMaxX = Math.toIntExact(thisMaxX - input.readVLong()); int nextMaxY = Math.toIntExact(thisMaxY - input.readVLong()); diff --git a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/fielddata/TriangleTreeWriter.java b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/fielddata/TriangleTreeWriter.java index c04476e4a8c9b..5a637709deca2 100644 --- a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/fielddata/TriangleTreeWriter.java +++ b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/fielddata/TriangleTreeWriter.java @@ -9,9 +9,10 @@ import org.apache.lucene.document.ShapeField; import org.apache.lucene.index.IndexableField; -import org.apache.lucene.store.ByteBuffersDataOutput; import org.apache.lucene.util.ArrayUtil; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.common.io.stream.StreamOutput; import java.io.IOException; import java.util.Comparator; @@ -27,7 +28,7 @@ private TriangleTreeWriter() { } /*** Serialize 
the interval tree in the provided data output */ - public static void writeTo(ByteBuffersDataOutput out, List fields) throws IOException { + public static void writeTo(StreamOutput out, List fields) throws IOException { final Extent extent = new Extent(); final TriangleTreeNode node = build(fields, extent); ; extent.writeCompressed(out); @@ -114,8 +115,8 @@ private TriangleTreeNode(ShapeField.DecodedTriangle component) { this.component = component; } - private void writeTo(ByteBuffersDataOutput out) throws IOException { - ByteBuffersDataOutput scratchBuffer = ByteBuffersDataOutput.newResettableInstance(); + private void writeTo(StreamOutput out) throws IOException { + BytesStreamOutput scratchBuffer = new BytesStreamOutput(); writeMetadata(out); writeComponent(out); if (left != null) { @@ -126,8 +127,8 @@ private void writeTo(ByteBuffersDataOutput out) throws IOException { } } - private void writeNode(ByteBuffersDataOutput out, int parentMaxX, int parentMaxY, - ByteBuffersDataOutput scratchBuffer) throws IOException { + private void writeNode(StreamOutput out, int parentMaxX, int parentMaxY, + BytesStreamOutput scratchBuffer) throws IOException { out.writeVLong((long) parentMaxX - maxX); out.writeVLong((long) parentMaxY - maxY); int size = nodeSize(false, parentMaxX, parentMaxY, scratchBuffer); @@ -144,7 +145,7 @@ private void writeNode(ByteBuffersDataOutput out, int parentMaxX, int parentMaxY } } - private void writeMetadata(ByteBuffersDataOutput out) { + private void writeMetadata(StreamOutput out) throws IOException { byte metadata = 0; metadata |= (left != null) ? (1 << 0) : 0; metadata |= (right != null) ? (1 << 1) : 0; @@ -161,7 +162,7 @@ private void writeMetadata(ByteBuffersDataOutput out) { out.writeByte(metadata); } - private void writeComponent(ByteBuffersDataOutput out) throws IOException { + private void writeComponent(StreamOutput out) throws IOException { out.writeVLong((long) maxX - component.aX); out.writeVLong((long) maxY - component.aY); if (component.type == ShapeField.DecodedTriangle.TYPE.POINT) { @@ -176,7 +177,7 @@ private void writeComponent(ByteBuffersDataOutput out) throws IOException { out.writeVLong((long) maxY - component.cY); } - private int nodeSize(boolean includeBox, int parentMaxX, int parentMaxY, ByteBuffersDataOutput scratchBuffer) throws IOException { + private int nodeSize(boolean includeBox, int parentMaxX, int parentMaxY, BytesStreamOutput scratchBuffer) throws IOException { int size =0; size++; //metadata size += componentSize(scratchBuffer); @@ -201,7 +202,7 @@ private int nodeSize(boolean includeBox, int parentMaxX, int parentMaxY, ByteBuf return size; } - private int componentSize(ByteBuffersDataOutput scratchBuffer) throws IOException { + private int componentSize(BytesStreamOutput scratchBuffer) throws IOException { scratchBuffer.reset(); if (component.type == ShapeField.DecodedTriangle.TYPE.POINT) { scratchBuffer.writeVLong((long) maxX - component.aX); diff --git a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/search/aggregations/bucket/geogrid/AbstractGeoHashGridTiler.java b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/search/aggregations/bucket/geogrid/AbstractGeoHashGridTiler.java index a9becd43fd6ec..dbdafa30fdf82 100644 --- a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/search/aggregations/bucket/geogrid/AbstractGeoHashGridTiler.java +++ b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/search/aggregations/bucket/geogrid/AbstractGeoHashGridTiler.java 
@@ -11,6 +11,8 @@ import org.elasticsearch.xpack.spatial.index.fielddata.GeoRelation; import org.elasticsearch.xpack.spatial.index.fielddata.GeoShapeValues; +import java.io.IOException; + /** * Implements most of the logic for the GeoHash aggregation. */ @@ -29,7 +31,7 @@ public long encode(double x, double y) { } @Override - public int setValues(GeoShapeCellValues values, GeoShapeValues.GeoShapeValue geoValue) { + public int setValues(GeoShapeCellValues values, GeoShapeValues.GeoShapeValue geoValue) throws IOException { if (precision == 0) { return 1; @@ -48,7 +50,7 @@ public int setValues(GeoShapeCellValues values, GeoShapeValues.GeoShapeValue geo } protected int setValuesByBruteForceScan(GeoShapeCellValues values, GeoShapeValues.GeoShapeValue geoValue, - GeoShapeValues.BoundingBox bounds) { + GeoShapeValues.BoundingBox bounds) throws IOException { // TODO: This way to discover cells inside of a bounding box seems not to work as expected. I can // see that eventually we will be visiting twice the same cell which should not happen. int idx = 0; @@ -75,7 +77,8 @@ protected int setValuesByBruteForceScan(GeoShapeCellValues values, GeoShapeValue /** * Sets a singular doc-value for the {@link GeoShapeValues.GeoShapeValue}. */ - protected int setValue(GeoShapeCellValues docValues, GeoShapeValues.GeoShapeValue geoValue, GeoShapeValues.BoundingBox bounds) { + protected int setValue(GeoShapeCellValues docValues, GeoShapeValues.GeoShapeValue geoValue, GeoShapeValues.BoundingBox bounds) + throws IOException { String hash = Geohash.stringEncode(bounds.minX(), bounds.minY(), precision); if (relateTile(geoValue, hash) != GeoRelation.QUERY_DISJOINT) { docValues.resizeCell(1); @@ -85,12 +88,12 @@ protected int setValue(GeoShapeCellValues docValues, GeoShapeValues.GeoShapeValu return 0; } - private GeoRelation relateTile(GeoShapeValues.GeoShapeValue geoValue, String hash) { + private GeoRelation relateTile(GeoShapeValues.GeoShapeValue geoValue, String hash) throws IOException { return validHash(hash) ? geoValue.relate(Geohash.toBoundingBox(hash)) : GeoRelation.QUERY_DISJOINT; } protected int setValuesByRasterization(String hash, GeoShapeCellValues values, int valuesIndex, - GeoShapeValues.GeoShapeValue geoValue) { + GeoShapeValues.GeoShapeValue geoValue) throws IOException { String[] hashes = Geohash.getSubGeohashes(hash); for (int i = 0; i < hashes.length; i++) { GeoRelation relation = relateTile(geoValue, hashes[i]); diff --git a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/search/aggregations/bucket/geogrid/AbstractGeoTileGridTiler.java b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/search/aggregations/bucket/geogrid/AbstractGeoTileGridTiler.java index a44a371d6edff..a5f3adb20f8a1 100644 --- a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/search/aggregations/bucket/geogrid/AbstractGeoTileGridTiler.java +++ b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/search/aggregations/bucket/geogrid/AbstractGeoTileGridTiler.java @@ -11,6 +11,8 @@ import org.elasticsearch.xpack.spatial.index.fielddata.GeoRelation; import org.elasticsearch.xpack.spatial.index.fielddata.GeoShapeValues; +import java.io.IOException; + /** * Implements most of the logic for the GeoTile aggregation. 
*/ @@ -44,7 +46,7 @@ public long encode(double x, double y) { * @return the number of tiles set by the shape */ @Override - public int setValues(GeoShapeCellValues values, GeoShapeValues.GeoShapeValue geoValue) { + public int setValues(GeoShapeCellValues values, GeoShapeValues.GeoShapeValue geoValue) throws IOException { GeoShapeValues.BoundingBox bounds = geoValue.boundingBox(); assert bounds.minX() <= bounds.maxX(); @@ -72,7 +74,7 @@ public int setValues(GeoShapeCellValues values, GeoShapeValues.GeoShapeValue geo } } - private GeoRelation relateTile(GeoShapeValues.GeoShapeValue geoValue, int xTile, int yTile, int precision) { + private GeoRelation relateTile(GeoShapeValues.GeoShapeValue geoValue, int xTile, int yTile, int precision) throws IOException { return validTile(xTile, yTile, precision) ? geoValue.relate(GeoTileUtils.toBoundingBox(xTile, yTile, precision)) : GeoRelation.QUERY_DISJOINT; } @@ -96,7 +98,7 @@ protected int setValue(GeoShapeCellValues docValues, int xTile, int yTile) { * @return the number of buckets the geoValue is found in */ protected int setValuesByBruteForceScan(GeoShapeCellValues values, GeoShapeValues.GeoShapeValue geoValue, - int minXTile, int minYTile, int maxXTile, int maxYTile) { + int minXTile, int minYTile, int maxXTile, int maxYTile) throws IOException { int idx = 0; for (int i = minXTile; i <= maxXTile; i++) { for (int j = minYTile; j <= maxYTile; j++) { @@ -111,7 +113,7 @@ protected int setValuesByBruteForceScan(GeoShapeCellValues values, GeoShapeValue } protected int setValuesByRasterization(int xTile, int yTile, int zTile, GeoShapeCellValues values, int valuesIndex, - GeoShapeValues.GeoShapeValue geoValue) { + GeoShapeValues.GeoShapeValue geoValue) throws IOException { zTile++; for (int i = 0; i < 2; i++) { for (int j = 0; j < 2; j++) { diff --git a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/search/aggregations/bucket/geogrid/GeoGridTiler.java b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/search/aggregations/bucket/geogrid/GeoGridTiler.java index bb369fd3c1f42..fa13df4dbe59f 100644 --- a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/search/aggregations/bucket/geogrid/GeoGridTiler.java +++ b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/search/aggregations/bucket/geogrid/GeoGridTiler.java @@ -9,6 +9,8 @@ import org.elasticsearch.xpack.spatial.index.fielddata.GeoShapeValues; +import java.io.IOException; + /** * The tiler to use to convert a geo value into long-encoded bucket keys for aggregating. 
*/ @@ -42,7 +44,7 @@ public int precision() { * * @return the number of cells the geoValue intersects */ - public abstract int setValues(GeoShapeCellValues docValues, GeoShapeValues.GeoShapeValue geoValue); + public abstract int setValues(GeoShapeCellValues docValues, GeoShapeValues.GeoShapeValue geoValue) throws IOException; /** Maximum number of cells that can be created by this tiler */ protected abstract long getMaxCells(); diff --git a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/fielddata/Tile2DVisitorTests.java b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/fielddata/Tile2DVisitorTests.java index fdebd7d44d1a3..50836f00a5635 100644 --- a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/fielddata/Tile2DVisitorTests.java +++ b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/fielddata/Tile2DVisitorTests.java @@ -269,7 +269,7 @@ public R visit(Rectangle rectangle) throws E { }); } - static void assertRelation(GeoRelation expectedRelation, GeometryDocValueReader reader, Extent extent) { + static void assertRelation(GeoRelation expectedRelation, GeometryDocValueReader reader, Extent extent) throws IOException { Tile2DVisitor tile2DVisitor = new Tile2DVisitor(); tile2DVisitor.reset(extent.minX(), extent.minY(), extent.maxX(), extent.maxY()); reader.visit(tile2DVisitor); diff --git a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/fielddata/TriangleTreeTests.java b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/fielddata/TriangleTreeTests.java index 6bc138e95aa6b..e6254df7eb763 100644 --- a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/fielddata/TriangleTreeTests.java +++ b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/fielddata/TriangleTreeTests.java @@ -8,8 +8,9 @@ package org.elasticsearch.xpack.spatial.index.fielddata; import org.apache.lucene.index.IndexableField; -import org.apache.lucene.store.ByteArrayDataInput; -import org.apache.lucene.store.ByteBuffersDataOutput; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.io.stream.ByteArrayStreamInput; +import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.geo.GeometryTestUtils; import org.elasticsearch.geometry.Geometry; import org.elasticsearch.index.mapper.GeoShapeIndexer; @@ -27,10 +28,12 @@ public void testVisitAllTriangles() throws IOException { // write tree GeoShapeIndexer indexer = new GeoShapeIndexer(true, "test"); List fieldList = indexer.indexShape(geometry); - ByteBuffersDataOutput output = new ByteBuffersDataOutput(); + BytesStreamOutput output = new BytesStreamOutput(); TriangleTreeWriter.writeTo(output, fieldList); // read tree - ByteArrayDataInput input = new ByteArrayDataInput(output.toArrayCopy()); + ByteArrayStreamInput input = new ByteArrayStreamInput(); + BytesRef bytesRef = output.bytes().toBytesRef(); + input.reset(bytesRef.bytes, bytesRef.offset, bytesRef.length); Extent extent = new Extent(); Extent.readFromCompressed(input, extent); TriangleCounterVisitor visitor = new TriangleCounterVisitor(); diff --git a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/bucket/geogrid/GeoHashTilerTests.java b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/bucket/geogrid/GeoHashTilerTests.java index a78d1b9dbcb96..8c8156a84fd01 100644 --- 
a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/bucket/geogrid/GeoHashTilerTests.java +++ b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/bucket/geogrid/GeoHashTilerTests.java @@ -14,6 +14,7 @@ import org.elasticsearch.xpack.spatial.index.fielddata.GeoRelation; import org.elasticsearch.xpack.spatial.index.fielddata.GeoShapeValues; +import java.io.IOException; import java.util.Arrays; import static org.elasticsearch.xpack.spatial.util.GeoTestUtils.geoShapeValue; @@ -95,7 +96,7 @@ protected int expectedBuckets(GeoShapeValues.GeoShapeValue geoValue, int precisi } private int computeBuckets(String hash, GeoBoundingBox bbox, - GeoShapeValues.GeoShapeValue geoValue, int finalPrecision) { + GeoShapeValues.GeoShapeValue geoValue, int finalPrecision) throws IOException { int count = 0; String[] hashes = Geohash.getSubGeohashes(hash); for (int i = 0; i < hashes.length; i++) { diff --git a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/util/GeoTestUtils.java b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/util/GeoTestUtils.java index f484e9c982ed8..7e6bb4ca079b5 100644 --- a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/util/GeoTestUtils.java +++ b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/util/GeoTestUtils.java @@ -55,7 +55,7 @@ public static BinaryGeoShapeDocValuesField binaryGeoShapeDocValuesField(String n return field; } - public static GeoShapeValues.GeoShapeValue geoShapeValue(Geometry geometry) { + public static GeoShapeValues.GeoShapeValue geoShapeValue(Geometry geometry) throws IOException { GeoShapeValues.GeoShapeValue value = new GeoShapeValues.GeoShapeValue(); value.reset(binaryGeoShapeDocValuesField("test", geometry).binaryValue()); return value; From 07715438b5d24ae92e517eb52f04148623f196ec Mon Sep 17 00:00:00 2001 From: Ignacio Vera Date: Wed, 1 Sep 2021 07:21:15 +0200 Subject: [PATCH 049/128] Refactor of GeoShape integration tests (#77052) This commit joins GeoFilterIT and GeoShapeIntegrationIT into one test case called GeoShapeIntegTestCase which is moved into the test framework. 
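With the shared base class, a concrete integration test only declares the mapping, the index version and the expensive-query setting it needs. As a rough illustration (not part of this change), a new suite built on the base class would look like the sketch below; the class name CustomGeoShapeIT is hypothetical, while the three overridden methods are exactly the hooks introduced by this refactor (compare GeoShapeIT added in this commit):

    package org.elasticsearch.search.geo;

    import java.io.IOException;

    import org.elasticsearch.Version;
    import org.elasticsearch.common.xcontent.XContentBuilder;
    import org.elasticsearch.test.VersionUtils;

    // Hypothetical subclass shown for illustration only; it mirrors GeoShapeIT from this commit.
    public class CustomGeoShapeIT extends GeoShapeIntegTestCase {

        @Override
        protected void getGeoShapeMapping(XContentBuilder b) throws IOException {
            // mapping fragment the shared tests use when creating the geo_shape field
            b.field("type", "geo_shape");
        }

        @Override
        protected Version getVersion() {
            // index version the shared tests create their indices with
            return VersionUtils.randomIndexCompatibleVersion(random());
        }

        @Override
        protected boolean allowExpensiveQueries() {
            // BKD-backed geo_shape queries do not require search.allow_expensive_queries
            return true;
        }
    }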
--- .../geometry/utils/GeoHashTests.java | 30 + .../elasticsearch/search/geo/GeoFilterIT.java | 503 ---------------- .../elasticsearch/search/geo/GeoShapeIT.java | 33 ++ .../search/geo/GeoShapeIntegrationIT.java | 292 --------- .../search/geo/LegacyGeoShapeIT.java | 87 +++ .../geo/LegacyGeoShapeIntegrationIT.java | 247 -------- .../builders/MultiPolygonBuilderTests.java | 83 +++ .../search/geo/GeoShapeIntegTestCase.java | 558 ++++++++++++++++++ 8 files changed, 791 insertions(+), 1042 deletions(-) delete mode 100644 server/src/internalClusterTest/java/org/elasticsearch/search/geo/GeoFilterIT.java create mode 100644 server/src/internalClusterTest/java/org/elasticsearch/search/geo/GeoShapeIT.java delete mode 100644 server/src/internalClusterTest/java/org/elasticsearch/search/geo/GeoShapeIntegrationIT.java create mode 100644 server/src/internalClusterTest/java/org/elasticsearch/search/geo/LegacyGeoShapeIT.java delete mode 100644 server/src/internalClusterTest/java/org/elasticsearch/search/geo/LegacyGeoShapeIntegrationIT.java create mode 100644 test/framework/src/main/java/org/elasticsearch/search/geo/GeoShapeIntegTestCase.java diff --git a/libs/geo/src/test/java/org/elasticsearch/geometry/utils/GeoHashTests.java b/libs/geo/src/test/java/org/elasticsearch/geometry/utils/GeoHashTests.java index 51545337c283c..45e92c850b4c4 100644 --- a/libs/geo/src/test/java/org/elasticsearch/geometry/utils/GeoHashTests.java +++ b/libs/geo/src/test/java/org/elasticsearch/geometry/utils/GeoHashTests.java @@ -11,6 +11,11 @@ import org.elasticsearch.geometry.Rectangle; import org.elasticsearch.test.ESTestCase; +import java.util.ArrayList; + +import static org.elasticsearch.geometry.utils.Geohash.addNeighbors; +import static org.hamcrest.Matchers.containsInAnyOrder; + /** * Tests for {@link Geohash} */ @@ -103,4 +108,29 @@ public void testInvalidGeohashes() { assertEquals("empty geohash", ex.getMessage()); } + public void testNeighbors() { + // Simple root case + assertThat(addNeighbors("7", new ArrayList<>()), containsInAnyOrder("4", "5", "6", "d", "e", "h", "k", "s")); + + // Root cases (Outer cells) + assertThat(addNeighbors("0", new ArrayList<>()), containsInAnyOrder("1", "2", "3", "p", "r")); + assertThat(addNeighbors("b", new ArrayList<>()), containsInAnyOrder("8", "9", "c", "x", "z")); + assertThat(addNeighbors("p", new ArrayList<>()), containsInAnyOrder("n", "q", "r", "0", "2")); + assertThat(addNeighbors("z", new ArrayList<>()), containsInAnyOrder("8", "b", "w", "x", "y")); + + // Root crossing dateline + assertThat(addNeighbors("2", new ArrayList<>()), containsInAnyOrder("0", "1", "3", "8", "9", "p", "r", "x")); + assertThat(addNeighbors("r", new ArrayList<>()), containsInAnyOrder("0", "2", "8", "n", "p", "q", "w", "x")); + + // level1: simple case + assertThat(addNeighbors("dk", new ArrayList<>()), + containsInAnyOrder("d5", "d7", "de", "dh", "dj", "dm", "ds", "dt")); + + // Level1: crossing cells + assertThat(addNeighbors("d5", new ArrayList<>()), + containsInAnyOrder("d4", "d6", "d7", "dh", "dk", "9f", "9g", "9u")); + assertThat(addNeighbors("d0", new ArrayList<>()), + containsInAnyOrder("d1", "d2", "d3", "9b", "9c", "6p", "6r", "3z")); + } + } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/geo/GeoFilterIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/geo/GeoFilterIT.java deleted file mode 100644 index f1c76403f9e4c..0000000000000 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/geo/GeoFilterIT.java +++ /dev/null @@ -1,503 +0,0 @@ -/* - 
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.search.geo; - -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.message.ParameterizedMessage; -import org.apache.lucene.spatial.prefix.RecursivePrefixTreeStrategy; -import org.apache.lucene.spatial.prefix.tree.GeohashPrefixTree; -import org.apache.lucene.spatial.query.SpatialArgs; -import org.apache.lucene.spatial.query.SpatialOperation; -import org.apache.lucene.spatial.query.UnsupportedSpatialOperation; -import org.elasticsearch.Version; -import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder; -import org.elasticsearch.action.bulk.BulkItemResponse; -import org.elasticsearch.action.bulk.BulkResponse; -import org.elasticsearch.action.search.SearchResponse; -import org.elasticsearch.cluster.metadata.IndexMetadata; -import org.elasticsearch.common.Priority; -import org.elasticsearch.common.Strings; -import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.geo.GeoPoint; -import org.elasticsearch.common.geo.GeoUtils; -import org.elasticsearch.common.geo.builders.CoordinatesBuilder; -import org.elasticsearch.common.geo.builders.LineStringBuilder; -import org.elasticsearch.common.geo.builders.MultiPolygonBuilder; -import org.elasticsearch.common.geo.builders.PointBuilder; -import org.elasticsearch.common.geo.builders.PolygonBuilder; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.common.xcontent.XContentType; -import org.elasticsearch.core.internal.io.Streams; -import org.elasticsearch.index.query.QueryBuilders; -import org.elasticsearch.search.SearchHit; -import org.elasticsearch.test.ESIntegTestCase; -import org.elasticsearch.test.VersionUtils; -import org.junit.BeforeClass; -import org.locationtech.spatial4j.context.SpatialContext; -import org.locationtech.spatial4j.distance.DistanceUtils; -import org.locationtech.spatial4j.exception.InvalidShapeException; -import org.locationtech.spatial4j.shape.Shape; - -import java.io.ByteArrayOutputStream; -import java.io.FileNotFoundException; -import java.io.IOException; -import java.io.InputStream; -import java.util.ArrayList; -import java.util.Random; -import java.util.zip.GZIPInputStream; - -import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; -import static org.elasticsearch.geometry.utils.Geohash.addNeighbors; -import static org.elasticsearch.index.query.QueryBuilders.geoBoundingBoxQuery; -import static org.elasticsearch.index.query.QueryBuilders.geoDistanceQuery; -import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; -import static org.elasticsearch.index.query.QueryBuilders.matchQuery; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertFirstHit; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.hasId; -import static org.hamcrest.Matchers.anyOf; -import static org.hamcrest.Matchers.closeTo; -import static org.hamcrest.Matchers.containsInAnyOrder; -import static 
org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.lessThanOrEqualTo; - -public class GeoFilterIT extends ESIntegTestCase { - - @Override - protected boolean forbidPrivateIndexSettings() { - return false; - } - - private static boolean intersectSupport; - private static boolean disjointSupport; - private static boolean withinSupport; - - @BeforeClass - public static void createNodes() throws Exception { - intersectSupport = testRelationSupport(SpatialOperation.Intersects); - disjointSupport = testRelationSupport(SpatialOperation.IsDisjointTo); - withinSupport = testRelationSupport(SpatialOperation.IsWithin); - } - - private static byte[] unZipData(String path) throws IOException { - InputStream is = Streams.class.getResourceAsStream(path); - if (is == null) { - throw new FileNotFoundException("Resource [" + path + "] not found in classpath"); - } - - ByteArrayOutputStream out = new ByteArrayOutputStream(); - GZIPInputStream in = new GZIPInputStream(is); - Streams.copy(in, out); - - is.close(); - out.close(); - - return out.toByteArray(); - } - - public void testShapeBuilders() { - try { - // self intersection polygon - new PolygonBuilder(new CoordinatesBuilder() - .coordinate(-10, -10) - .coordinate(10, 10) - .coordinate(-10, 10) - .coordinate(10, -10) - .close()) - .buildS4J(); - fail("Self intersection not detected"); - } catch (InvalidShapeException e) { - } - - // polygon with hole - new PolygonBuilder(new CoordinatesBuilder() - .coordinate(-10, -10).coordinate(-10, 10).coordinate(10, 10).coordinate(10, -10).close()) - .hole(new LineStringBuilder(new CoordinatesBuilder().coordinate(-5, -5).coordinate(-5, 5).coordinate(5, 5) - .coordinate(5, -5).close())) - .buildS4J(); - try { - // polygon with overlapping hole - new PolygonBuilder(new CoordinatesBuilder() - .coordinate(-10, -10).coordinate(-10, 10).coordinate(10, 10).coordinate(10, -10).close()) - .hole(new LineStringBuilder(new CoordinatesBuilder() - .coordinate(-5, -5).coordinate(-5, 11).coordinate(5, 11).coordinate(5, -5).close())) - .buildS4J(); - - fail("Self intersection not detected"); - } catch (InvalidShapeException e) { - } - - try { - // polygon with intersection holes - new PolygonBuilder(new CoordinatesBuilder() - .coordinate(-10, -10).coordinate(-10, 10).coordinate(10, 10).coordinate(10, -10).close()) - .hole(new LineStringBuilder(new CoordinatesBuilder().coordinate(-5, -5).coordinate(-5, 5).coordinate(5, 5) - .coordinate(5, -5).close())) - .hole(new LineStringBuilder(new CoordinatesBuilder().coordinate(-5, -6).coordinate(5, -6).coordinate(5, -4) - .coordinate(-5, -4).close())) - .buildS4J(); - fail("Intersection of holes not detected"); - } catch (InvalidShapeException e) { - } - - try { - // Common line in polygon - new PolygonBuilder(new CoordinatesBuilder() - .coordinate(-10, -10) - .coordinate(-10, 10) - .coordinate(-5, 10) - .coordinate(-5, -5) - .coordinate(-5, 20) - .coordinate(10, 20) - .coordinate(10, -10) - .close()) - .buildS4J(); - fail("Self intersection not detected"); - } catch (InvalidShapeException e) { - } - - // Multipolygon: polygon with hole and polygon within the whole - new MultiPolygonBuilder() - .polygon(new PolygonBuilder( - new CoordinatesBuilder().coordinate(-10, -10) - .coordinate(-10, 10) - .coordinate(10, 10) - .coordinate(10, -10).close()) - .hole(new LineStringBuilder( - new CoordinatesBuilder().coordinate(-5, -5) - .coordinate(-5, 5) - .coordinate(5, 5) - .coordinate(5, -5).close()))) - .polygon(new PolygonBuilder( - new CoordinatesBuilder() - .coordinate(-4, -4) - 
.coordinate(-4, 4) - .coordinate(4, 4) - .coordinate(4, -4).close())) - .buildS4J(); - } - - public void testShapeRelations() throws Exception { - assertTrue( "Intersect relation is not supported", intersectSupport); - assertTrue("Disjoint relation is not supported", disjointSupport); - assertTrue("within relation is not supported", withinSupport); - - String mapping = Strings.toString(XContentFactory.jsonBuilder() - .startObject() - .startObject("properties") - .startObject("area") - .field("type", "geo_shape") - .field("tree", "geohash") - .endObject() - .endObject() - .endObject()); - - final Version version = VersionUtils.randomPreviousCompatibleVersion(random(), Version.V_8_0_0); - CreateIndexRequestBuilder mappingRequest = client().admin().indices().prepareCreate("shapes") - .setMapping(mapping).setSettings(settings(version).build()); - mappingRequest.get(); - client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForGreenStatus().get(); - - // Create a multipolygon with two polygons. The first is an rectangle of size 10x10 - // with a hole of size 5x5 equidistant from all sides. This hole in turn contains - // the second polygon of size 4x4 equidistant from all sites - MultiPolygonBuilder polygon = new MultiPolygonBuilder() - .polygon(new PolygonBuilder( - new CoordinatesBuilder().coordinate(-10, -10).coordinate(-10, 10).coordinate(10, 10).coordinate(10, -10) - .close()) - .hole(new LineStringBuilder(new CoordinatesBuilder() - .coordinate(-5, -5).coordinate(-5, 5).coordinate(5, 5).coordinate(5, -5).close()))) - .polygon(new PolygonBuilder( - new CoordinatesBuilder().coordinate(-4, -4).coordinate(-4, 4).coordinate(4, 4).coordinate(4, -4).close())); - BytesReference data = BytesReference.bytes(jsonBuilder().startObject().field("area", polygon).endObject()); - - client().prepareIndex("shapes").setId("1").setSource(data, XContentType.JSON).get(); - client().admin().indices().prepareRefresh().get(); - - // Point in polygon - SearchResponse result = client().prepareSearch() - .setQuery(matchAllQuery()) - .setPostFilter(QueryBuilders.geoIntersectionQuery("area", new PointBuilder(3, 3))) - .get(); - assertHitCount(result, 1); - assertFirstHit(result, hasId("1")); - - // Point in polygon hole - result = client().prepareSearch() - .setQuery(matchAllQuery()) - .setPostFilter(QueryBuilders.geoIntersectionQuery("area", new PointBuilder(4.5, 4.5))) - .get(); - assertHitCount(result, 0); - - // by definition the border of a polygon belongs to the inner - // so the border of a polygons hole also belongs to the inner - // of the polygon NOT the hole - - // Point on polygon border - result = client().prepareSearch() - .setQuery(matchAllQuery()) - .setPostFilter(QueryBuilders.geoIntersectionQuery("area", new PointBuilder(10.0, 5.0))) - .get(); - assertHitCount(result, 1); - assertFirstHit(result, hasId("1")); - - // Point on hole border - result = client().prepareSearch() - .setQuery(matchAllQuery()) - .setPostFilter(QueryBuilders.geoIntersectionQuery("area", new PointBuilder(5.0, 2.0))) - .get(); - assertHitCount(result, 1); - assertFirstHit(result, hasId("1")); - - if (disjointSupport) { - // Point not in polygon - result = client().prepareSearch() - .setQuery(matchAllQuery()) - .setPostFilter(QueryBuilders.geoDisjointQuery("area", new PointBuilder(3, 3))) - .get(); - assertHitCount(result, 0); - - // Point in polygon hole - result = client().prepareSearch() - .setQuery(matchAllQuery()) - .setPostFilter(QueryBuilders.geoDisjointQuery("area", new PointBuilder(4.5, 4.5))) 
- .get(); - assertHitCount(result, 1); - assertFirstHit(result, hasId("1")); - } - - // Create a polygon that fills the empty area of the polygon defined above - PolygonBuilder inverse = new PolygonBuilder(new CoordinatesBuilder() - .coordinate(-5, -5).coordinate(-5, 5).coordinate(5, 5).coordinate(5, -5).close()) - .hole(new LineStringBuilder( - new CoordinatesBuilder().coordinate(-4, -4).coordinate(-4, 4).coordinate(4, 4).coordinate(4, -4).close())); - - data = BytesReference.bytes(jsonBuilder().startObject().field("area", inverse).endObject()); - client().prepareIndex("shapes").setId("2").setSource(data, XContentType.JSON).get(); - client().admin().indices().prepareRefresh().get(); - - // re-check point on polygon hole - result = client().prepareSearch() - .setQuery(matchAllQuery()) - .setPostFilter(QueryBuilders.geoIntersectionQuery("area", new PointBuilder(4.5, 4.5))) - .get(); - assertHitCount(result, 1); - assertFirstHit(result, hasId("2")); - - // Create Polygon with hole and common edge - PolygonBuilder builder = new PolygonBuilder(new CoordinatesBuilder() - .coordinate(-10, -10).coordinate(-10, 10).coordinate(10, 10).coordinate(10, -10).close()) - .hole(new LineStringBuilder(new CoordinatesBuilder() - .coordinate(-5, -5).coordinate(-5, 5).coordinate(10, 5).coordinate(10, -5).close())); - - if (withinSupport) { - // Polygon WithIn Polygon - builder = new PolygonBuilder(new CoordinatesBuilder() - .coordinate(-30, -30).coordinate(-30, 30).coordinate(30, 30).coordinate(30, -30).close()); - - result = client().prepareSearch() - .setQuery(matchAllQuery()) - .setPostFilter(QueryBuilders.geoWithinQuery("area", builder.buildGeometry())) - .get(); - assertHitCount(result, 2); - } - - // Create a polygon crossing longitude 180. - builder = new PolygonBuilder(new CoordinatesBuilder() - .coordinate(170, -10).coordinate(190, -10).coordinate(190, 10).coordinate(170, 10).close()); - - data = BytesReference.bytes(jsonBuilder().startObject().field("area", builder).endObject()); - client().prepareIndex("shapes").setId("1").setSource(data, XContentType.JSON).get(); - client().admin().indices().prepareRefresh().get(); - - // Create a polygon crossing longitude 180 with hole. 
- builder = new PolygonBuilder(new CoordinatesBuilder() - .coordinate(170, -10).coordinate(190, -10).coordinate(190, 10).coordinate(170, 10).close()) - .hole(new LineStringBuilder(new CoordinatesBuilder().coordinate(175, -5).coordinate(185, -5).coordinate(185, 5) - .coordinate(175, 5).close())); - - data = BytesReference.bytes(jsonBuilder().startObject().field("area", builder).endObject()); - client().prepareIndex("shapes").setId("1").setSource(data, XContentType.JSON).get(); - client().admin().indices().prepareRefresh().get(); - - result = client().prepareSearch() - .setQuery(matchAllQuery()) - .setPostFilter(QueryBuilders.geoIntersectionQuery("area", new PointBuilder(174, -4).buildGeometry())) - .get(); - assertHitCount(result, 1); - - result = client().prepareSearch() - .setQuery(matchAllQuery()) - .setPostFilter(QueryBuilders.geoIntersectionQuery("area", new PointBuilder(-174, -4).buildGeometry())) - .get(); - assertHitCount(result, 1); - - result = client().prepareSearch() - .setQuery(matchAllQuery()) - .setPostFilter(QueryBuilders.geoIntersectionQuery("area", new PointBuilder(180, -4).buildGeometry())) - .get(); - assertHitCount(result, 0); - - result = client().prepareSearch() - .setQuery(matchAllQuery()) - .setPostFilter(QueryBuilders.geoIntersectionQuery("area", new PointBuilder(180, -6).buildGeometry())) - .get(); - assertHitCount(result, 1); - } - - public void testBulk() throws Exception { - byte[] bulkAction = unZipData("/org/elasticsearch/search/geo/gzippedmap.gz"); - Version version = VersionUtils.randomIndexCompatibleVersion(random()); - Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, version).build(); - XContentBuilder xContentBuilder = XContentFactory.jsonBuilder() - .startObject() - .startObject("_doc") - .startObject("properties") - .startObject("pin") - .field("type", "geo_point"); - xContentBuilder.field("store", true) - .endObject() - .startObject("location") - .field("type", "geo_shape") - .field("ignore_malformed", true) - .endObject() - .endObject() - .endObject() - .endObject(); - - client().admin().indices().prepareCreate("countries").setSettings(settings) - .setMapping(xContentBuilder).get(); - BulkResponse bulk = client().prepareBulk().add(bulkAction, 0, bulkAction.length, null, xContentBuilder.contentType()).get(); - - for (BulkItemResponse item : bulk.getItems()) { - assertFalse("unable to index data", item.isFailed()); - } - - client().admin().indices().prepareRefresh().get(); - String key = "DE"; - - SearchResponse searchResponse = client().prepareSearch() - .setQuery(matchQuery("_id", key)) - .get(); - - assertHitCount(searchResponse, 1); - - for (SearchHit hit : searchResponse.getHits()) { - assertThat(hit.getId(), equalTo(key)); - } - - SearchResponse world = client().prepareSearch().addStoredField("pin").setQuery( - geoBoundingBoxQuery("pin").setCorners(90, -179.99999, -90, 179.99999) - ).get(); - - assertHitCount(world, 53); - - SearchResponse distance = client().prepareSearch().addStoredField("pin").setQuery( - geoDistanceQuery("pin").distance("425km").point(51.11, 9.851) - ).get(); - - assertHitCount(distance, 5); - GeoPoint point = new GeoPoint(); - for (SearchHit hit : distance.getHits()) { - String name = hit.getId(); - point.resetFromString(hit.getFields().get("pin").getValue()); - double dist = distance(point.getLat(), point.getLon(), 51.11, 9.851); - - assertThat("distance to '" + name + "'", dist, lessThanOrEqualTo(425000d)); - assertThat(name, anyOf(equalTo("CZ"), equalTo("DE"), equalTo("BE"), equalTo("NL"), 
equalTo("LU"))); - if (key.equals(name)) { - assertThat(dist, closeTo(0d, 0.1d)); - } - } - } - - public void testNeighbors() { - // Simple root case - assertThat(addNeighbors("7", new ArrayList<>()), containsInAnyOrder("4", "5", "6", "d", "e", "h", "k", "s")); - - // Root cases (Outer cells) - assertThat(addNeighbors("0", new ArrayList<>()), containsInAnyOrder("1", "2", "3", "p", "r")); - assertThat(addNeighbors("b", new ArrayList<>()), containsInAnyOrder("8", "9", "c", "x", "z")); - assertThat(addNeighbors("p", new ArrayList<>()), containsInAnyOrder("n", "q", "r", "0", "2")); - assertThat(addNeighbors("z", new ArrayList<>()), containsInAnyOrder("8", "b", "w", "x", "y")); - - // Root crossing dateline - assertThat(addNeighbors("2", new ArrayList<>()), containsInAnyOrder("0", "1", "3", "8", "9", "p", "r", "x")); - assertThat(addNeighbors("r", new ArrayList<>()), containsInAnyOrder("0", "2", "8", "n", "p", "q", "w", "x")); - - // level1: simple case - assertThat(addNeighbors("dk", new ArrayList<>()), - containsInAnyOrder("d5", "d7", "de", "dh", "dj", "dm", "ds", "dt")); - - // Level1: crossing cells - assertThat(addNeighbors("d5", new ArrayList<>()), - containsInAnyOrder("d4", "d6", "d7", "dh", "dk", "9f", "9g", "9u")); - assertThat(addNeighbors("d0", new ArrayList<>()), - containsInAnyOrder("d1", "d2", "d3", "9b", "9c", "6p", "6r", "3z")); - } - - public static double distance(double lat1, double lon1, double lat2, double lon2) { - return GeoUtils.EARTH_SEMI_MAJOR_AXIS * DistanceUtils.distHaversineRAD( - DistanceUtils.toRadians(lat1), - DistanceUtils.toRadians(lon1), - DistanceUtils.toRadians(lat2), - DistanceUtils.toRadians(lon2) - ); - } - - protected static boolean testRelationSupport(SpatialOperation relation) { - if (relation == SpatialOperation.IsDisjointTo) { - // disjoint works in terms of intersection - relation = SpatialOperation.Intersects; - } - try { - GeohashPrefixTree tree = new GeohashPrefixTree(SpatialContext.GEO, 3); - RecursivePrefixTreeStrategy strategy = new RecursivePrefixTreeStrategy(tree, "area"); - Shape shape = SpatialContext.GEO.makePoint(0, 0); - SpatialArgs args = new SpatialArgs(relation, shape); - strategy.makeQuery(args); - return true; - } catch (UnsupportedSpatialOperation e) { - final SpatialOperation finalRelation = relation; - LogManager.getLogger(GeoFilterIT.class) - .info(() -> new ParameterizedMessage("Unsupported spatial operation {}", finalRelation), e); - return false; - } - } - - protected static String randomhash(int length) { - return randomhash(random(), length); - } - - protected static String randomhash(Random random) { - return randomhash(random, 2 + random.nextInt(10)); - } - - protected static String randomhash() { - return randomhash(random()); - } - - protected static String randomhash(Random random, int length) { - final char[] BASE_32 = { - '0', '1', '2', '3', '4', '5', '6', '7', - '8', '9', 'b', 'c', 'd', 'e', 'f', 'g', - 'h', 'j', 'k', 'm', 'n', 'p', 'q', 'r', - 's', 't', 'u', 'v', 'w', 'x', 'y', 'z'}; - - StringBuilder sb = new StringBuilder(); - for (int i = 0; i < length; i++) { - sb.append(BASE_32[random.nextInt(BASE_32.length)]); - } - - return sb.toString(); - } -} - diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/geo/GeoShapeIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/geo/GeoShapeIT.java new file mode 100644 index 0000000000000..70ff028360165 --- /dev/null +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/geo/GeoShapeIT.java @@ -0,0 +1,33 @@ +/* + * 
Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.search.geo; + +import org.elasticsearch.Version; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.test.VersionUtils; + +import java.io.IOException; + +public class GeoShapeIT extends GeoShapeIntegTestCase { + + @Override + protected void getGeoShapeMapping(XContentBuilder b) throws IOException { + b.field("type", "geo_shape"); + } + + @Override + protected Version getVersion() { + return VersionUtils.randomIndexCompatibleVersion(random()); + } + + @Override + protected boolean allowExpensiveQueries() { + return true; + } +} diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/geo/GeoShapeIntegrationIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/geo/GeoShapeIntegrationIT.java deleted file mode 100644 index 28d51c09e88a5..0000000000000 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/geo/GeoShapeIntegrationIT.java +++ /dev/null @@ -1,292 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.search.geo; - -import org.elasticsearch.Version; -import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest; -import org.elasticsearch.action.search.SearchResponse; -import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.routing.IndexShardRoutingTable; -import org.elasticsearch.common.Strings; -import org.elasticsearch.common.geo.Orientation; -import org.elasticsearch.common.geo.builders.PointBuilder; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.common.xcontent.XContentType; -import org.elasticsearch.index.IndexService; -import org.elasticsearch.index.mapper.GeoShapeFieldMapper; -import org.elasticsearch.index.mapper.MappedFieldType; -import org.elasticsearch.indices.IndicesService; -import org.elasticsearch.test.ESIntegTestCase; -import org.elasticsearch.test.VersionUtils; - -import static org.elasticsearch.index.query.QueryBuilders.geoShapeQuery; -import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; -import static org.hamcrest.Matchers.containsString; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.instanceOf; - -public class GeoShapeIntegrationIT extends ESIntegTestCase { - - @Override - protected boolean forbidPrivateIndexSettings() { - return false; - } - - @Override - protected Settings nodeSettings(int nodeOrdinal, Settings otherSettings) { - return Settings.builder() - // Check that only geo-shape queries on legacy PrefixTree based - // geo shapes are disallowed. 
- .put("search.allow_expensive_queries", false) - .put(super.nodeSettings(nodeOrdinal, otherSettings)) - .build(); - } - - /** - * Test that orientation parameter correctly persists across cluster restart - */ - public void testOrientationPersistence() throws Exception { - String idxName = "orientation"; - String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject() - .startObject("properties").startObject("location") - .field("type", "geo_shape") - .field("orientation", "left") - .endObject() - .endObject().endObject()); - - // create index - assertAcked(prepareCreate(idxName).setMapping(mapping)); - - mapping = Strings.toString(XContentFactory.jsonBuilder().startObject() - .startObject("properties").startObject("location") - .field("type", "geo_shape") - .field("orientation", "right") - .endObject() - .endObject().endObject()); - - assertAcked(prepareCreate(idxName+"2").setMapping(mapping)); - ensureGreen(idxName, idxName+"2"); - - internalCluster().fullRestart(); - ensureGreen(idxName, idxName+"2"); - - // left orientation test - IndicesService indicesService = internalCluster().getInstance(IndicesService.class, findNodeName(idxName)); - IndexService indexService = indicesService.indexService(resolveIndex(idxName)); - MappedFieldType fieldType = indexService.mapperService().fieldType("location"); - assertThat(fieldType, instanceOf(GeoShapeFieldMapper.GeoShapeFieldType.class)); - - GeoShapeFieldMapper.GeoShapeFieldType gsfm = (GeoShapeFieldMapper.GeoShapeFieldType)fieldType; - Orientation orientation = gsfm.orientation(); - assertThat(orientation, equalTo(Orientation.CLOCKWISE)); - assertThat(orientation, equalTo(Orientation.LEFT)); - assertThat(orientation, equalTo(Orientation.CW)); - - // right orientation test - indicesService = internalCluster().getInstance(IndicesService.class, findNodeName(idxName+"2")); - indexService = indicesService.indexService(resolveIndex((idxName+"2"))); - fieldType = indexService.mapperService().fieldType("location"); - assertThat(fieldType, instanceOf(GeoShapeFieldMapper.GeoShapeFieldType.class)); - - gsfm = (GeoShapeFieldMapper.GeoShapeFieldType)fieldType; - orientation = gsfm.orientation(); - assertThat(orientation, equalTo(Orientation.COUNTER_CLOCKWISE)); - assertThat(orientation, equalTo(Orientation.RIGHT)); - assertThat(orientation, equalTo(Orientation.CCW)); - } - - /** - * Test that ignore_malformed on GeoShapeFieldMapper does not fail the entire document - */ - public void testIgnoreMalformed() throws Exception { - // create index - assertAcked(client().admin().indices().prepareCreate("test") - .setMapping("shape", "type=geo_shape,ignore_malformed=true").get()); - ensureGreen(); - - // test self crossing ccw poly not crossing dateline - String polygonGeoJson = Strings.toString(XContentFactory.jsonBuilder().startObject().field("type", "Polygon") - .startArray("coordinates") - .startArray() - .startArray().value(176.0).value(15.0).endArray() - .startArray().value(-177.0).value(10.0).endArray() - .startArray().value(-177.0).value(-10.0).endArray() - .startArray().value(176.0).value(-15.0).endArray() - .startArray().value(-177.0).value(15.0).endArray() - .startArray().value(172.0).value(0.0).endArray() - .startArray().value(176.0).value(15.0).endArray() - .endArray() - .endArray() - .endObject()); - - indexRandom(true, client().prepareIndex("test").setId("0").setSource("shape", - polygonGeoJson)); - SearchResponse searchResponse = client().prepareSearch("test").setQuery(matchAllQuery()).get(); - 
assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); - } - - public void testMappingUpdate() { - // create index - final Version version = VersionUtils.randomPreviousCompatibleVersion(random(), Version.V_8_0_0); - assertAcked(client().admin().indices().prepareCreate("test").setSettings(settings(version).build()) - .setMapping("shape", "type=geo_shape,strategy=recursive").get()); - ensureGreen(); - - String update ="{\n" + - " \"properties\": {\n" + - " \"shape\": {\n" + - " \"type\": \"geo_shape\"" + - " }\n" + - " }\n" + - "}"; - - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> client().admin().indices() - .preparePutMapping("test") - .setSource(update, XContentType.JSON).get()); - assertThat(e.getMessage(), containsString("mapper [shape] of type [geo_shape] cannot change strategy from [recursive] to [BKD]")); - } - - /** - * Test that the indexed shape routing can be provided if it is required - */ - public void testIndexShapeRouting() throws Exception { - String mapping = "{\"_doc\":{\n" + - " \"_routing\": {\n" + - " \"required\": true\n" + - " },\n" + - " \"properties\": {\n" + - " \"shape\": {\n" + - " \"type\": \"geo_shape\"\n" + - " }\n" + - " }\n" + - " }}"; - - - // create index - assertAcked(client().admin().indices().prepareCreate("test").setMapping(mapping).get()); - ensureGreen(); - - String source = "{\n" + - " \"shape\" : {\n" + - " \"type\" : \"bbox\",\n" + - " \"coordinates\" : [[-45.0, 45.0], [45.0, -45.0]]\n" + - " }\n" + - "}"; - - indexRandom(true, client().prepareIndex("test").setId("0").setSource(source, XContentType.JSON).setRouting("ABC")); - - SearchResponse searchResponse = client().prepareSearch("test").setQuery( - geoShapeQuery("shape", "0").indexedShapeIndex("test").indexedShapeRouting("ABC") - ).get(); - - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); - } - - public void testIndexPolygonDateLine() throws Exception { - String mappingVector = "{\n" + - " \"properties\": {\n" + - " \"shape\": {\n" + - " \"type\": \"geo_shape\"\n" + - " }\n" + - " }\n" + - " }"; - - String mappingQuad = "{\n" + - " \"properties\": {\n" + - " \"shape\": {\n" + - " \"type\": \"geo_shape\",\n" + - " \"tree\": \"quadtree\"\n" + - " }\n" + - " }\n" + - " }"; - - - // create index - assertAcked(client().admin().indices().prepareCreate("vector").setMapping(mappingVector).get()); - ensureGreen(); - - final Version version = VersionUtils.randomPreviousCompatibleVersion(random(), Version.V_8_0_0); - assertAcked(client().admin().indices().prepareCreate("quad") - .setSettings(settings(version).build()).setMapping(mappingQuad).get()); - ensureGreen(); - - String source = "{\n" + - " \"shape\" : \"POLYGON((179 0, -179 0, -179 2, 179 2, 179 0))\""+ - "}"; - - indexRandom(true, client().prepareIndex("quad").setId("0").setSource(source, XContentType.JSON)); - indexRandom(true, client().prepareIndex("vector").setId("0").setSource(source, XContentType.JSON)); - - try { - ClusterUpdateSettingsRequest updateSettingsRequest = new ClusterUpdateSettingsRequest(); - updateSettingsRequest.persistentSettings(Settings.builder().put("search.allow_expensive_queries", true)); - assertAcked(client().admin().cluster().updateSettings(updateSettingsRequest).actionGet()); - - SearchResponse searchResponse = client().prepareSearch("quad").setQuery( - geoShapeQuery("shape", new PointBuilder(-179.75, 1)) - ).get(); - - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); - - searchResponse = 
client().prepareSearch("quad").setQuery( - geoShapeQuery("shape", new PointBuilder(90, 1)) - ).get(); - - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(0L)); - - searchResponse = client().prepareSearch("quad").setQuery( - geoShapeQuery("shape", new PointBuilder(-180, 1)) - ).get(); - - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); - - searchResponse = client().prepareSearch("quad").setQuery( - geoShapeQuery("shape", new PointBuilder(180, 1)) - ).get(); - - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); - } finally { - ClusterUpdateSettingsRequest updateSettingsRequest = new ClusterUpdateSettingsRequest(); - updateSettingsRequest.persistentSettings(Settings.builder().put("search.allow_expensive_queries", (String) null)); - assertAcked(client().admin().cluster().updateSettings(updateSettingsRequest).actionGet()); - } - - SearchResponse searchResponse = client().prepareSearch("vector").setQuery( - geoShapeQuery("shape", new PointBuilder(90, 1)) - ).get(); - - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(0L)); - - searchResponse = client().prepareSearch("vector").setQuery( - geoShapeQuery("shape", new PointBuilder(-179.75, 1)) - ).get(); - - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); - - searchResponse = client().prepareSearch("vector").setQuery( - geoShapeQuery("shape", new PointBuilder(-180, 1)) - ).get(); - - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); - - searchResponse = client().prepareSearch("vector").setQuery( - geoShapeQuery("shape", new PointBuilder(180, 1)) - ).get(); - - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); - } - - private String findNodeName(String index) { - ClusterState state = client().admin().cluster().prepareState().get().getState(); - IndexShardRoutingTable shard = state.getRoutingTable().index(index).shard(0); - String nodeId = shard.assignedShards().get(0).currentNodeId(); - return state.getNodes().get(nodeId).getName(); - } -} diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/geo/LegacyGeoShapeIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/geo/LegacyGeoShapeIT.java new file mode 100644 index 0000000000000..3aff066a5bff9 --- /dev/null +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/geo/LegacyGeoShapeIT.java @@ -0,0 +1,87 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.search.geo; + +import org.elasticsearch.Version; +import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.geometry.Circle; +import org.elasticsearch.test.VersionUtils; + +import java.io.IOException; + +import static org.elasticsearch.index.query.QueryBuilders.geoShapeQuery; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; + +public class LegacyGeoShapeIT extends GeoShapeIntegTestCase { + + @Override + protected void getGeoShapeMapping(XContentBuilder b) throws IOException { + b.field("type", "geo_shape"); + b.field("strategy", "recursive"); + } + + @Override + protected Version getVersion() { + // legacy shapes can only be created in version lower than 8.x + return VersionUtils.randomPreviousCompatibleVersion(random(), Version.V_8_0_0); + } + + @Override + protected boolean allowExpensiveQueries() { + return false; + } + + public void testMappingUpdate() { + // create index + assertAcked(client().admin().indices().prepareCreate("test").setSettings(settings(getVersion()).build()) + .setMapping("shape", "type=geo_shape,strategy=recursive").get()); + ensureGreen(); + + String update ="{\n" + + " \"properties\": {\n" + + " \"shape\": {\n" + + " \"type\": \"geo_shape\"" + + " }\n" + + " }\n" + + "}"; + + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> client().admin().indices() + .preparePutMapping("test") + .setSource(update, XContentType.JSON).get()); + assertThat(e.getMessage(), containsString("mapper [shape] of type [geo_shape] cannot change strategy from [recursive] to [BKD]")); + } + + /** + * Test that the circle is still supported for the legacy shapes + */ + public void testLegacyCircle() throws Exception { + // create index + assertAcked(prepareCreate("test").setSettings(settings(getVersion()).build()) + .setMapping("shape", "type=geo_shape,strategy=recursive,tree=geohash").get()); + ensureGreen(); + + indexRandom(true, client().prepareIndex("test").setId("0").setSource("shape", (ToXContent) (builder, params) -> { + builder.startObject().field("type", "circle") + .startArray("coordinates").value(30).value(50).endArray() + .field("radius","77km") + .endObject(); + return builder; + })); + + // test self crossing of circles + SearchResponse searchResponse = client().prepareSearch("test").setQuery(geoShapeQuery("shape", + new Circle(30, 50, 77000))).get(); + assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); + } +} diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/geo/LegacyGeoShapeIntegrationIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/geo/LegacyGeoShapeIntegrationIT.java deleted file mode 100644 index c1301a2a10b85..0000000000000 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/geo/LegacyGeoShapeIntegrationIT.java +++ /dev/null @@ -1,247 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ -package org.elasticsearch.search.geo; - -import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.Version; -import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest; -import org.elasticsearch.action.search.SearchResponse; -import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.routing.IndexShardRoutingTable; -import org.elasticsearch.common.Strings; -import org.elasticsearch.common.geo.Orientation; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.ToXContent; -import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.common.xcontent.XContentType; -import org.elasticsearch.geometry.Circle; -import org.elasticsearch.index.IndexService; -import org.elasticsearch.index.mapper.LegacyGeoShapeFieldMapper; -import org.elasticsearch.index.mapper.MappedFieldType; -import org.elasticsearch.indices.IndicesService; -import org.elasticsearch.test.ESIntegTestCase; -import org.elasticsearch.test.VersionUtils; - -import java.io.IOException; - -import static org.elasticsearch.index.query.QueryBuilders.geoShapeQuery; -import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.instanceOf; - -public class LegacyGeoShapeIntegrationIT extends ESIntegTestCase { - - @Override - protected boolean forbidPrivateIndexSettings() { - return false; - } - - /** - * Test that orientation parameter correctly persists across cluster restart - */ - public void testOrientationPersistence() throws Exception { - final Version version = VersionUtils.randomPreviousCompatibleVersion(random(), Version.V_8_0_0); - String idxName = "orientation"; - String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject() - .startObject("properties").startObject("location") - .field("type", "geo_shape") - .field("tree", "quadtree") - .field("orientation", "left") - .endObject() - .endObject().endObject()); - - // create index - assertAcked(prepareCreate(idxName).setMapping(mapping).setSettings(settings(version).build())); - - mapping = Strings.toString(XContentFactory.jsonBuilder().startObject() - .startObject("properties").startObject("location") - .field("type", "geo_shape") - .field("tree", "quadtree") - .field("orientation", "right") - .endObject() - .endObject().endObject()); - - assertAcked(prepareCreate(idxName+"2").setMapping(mapping).setSettings(settings(version).build())); - ensureGreen(idxName, idxName+"2"); - - internalCluster().fullRestart(); - ensureGreen(idxName, idxName+"2"); - - // left orientation test - IndicesService indicesService = internalCluster().getInstance(IndicesService.class, findNodeName(idxName)); - IndexService indexService = indicesService.indexService(resolveIndex(idxName)); - MappedFieldType fieldType = indexService.mapperService().fieldType("location"); - assertThat(fieldType, instanceOf(LegacyGeoShapeFieldMapper.GeoShapeFieldType.class)); - - LegacyGeoShapeFieldMapper.GeoShapeFieldType gsfm = (LegacyGeoShapeFieldMapper.GeoShapeFieldType)fieldType; - Orientation orientation = gsfm.orientation(); - assertThat(orientation, equalTo(Orientation.CLOCKWISE)); - assertThat(orientation, equalTo(Orientation.LEFT)); - assertThat(orientation, equalTo(Orientation.CW)); - - // right orientation test - indicesService = internalCluster().getInstance(IndicesService.class, 
findNodeName(idxName+"2")); - indexService = indicesService.indexService(resolveIndex((idxName+"2"))); - fieldType = indexService.mapperService().fieldType("location"); - assertThat(fieldType, instanceOf(LegacyGeoShapeFieldMapper.GeoShapeFieldType.class)); - - gsfm = (LegacyGeoShapeFieldMapper.GeoShapeFieldType)fieldType; - orientation = gsfm.orientation(); - assertThat(orientation, equalTo(Orientation.COUNTER_CLOCKWISE)); - assertThat(orientation, equalTo(Orientation.RIGHT)); - assertThat(orientation, equalTo(Orientation.CCW)); - } - - /** - * Test that ignore_malformed on GeoShapeFieldMapper does not fail the entire document - */ - public void testIgnoreMalformed() throws Exception { - // create index - final Version version = VersionUtils.randomPreviousCompatibleVersion(random(), Version.V_8_0_0); - assertAcked(prepareCreate("test").setSettings(settings(version).build()) - .setMapping("shape", "type=geo_shape,tree=quadtree,ignore_malformed=true").get()); - ensureGreen(); - - // test self crossing ccw poly not crossing dateline - String polygonGeoJson = Strings.toString(XContentFactory.jsonBuilder().startObject().field("type", "Polygon") - .startArray("coordinates") - .startArray() - .startArray().value(176.0).value(15.0).endArray() - .startArray().value(-177.0).value(10.0).endArray() - .startArray().value(-177.0).value(-10.0).endArray() - .startArray().value(176.0).value(-15.0).endArray() - .startArray().value(-177.0).value(15.0).endArray() - .startArray().value(172.0).value(0.0).endArray() - .startArray().value(176.0).value(15.0).endArray() - .endArray() - .endArray() - .endObject()); - - indexRandom(true, client().prepareIndex("test").setId("0").setSource("shape", - polygonGeoJson)); - SearchResponse searchResponse = client().prepareSearch("test").setQuery(matchAllQuery()).get(); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); - } - - /** - * Test that the indexed shape routing can be provided if it is required - */ - public void testIndexShapeRouting() throws Exception { - String mapping = "{\"_doc\":{\n" + - " \"_routing\": {\n" + - " \"required\": true\n" + - " },\n" + - " \"properties\": {\n" + - " \"shape\": {\n" + - " \"type\": \"geo_shape\",\n" + - " \"tree\" : \"quadtree\"\n" + - " }\n" + - " }\n" + - " }}"; - - final Version version = VersionUtils.randomPreviousCompatibleVersion(random(), Version.V_8_0_0); - // create index - assertAcked(prepareCreate("test").setSettings(settings(version).build()).setMapping(mapping).get()); - ensureGreen(); - - String source = "{\n" + - " \"shape\" : {\n" + - " \"type\" : \"bbox\",\n" + - " \"coordinates\" : [[-45.0, 45.0], [45.0, -45.0]]\n" + - " }\n" + - "}"; - - indexRandom(true, client().prepareIndex("test").setId("0").setSource(source, XContentType.JSON).setRouting("ABC")); - - SearchResponse searchResponse = client().prepareSearch("test").setQuery( - geoShapeQuery("shape", "0").indexedShapeIndex("test").indexedShapeRouting("ABC") - ).get(); - - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); - } - - /** - * Test that the circle is still supported for the legacy shapes - */ - public void testLegacyCircle() throws Exception { - // create index - final Version version = VersionUtils.randomPreviousCompatibleVersion(random(), Version.V_8_0_0); - assertAcked(prepareCreate("test").setSettings(settings(version).build()) - .setMapping("shape", "type=geo_shape,strategy=recursive,tree=geohash").get()); - ensureGreen(); - - indexRandom(true, 
client().prepareIndex("test").setId("0").setSource("shape", (ToXContent) (builder, params) -> { - builder.startObject().field("type", "circle") - .startArray("coordinates").value(30).value(50).endArray() - .field("radius","77km") - .endObject(); - return builder; - })); - - // test self crossing of circles - SearchResponse searchResponse = client().prepareSearch("test").setQuery(geoShapeQuery("shape", - new Circle(30, 50, 77000))).get(); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); - } - - public void testDisallowExpensiveQueries() throws InterruptedException, IOException { - final Version version = VersionUtils.randomPreviousCompatibleVersion(random(), Version.V_8_0_0); - try { - // create index - assertAcked(client().admin().indices().prepareCreate("test").setSettings(settings(version).build()) - .setMapping("shape", "type=geo_shape,strategy=recursive,tree=geohash").get()); - ensureGreen(); - - indexRandom(true, client().prepareIndex("test").setId("0").setSource( - "shape", (ToXContent) (builder, params) -> { - builder.startObject().field("type", "circle") - .startArray("coordinates").value(30).value(50).endArray() - .field("radius", "77km") - .endObject(); - return builder; - })); - refresh(); - - // Execute with search.allow_expensive_queries = null => default value = false => success - SearchResponse searchResponse = client().prepareSearch("test").setQuery(geoShapeQuery("shape", - new Circle(30, 50, 77000))).get(); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); - - ClusterUpdateSettingsRequest updateSettingsRequest = new ClusterUpdateSettingsRequest(); - updateSettingsRequest.persistentSettings(Settings.builder().put("search.allow_expensive_queries", false)); - assertAcked(client().admin().cluster().updateSettings(updateSettingsRequest).actionGet()); - - // Set search.allow_expensive_queries to "false" => assert failure - ElasticsearchException e = expectThrows(ElasticsearchException.class, - () -> client().prepareSearch("test").setQuery(geoShapeQuery("shape", - new Circle(30, 50, 77000))).get()); - assertEquals("[geo-shape] queries on [PrefixTree geo shapes] cannot be executed when " + - "'search.allow_expensive_queries' is set to false.", e.getCause().getMessage()); - - // Set search.allow_expensive_queries to "true" => success - updateSettingsRequest = new ClusterUpdateSettingsRequest(); - updateSettingsRequest.persistentSettings(Settings.builder().put("search.allow_expensive_queries", true)); - assertAcked(client().admin().cluster().updateSettings(updateSettingsRequest).actionGet()); - searchResponse = client().prepareSearch("test").setQuery(geoShapeQuery("shape", - new Circle(30, 50, 77000))).get(); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); - } finally { - ClusterUpdateSettingsRequest updateSettingsRequest = new ClusterUpdateSettingsRequest(); - updateSettingsRequest.persistentSettings(Settings.builder().put("search.allow_expensive_queries", (String) null)); - assertAcked(client().admin().cluster().updateSettings(updateSettingsRequest).actionGet()); - } - } - - private String findNodeName(String index) { - ClusterState state = client().admin().cluster().prepareState().get().getState(); - IndexShardRoutingTable shard = state.getRoutingTable().index(index).shard(0); - String nodeId = shard.assignedShards().get(0).currentNodeId(); - return state.getNodes().get(nodeId).getName(); - } -} diff --git a/server/src/test/java/org/elasticsearch/common/geo/builders/MultiPolygonBuilderTests.java 
b/server/src/test/java/org/elasticsearch/common/geo/builders/MultiPolygonBuilderTests.java index 409d30a3e18cd..33e90ce1873c8 100644 --- a/server/src/test/java/org/elasticsearch/common/geo/builders/MultiPolygonBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/common/geo/builders/MultiPolygonBuilderTests.java @@ -11,6 +11,7 @@ import org.elasticsearch.common.geo.Orientation; import org.elasticsearch.test.geo.RandomShapeGenerator; import org.elasticsearch.test.geo.RandomShapeGenerator.ShapeType; +import org.locationtech.spatial4j.exception.InvalidShapeException; import java.io.IOException; @@ -54,4 +55,86 @@ static MultiPolygonBuilder createRandomShape() { } return mpb; } + + public void testInvalidPolygonBuilders() { + try { + // self intersection polygon + new PolygonBuilder(new CoordinatesBuilder() + .coordinate(-10, -10) + .coordinate(10, 10) + .coordinate(-10, 10) + .coordinate(10, -10) + .close()) + .buildS4J(); + fail("Self intersection not detected"); + } catch (InvalidShapeException e) { + } + + // polygon with hole + new PolygonBuilder(new CoordinatesBuilder() + .coordinate(-10, -10).coordinate(-10, 10).coordinate(10, 10).coordinate(10, -10).close()) + .hole(new LineStringBuilder(new CoordinatesBuilder().coordinate(-5, -5).coordinate(-5, 5).coordinate(5, 5) + .coordinate(5, -5).close())) + .buildS4J(); + try { + // polygon with overlapping hole + new PolygonBuilder(new CoordinatesBuilder() + .coordinate(-10, -10).coordinate(-10, 10).coordinate(10, 10).coordinate(10, -10).close()) + .hole(new LineStringBuilder(new CoordinatesBuilder() + .coordinate(-5, -5).coordinate(-5, 11).coordinate(5, 11).coordinate(5, -5).close())) + .buildS4J(); + + fail("Self intersection not detected"); + } catch (InvalidShapeException e) { + } + + try { + // polygon with intersection holes + new PolygonBuilder(new CoordinatesBuilder() + .coordinate(-10, -10).coordinate(-10, 10).coordinate(10, 10).coordinate(10, -10).close()) + .hole(new LineStringBuilder(new CoordinatesBuilder().coordinate(-5, -5).coordinate(-5, 5).coordinate(5, 5) + .coordinate(5, -5).close())) + .hole(new LineStringBuilder(new CoordinatesBuilder().coordinate(-5, -6).coordinate(5, -6).coordinate(5, -4) + .coordinate(-5, -4).close())) + .buildS4J(); + fail("Intersection of holes not detected"); + } catch (InvalidShapeException e) { + } + + try { + // Common line in polygon + new PolygonBuilder(new CoordinatesBuilder() + .coordinate(-10, -10) + .coordinate(-10, 10) + .coordinate(-5, 10) + .coordinate(-5, -5) + .coordinate(-5, 20) + .coordinate(10, 20) + .coordinate(10, -10) + .close()) + .buildS4J(); + fail("Self intersection not detected"); + } catch (InvalidShapeException e) { + } + + // Multipolygon: polygon with hole and polygon within the whole + new MultiPolygonBuilder() + .polygon(new PolygonBuilder( + new CoordinatesBuilder().coordinate(-10, -10) + .coordinate(-10, 10) + .coordinate(10, 10) + .coordinate(10, -10).close()) + .hole(new LineStringBuilder( + new CoordinatesBuilder().coordinate(-5, -5) + .coordinate(-5, 5) + .coordinate(5, 5) + .coordinate(5, -5).close()))) + .polygon(new PolygonBuilder( + new CoordinatesBuilder() + .coordinate(-4, -4) + .coordinate(-4, 4) + .coordinate(4, 4) + .coordinate(4, -4).close())) + .buildS4J(); + } } diff --git a/test/framework/src/main/java/org/elasticsearch/search/geo/GeoShapeIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/search/geo/GeoShapeIntegTestCase.java new file mode 100644 index 0000000000000..e3ac28daeba19 --- /dev/null +++ 
b/test/framework/src/main/java/org/elasticsearch/search/geo/GeoShapeIntegTestCase.java @@ -0,0 +1,558 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ +package org.elasticsearch.search.geo; + +import org.apache.lucene.util.SloppyMath; +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.Version; +import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest; +import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder; +import org.elasticsearch.action.bulk.BulkItemResponse; +import org.elasticsearch.action.bulk.BulkResponse; +import org.elasticsearch.action.search.SearchRequestBuilder; +import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.metadata.IndexMetadata; +import org.elasticsearch.cluster.routing.IndexShardRoutingTable; +import org.elasticsearch.common.Priority; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.geo.GeoPoint; +import org.elasticsearch.common.geo.Orientation; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.core.internal.io.Streams; +import org.elasticsearch.geometry.Circle; +import org.elasticsearch.geometry.LinearRing; +import org.elasticsearch.geometry.MultiPolygon; +import org.elasticsearch.geometry.Point; +import org.elasticsearch.geometry.Polygon; +import org.elasticsearch.geometry.utils.WellKnownText; +import org.elasticsearch.index.IndexService; +import org.elasticsearch.index.mapper.AbstractShapeGeometryFieldMapper; +import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.query.QueryBuilders; +import org.elasticsearch.indices.IndicesService; +import org.elasticsearch.search.SearchHit; +import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.test.VersionUtils; + +import java.io.ByteArrayOutputStream; +import java.io.FileNotFoundException; +import java.io.IOException; +import java.io.InputStream; +import java.util.List; +import java.util.zip.GZIPInputStream; + +import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; +import static org.elasticsearch.index.query.QueryBuilders.geoBoundingBoxQuery; +import static org.elasticsearch.index.query.QueryBuilders.geoDistanceQuery; +import static org.elasticsearch.index.query.QueryBuilders.geoShapeQuery; +import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; +import static org.elasticsearch.index.query.QueryBuilders.matchQuery; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertFirstHit; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.hasId; +import static org.hamcrest.Matchers.anyOf; +import static org.hamcrest.Matchers.closeTo; +import static org.hamcrest.Matchers.equalTo; +import static 
org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.lessThanOrEqualTo; + +public abstract class GeoShapeIntegTestCase extends ESIntegTestCase { + + protected abstract void getGeoShapeMapping(XContentBuilder b) throws IOException; + + protected abstract Version getVersion(); + + protected abstract boolean allowExpensiveQueries(); + + @Override + protected boolean forbidPrivateIndexSettings() { + return false; + } + + /** + * Test that orientation parameter correctly persists across cluster restart + */ + public void testOrientationPersistence() throws Exception { + String idxName = "orientation"; + XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("properties").startObject("location"); + getGeoShapeMapping(mapping); + mapping.field("orientation", "left").endObject().endObject().endObject(); + + // create index + assertAcked(prepareCreate(idxName).setMapping(mapping).setSettings(settings(getVersion()).build())); + + mapping = XContentFactory.jsonBuilder().startObject().startObject("properties").startObject("location"); + getGeoShapeMapping(mapping); + mapping.field("orientation", "right").endObject().endObject().endObject(); + + assertAcked(prepareCreate(idxName+"2").setMapping(mapping).setSettings(settings(getVersion()).build())); + ensureGreen(idxName, idxName+"2"); + + internalCluster().fullRestart(); + ensureGreen(idxName, idxName+"2"); + + // left orientation test + IndicesService indicesService = internalCluster().getInstance(IndicesService.class, findNodeName(idxName)); + IndexService indexService = indicesService.indexService(resolveIndex(idxName)); + MappedFieldType fieldType = indexService.mapperService().fieldType("location"); + assertThat(fieldType, instanceOf(AbstractShapeGeometryFieldMapper.AbstractShapeGeometryFieldType.class)); + + AbstractShapeGeometryFieldMapper.AbstractShapeGeometryFieldType gsfm = + (AbstractShapeGeometryFieldMapper.AbstractShapeGeometryFieldType)fieldType; + Orientation orientation = gsfm.orientation(); + assertThat(orientation, equalTo(Orientation.CLOCKWISE)); + assertThat(orientation, equalTo(Orientation.LEFT)); + assertThat(orientation, equalTo(Orientation.CW)); + + // right orientation test + indicesService = internalCluster().getInstance(IndicesService.class, findNodeName(idxName+"2")); + indexService = indicesService.indexService(resolveIndex((idxName+"2"))); + fieldType = indexService.mapperService().fieldType("location"); + assertThat(fieldType, instanceOf(AbstractShapeGeometryFieldMapper.AbstractShapeGeometryFieldType.class)); + + gsfm = (AbstractShapeGeometryFieldMapper.AbstractShapeGeometryFieldType)fieldType; + orientation = gsfm.orientation(); + assertThat(orientation, equalTo(Orientation.COUNTER_CLOCKWISE)); + assertThat(orientation, equalTo(Orientation.RIGHT)); + assertThat(orientation, equalTo(Orientation.CCW)); + } + + /** + * Test that ignore_malformed on GeoShapeFieldMapper does not fail the entire document + */ + public void testIgnoreMalformed() throws Exception { + // create index + XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("properties").startObject("shape"); + getGeoShapeMapping(mapping); + mapping.field("ignore_malformed", true).endObject().endObject().endObject(); + assertAcked(prepareCreate("test").setMapping(mapping).setSettings(settings(getVersion()).build())); + ensureGreen(); + + // test self crossing ccw poly not crossing dateline + String polygonGeoJson = Strings.toString(XContentFactory.jsonBuilder().startObject().field("type", 
"Polygon") + .startArray("coordinates") + .startArray() + .startArray().value(176.0).value(15.0).endArray() + .startArray().value(-177.0).value(10.0).endArray() + .startArray().value(-177.0).value(-10.0).endArray() + .startArray().value(176.0).value(-15.0).endArray() + .startArray().value(-177.0).value(15.0).endArray() + .startArray().value(172.0).value(0.0).endArray() + .startArray().value(176.0).value(15.0).endArray() + .endArray() + .endArray() + .endObject()); + + indexRandom(true, client().prepareIndex("test").setId("0").setSource("shape", + polygonGeoJson)); + SearchResponse searchResponse = client().prepareSearch("test").setQuery(matchAllQuery()).get(); + assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); + } + + /** + * Test that the indexed shape routing can be provided if it is required + */ + public void testIndexShapeRouting() throws Exception { + XContentBuilder mapping = XContentFactory.jsonBuilder().startObject() + .startObject("_doc").startObject("_routing").field("required", true).endObject() + .startObject("properties").startObject("shape"); + getGeoShapeMapping(mapping); + mapping.endObject().endObject().endObject().endObject(); + + // create index + assertAcked(prepareCreate("test").setMapping(mapping).setSettings(settings(getVersion()).build())); + ensureGreen(); + + String source = "{\n" + + " \"shape\" : {\n" + + " \"type\" : \"bbox\",\n" + + " \"coordinates\" : [[-45.0, 45.0], [45.0, -45.0]]\n" + + " }\n" + + "}"; + + indexRandom(true, client().prepareIndex("test").setId("0").setSource(source, XContentType.JSON).setRouting("ABC")); + + SearchResponse searchResponse = client().prepareSearch("test").setQuery( + geoShapeQuery("shape", "0").indexedShapeIndex("test").indexedShapeRouting("ABC") + ).get(); + + assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); + } + + public void testIndexPolygonDateLine() throws Exception { + XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("properties").startObject("shape"); + getGeoShapeMapping(mapping); + mapping.endObject().endObject().endObject(); + + // create index + assertAcked(client().admin().indices().prepareCreate("test").setSettings(settings(getVersion()).build()).setMapping(mapping).get()); + ensureGreen(); + + String source = "{\n" + + " \"shape\" : \"POLYGON((179 0, -179 0, -179 2, 179 2, 179 0))\"" + + "}"; + + indexRandom(true, client().prepareIndex("test").setId("0").setSource(source, XContentType.JSON)); + + SearchResponse searchResponse = client().prepareSearch("test").setQuery(geoShapeQuery("shape", new Point(-179.75, 1))).get(); + assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); + + searchResponse = client().prepareSearch("test").setQuery(geoShapeQuery("shape", new Point(90, 1))).get(); + assertThat(searchResponse.getHits().getTotalHits().value, equalTo(0L)); + + searchResponse = client().prepareSearch("test").setQuery(geoShapeQuery("shape", new Point(-180, 1))).get(); + assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); + + searchResponse = client().prepareSearch("test").setQuery(geoShapeQuery("shape", new Point(180, 1))).get(); + assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); + } + + public void testDisallowExpensiveQueries() throws InterruptedException, IOException { + XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("properties").startObject("shape"); + getGeoShapeMapping(mapping); + mapping.endObject().endObject().endObject(); + + // 
create index + assertAcked(client().admin().indices().prepareCreate("test").setSettings(settings(getVersion()).build()).setMapping(mapping).get()); + ensureGreen(); + + String source = "{\n" + + " \"shape\" : {\n" + + " \"type\" : \"bbox\",\n" + + " \"coordinates\" : [[-45.0, 45.0], [45.0, -45.0]]\n" + + " }\n" + + "}"; + + indexRandom(true, client().prepareIndex("test").setId("0").setSource(source, XContentType.JSON)); + refresh(); + + try { + // Execute with search.allow_expensive_queries to false + ClusterUpdateSettingsRequest updateSettingsRequest = new ClusterUpdateSettingsRequest(); + updateSettingsRequest.persistentSettings(Settings.builder().put("search.allow_expensive_queries", false)); + assertAcked(client().admin().cluster().updateSettings(updateSettingsRequest).actionGet()); + + SearchRequestBuilder builder = client().prepareSearch("test").setQuery(geoShapeQuery("shape", + new Circle(0, 0, 77000))); + if (allowExpensiveQueries()) { + assertThat(builder.get().getHits().getTotalHits().value, equalTo(1L)); + } else { + ElasticsearchException e = expectThrows(ElasticsearchException.class, builder::get); + assertEquals("[geo-shape] queries on [PrefixTree geo shapes] cannot be executed when " + + "'search.allow_expensive_queries' is set to false.", e.getCause().getMessage()); + } + + // Set search.allow_expensive_queries to "null" + updateSettingsRequest = new ClusterUpdateSettingsRequest(); + updateSettingsRequest.persistentSettings(Settings.builder().put("search.allow_expensive_queries", (String) null)); + assertAcked(client().admin().cluster().updateSettings(updateSettingsRequest).actionGet()); + assertThat(builder.get().getHits().getTotalHits().value, equalTo(1L)); + + // Set search.allow_expensive_queries to "true" + updateSettingsRequest = new ClusterUpdateSettingsRequest(); + updateSettingsRequest.persistentSettings(Settings.builder().put("search.allow_expensive_queries", true)); + assertAcked(client().admin().cluster().updateSettings(updateSettingsRequest).actionGet()); + assertThat(builder.get().getHits().getTotalHits().value, equalTo(1L)); + } finally { + ClusterUpdateSettingsRequest updateSettingsRequest = new ClusterUpdateSettingsRequest(); + updateSettingsRequest.persistentSettings(Settings.builder().put("search.allow_expensive_queries", (String) null)); + assertAcked(client().admin().cluster().updateSettings(updateSettingsRequest).actionGet()); + } + } + + public void testShapeRelations() throws Exception { + XContentBuilder mapping = XContentFactory.jsonBuilder() + .startObject() + .startObject("properties") + .startObject("area"); + getGeoShapeMapping(mapping); + mapping.endObject().endObject().endObject(); + + final Version version = getVersion(); + CreateIndexRequestBuilder mappingRequest = client().admin().indices().prepareCreate("shapes") + .setMapping(mapping).setSettings(settings(version).build()); + mappingRequest.get(); + client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForGreenStatus().get(); + + // Create a multipolygon with two polygons. The first is an rectangle of size 10x10 + // with a hole of size 5x5 equidistant from all sides. 
This hole in turn contains + // the second polygon of size 4x4 equidistant from all sites + List polygons = List.of( + new Polygon( + new LinearRing( + new double[] {-10, -10, 10, 10, -10}, + new double[] {-10, 10, 10, -10, -10} + ), + List.of( + new LinearRing( + new double[] {-5, -5, 5, 5, -5}, + new double[] {-5, 5, 5, -5, -5} + ) + ) + ), + new Polygon( + new LinearRing( + new double[] {-4, -4, 4, 4, -4}, + new double[] {-4, 4, 4, -4, -4} + ) + ) + ); + + BytesReference data = BytesReference.bytes( + jsonBuilder().startObject().field("area", WellKnownText.toWKT(new MultiPolygon(polygons))).endObject() + ); + + client().prepareIndex("shapes").setId("1").setSource(data, XContentType.JSON).get(); + client().admin().indices().prepareRefresh().get(); + + // Point in polygon + SearchResponse result = client().prepareSearch() + .setQuery(matchAllQuery()) + .setPostFilter(QueryBuilders.geoIntersectionQuery("area", new Point(3, 3))) + .get(); + assertHitCount(result, 1); + assertFirstHit(result, hasId("1")); + + // Point in polygon hole + result = client().prepareSearch() + .setQuery(matchAllQuery()) + .setPostFilter(QueryBuilders.geoIntersectionQuery("area", new Point(4.5, 4.5))) + .get(); + assertHitCount(result, 0); + + // by definition the border of a polygon belongs to the inner + // so the border of a polygons hole also belongs to the inner + // of the polygon NOT the hole + + // Point on polygon border + result = client().prepareSearch() + .setQuery(matchAllQuery()) + .setPostFilter(QueryBuilders.geoIntersectionQuery("area", new Point(10.0, 5.0))) + .get(); + assertHitCount(result, 1); + assertFirstHit(result, hasId("1")); + + // Point on hole border + result = client().prepareSearch() + .setQuery(matchAllQuery()) + .setPostFilter(QueryBuilders.geoIntersectionQuery("area", new Point(5.0, 2.0))) + .get(); + assertHitCount(result, 1); + assertFirstHit(result, hasId("1")); + + + // Point not in polygon + result = client().prepareSearch() + .setQuery(matchAllQuery()) + .setPostFilter(QueryBuilders.geoDisjointQuery("area", new Point(3, 3))) + .get(); + assertHitCount(result, 0); + + // Point in polygon hole + result = client().prepareSearch() + .setQuery(matchAllQuery()) + .setPostFilter(QueryBuilders.geoDisjointQuery("area", new Point(4.5, 4.5))) + .get(); + assertHitCount(result, 1); + assertFirstHit(result, hasId("1")); + + // Create a polygon that fills the empty area of the polygon defined above + Polygon inverse = new Polygon( + new LinearRing( + new double[] {-5, -5, 5, 5, -5}, + new double[] {-5, 5, 5, -5, -5} + ), + List.of( + new LinearRing( + new double[] {-4, -4, 4, 4, -4}, + new double[] {-4, 4, 4, -4, -4} + ) + ) + ); + + data = BytesReference.bytes(jsonBuilder().startObject().field("area", WellKnownText.toWKT(inverse)).endObject()); + client().prepareIndex("shapes").setId("2").setSource(data, XContentType.JSON).get(); + client().admin().indices().prepareRefresh().get(); + + // re-check point on polygon hole + result = client().prepareSearch() + .setQuery(matchAllQuery()) + .setPostFilter(QueryBuilders.geoIntersectionQuery("area", new Point(4.5, 4.5))) + .get(); + assertHitCount(result, 1); + assertFirstHit(result, hasId("2")); + + + // Polygon WithIn Polygon + Polygon WithIn = new Polygon( + new LinearRing( + new double[] {-30, -30, 30, 30, -30}, + new double[] {-30, 30, 30, -30, -30} + ) + ); + + result = client().prepareSearch() + .setQuery(matchAllQuery()) + .setPostFilter(QueryBuilders.geoWithinQuery("area", WithIn)) + .get(); + assertHitCount(result, 2); + + // Create a 
polygon crossing longitude 180. + Polygon crossing = new Polygon( + new LinearRing( + new double[] {170, 190, 190, 170, 170}, + new double[] {-10, -10, 10, 10, -10} + ) + ); + + data = BytesReference.bytes(jsonBuilder().startObject().field("area", WellKnownText.toWKT(crossing)).endObject()); + client().prepareIndex("shapes").setId("1").setSource(data, XContentType.JSON).get(); + client().admin().indices().prepareRefresh().get(); + + // Create a polygon crossing longitude 180 with hole. + crossing = new Polygon( + new LinearRing( + new double[] {170, 190, 190, 170, 170}, + new double[] {-10, -10, 10, 10, -10} + ), + List.of( + new LinearRing( + new double[] {175, 185, 185, 175, 175}, + new double[] {-5, -5, 5, 5, -5} + ) + ) + ); + + data = BytesReference.bytes(jsonBuilder().startObject().field("area", WellKnownText.toWKT(crossing)).endObject()); + client().prepareIndex("shapes").setId("1").setSource(data, XContentType.JSON).get(); + client().admin().indices().prepareRefresh().get(); + + result = client().prepareSearch() + .setQuery(matchAllQuery()) + .setPostFilter(QueryBuilders.geoIntersectionQuery("area", new Point(174, -4))) + .get(); + assertHitCount(result, 1); + + result = client().prepareSearch() + .setQuery(matchAllQuery()) + .setPostFilter(QueryBuilders.geoIntersectionQuery("area", new Point(-174, -4))) + .get(); + assertHitCount(result, 1); + + result = client().prepareSearch() + .setQuery(matchAllQuery()) + .setPostFilter(QueryBuilders.geoIntersectionQuery("area", new Point(180, -4))) + .get(); + assertHitCount(result, 0); + + result = client().prepareSearch() + .setQuery(matchAllQuery()) + .setPostFilter(QueryBuilders.geoIntersectionQuery("area", new Point(180, -6))) + .get(); + assertHitCount(result, 1); + } + + public void testBulk() throws Exception { + byte[] bulkAction = unZipData("/org/elasticsearch/search/geo/gzippedmap.gz"); + Version version = VersionUtils.randomIndexCompatibleVersion(random()); + Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, version).build(); + XContentBuilder xContentBuilder = XContentFactory.jsonBuilder() + .startObject() + .startObject("_doc") + .startObject("properties") + .startObject("pin") + .field("type", "geo_point"); + xContentBuilder.field("store", true) + .endObject() + .startObject("location"); + getGeoShapeMapping(xContentBuilder); + xContentBuilder.field("ignore_malformed", true) + .endObject() + .endObject() + .endObject() + .endObject(); + + client().admin().indices().prepareCreate("countries").setSettings(settings) + .setMapping(xContentBuilder).get(); + BulkResponse bulk = client().prepareBulk().add(bulkAction, 0, bulkAction.length, null, xContentBuilder.contentType()).get(); + + for (BulkItemResponse item : bulk.getItems()) { + assertFalse("unable to index data", item.isFailed()); + } + + client().admin().indices().prepareRefresh().get(); + String key = "DE"; + + SearchResponse searchResponse = client().prepareSearch() + .setQuery(matchQuery("_id", key)) + .get(); + + assertHitCount(searchResponse, 1); + + for (SearchHit hit : searchResponse.getHits()) { + assertThat(hit.getId(), equalTo(key)); + } + + SearchResponse world = client().prepareSearch().addStoredField("pin").setQuery( + geoBoundingBoxQuery("pin").setCorners(90, -179.99999, -90, 179.99999) + ).get(); + + assertHitCount(world, 53); + + SearchResponse distance = client().prepareSearch().addStoredField("pin").setQuery( + geoDistanceQuery("pin").distance("425km").point(51.11, 9.851) + ).get(); + + assertHitCount(distance, 5); + GeoPoint 
point = new GeoPoint(); + for (SearchHit hit : distance.getHits()) { + String name = hit.getId(); + point.resetFromString(hit.getFields().get("pin").getValue()); + double dist = distance(point.getLat(), point.getLon(), 51.11, 9.851); + + assertThat("distance to '" + name + "'", dist, lessThanOrEqualTo(425000d)); + assertThat(name, anyOf(equalTo("CZ"), equalTo("DE"), equalTo("BE"), equalTo("NL"), equalTo("LU"))); + if (key.equals(name)) { + assertThat(dist, closeTo(0d, 0.1d)); + } + } + } + + private String findNodeName(String index) { + ClusterState state = client().admin().cluster().prepareState().get().getState(); + IndexShardRoutingTable shard = state.getRoutingTable().index(index).shard(0); + String nodeId = shard.assignedShards().get(0).currentNodeId(); + return state.getNodes().get(nodeId).getName(); + } + + private byte[] unZipData(String path) throws IOException { + InputStream is = Streams.class.getResourceAsStream(path); + if (is == null) { + throw new FileNotFoundException("Resource [" + path + "] not found in classpath"); + } + + ByteArrayOutputStream out = new ByteArrayOutputStream(); + GZIPInputStream in = new GZIPInputStream(is); + Streams.copy(in, out); + + is.close(); + out.close(); + + return out.toByteArray(); + } + + private double distance(double lat1, double lon1, double lat2, double lon2) { + return SloppyMath.haversinMeters(lat1, lon1, lat2, lon2); + } +} From ac803f078c0292ce947a1138665877161276454d Mon Sep 17 00:00:00 2001 From: Rory Hunter Date: Wed, 1 Sep 2021 07:25:29 +0100 Subject: [PATCH 050/128] Drop version field from changelog YAML (#76985) The changelog generation process currently relies on version information being present in the changelog YAML descriptors. However, this makes them difficult to update in some scenarios. For example, if a PR is merged and subsequently labelled for backporting, our automation won't update the versions in the changelog YAML. We can make the process more flexible by removing version data from the changelog YAML files, and instead inferring the versions from each changelog YAML file's existence in the git tree at each tag in the minor series. This change makes the process more ergonomic for developers, but harder to test, since I can't simply concoct YAML data for a range of versions. Instead, I've added a number of unit tests, and tried to exercise all the relevant parts. It is now an error to include `versions` in the YAML file.
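To make the inference idea concrete, here is a minimal standalone sketch. It is not the code added by this patch (the build goes through its own GitWrapper and Gradle plumbing); it only illustrates walking the tags of a minor series and reporting the first tag whose tree contains a given changelog file. The tag pattern and changelog path are hypothetical examples, and plain ProcessBuilder calls to git stand in for the real tooling.

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.List;

public class ChangelogVersionSketch {

    /** Runs a git command in the current working directory and returns its stdout lines. */
    private static List<String> git(String... args) throws IOException, InterruptedException {
        List<String> command = new ArrayList<>();
        command.add("git");
        command.addAll(List.of(args));
        Process process = new ProcessBuilder(command).start();
        List<String> lines = new ArrayList<>();
        try (BufferedReader reader = new BufferedReader(new InputStreamReader(process.getInputStream()))) {
            String line;
            while ((line = reader.readLine()) != null) {
                lines.add(line);
            }
        }
        process.waitFor();
        return lines;
    }

    public static void main(String[] args) throws Exception {
        String tagPattern = "v7.15.*";                      // hypothetical minor series
        String changelogPath = "docs/changelog/76985.yaml"; // hypothetical changelog file

        // Walk the tags of the series in ascending order; the first tag whose tree
        // contains the changelog file is the release in which the change first shipped.
        for (String tag : git("tag", "--list", tagPattern, "--sort=version:refname")) {
            boolean present = git("ls-tree", "--name-only", tag, "--", changelogPath).isEmpty() == false;
            if (present) {
                System.out.println(changelogPath + " first appears in " + tag);
                return;
            }
        }
        System.out.println(changelogPath + " has not shipped in any " + tagPattern + " release yet");
    }
}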
--- .../release/BreakingChangesGenerator.java | 26 +- .../internal/release/ChangelogEntry.java | 19 +- .../release/GenerateReleaseNotesTask.java | 233 ++++++++++--- .../gradle/internal/release/GitWrapper.java | 89 +++++ .../internal/release/QualifiedVersion.java | 182 +++++++++++ .../release/ReleaseHighlightsGenerator.java | 6 +- .../release/ReleaseNotesGenerator.java | 68 ++-- .../release/ReleaseNotesIndexGenerator.java | 64 ++++ .../release/ReleaseNotesIndexUpdater.java | 97 ------ .../internal/release/ReleaseToolsPlugin.java | 8 +- .../src/main/resources/changelog-schema.json | 11 +- .../templates/release-highlights.asciidoc | 2 +- .../templates/release-notes-index.asciidoc | 4 +- .../templates/release-notes.asciidoc | 18 +- .../release/BreakingChangesGeneratorTest.java | 83 +++++ .../release/GenerateReleaseNotesTaskTest.java | 307 ++++++++++++++++++ .../ReleaseHighlightsGeneratorTest.java | 77 +++++ .../release/ReleaseNotesGeneratorTest.java | 126 +++++++ .../ReleaseNotesIndexGeneratorTest.java | 62 ++++ ...ChangesGeneratorTest.generateFile.asciidoc | 81 +++++ ...hlightsGeneratorTest.generateFile.asciidoc | 40 +++ ...seNotesGeneratorTest.generateFile.asciidoc | 105 ++++++ ...esIndexGeneratorTest.generateFile.asciidoc | 27 ++ docs/changelog/70635.yaml | 3 - docs/changelog/75981.yaml | 4 - docs/changelog/76192.yaml | 3 - 26 files changed, 1510 insertions(+), 235 deletions(-) create mode 100644 build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/release/GitWrapper.java create mode 100644 build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/release/QualifiedVersion.java create mode 100644 build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/release/ReleaseNotesIndexGenerator.java delete mode 100644 build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/release/ReleaseNotesIndexUpdater.java create mode 100644 build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/release/BreakingChangesGeneratorTest.java create mode 100644 build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/release/GenerateReleaseNotesTaskTest.java create mode 100644 build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/release/ReleaseHighlightsGeneratorTest.java create mode 100644 build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/release/ReleaseNotesGeneratorTest.java create mode 100644 build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/release/ReleaseNotesIndexGeneratorTest.java create mode 100644 build-tools-internal/src/test/resources/org/elasticsearch/gradle/internal/release/BreakingChangesGeneratorTest.generateFile.asciidoc create mode 100644 build-tools-internal/src/test/resources/org/elasticsearch/gradle/internal/release/ReleaseHighlightsGeneratorTest.generateFile.asciidoc create mode 100644 build-tools-internal/src/test/resources/org/elasticsearch/gradle/internal/release/ReleaseNotesGeneratorTest.generateFile.asciidoc create mode 100644 build-tools-internal/src/test/resources/org/elasticsearch/gradle/internal/release/ReleaseNotesIndexGeneratorTest.generateFile.asciidoc diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/release/BreakingChangesGenerator.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/release/BreakingChangesGenerator.java index 691aa47d9ebbc..0b0008a44a538 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/release/BreakingChangesGenerator.java +++ 
b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/release/BreakingChangesGenerator.java @@ -12,12 +12,12 @@ import com.google.common.annotations.VisibleForTesting; -import org.elasticsearch.gradle.Version; import org.elasticsearch.gradle.VersionProperties; import java.io.File; import java.io.FileWriter; import java.io.IOException; +import java.io.Writer; import java.nio.file.Files; import java.util.HashMap; import java.util.List; @@ -26,6 +26,9 @@ import java.util.TreeMap; import java.util.stream.Collectors; +import static java.util.Comparator.comparing; +import static java.util.stream.Collectors.groupingBy; + /** * Generates the page that lists the breaking changes and deprecations for a minor version release. */ @@ -33,33 +36,40 @@ public class BreakingChangesGenerator { static void update(File templateFile, File outputFile, List entries) throws IOException { try (FileWriter output = new FileWriter(outputFile)) { - generateFile(Files.readString(templateFile.toPath()), output, entries); + generateFile( + QualifiedVersion.of(VersionProperties.getElasticsearch()), + Files.readString(templateFile.toPath()), + output, + entries + ); } } @VisibleForTesting - private static void generateFile(String template, FileWriter outputWriter, List entries) throws IOException { - final Version version = VersionProperties.getElasticsearchVersion(); + static void generateFile(QualifiedVersion version, String template, Writer outputWriter, List entries) + throws IOException { final Map>> breakingChangesByNotabilityByArea = entries.stream() .map(ChangelogEntry::getBreaking) .filter(Objects::nonNull) + .sorted(comparing(ChangelogEntry.Breaking::getTitle)) .collect( - Collectors.groupingBy( + groupingBy( ChangelogEntry.Breaking::isNotable, - Collectors.groupingBy(ChangelogEntry.Breaking::getArea, TreeMap::new, Collectors.toList()) + groupingBy(ChangelogEntry.Breaking::getArea, TreeMap::new, Collectors.toList()) ) ); final Map> deprecationsByArea = entries.stream() .map(ChangelogEntry::getDeprecation) .filter(Objects::nonNull) - .collect(Collectors.groupingBy(ChangelogEntry.Deprecation::getArea, TreeMap::new, Collectors.toList())); + .sorted(comparing(ChangelogEntry.Deprecation::getTitle)) + .collect(groupingBy(ChangelogEntry.Deprecation::getArea, TreeMap::new, Collectors.toList())); final Map bindings = new HashMap<>(); bindings.put("breakingChangesByNotabilityByArea", breakingChangesByNotabilityByArea); bindings.put("deprecationsByArea", deprecationsByArea); - bindings.put("isElasticsearchSnapshot", VersionProperties.isElasticsearchSnapshot()); + bindings.put("isElasticsearchSnapshot", version.isSnapshot()); bindings.put("majorDotMinor", version.getMajor() + "." 
+ version.getMinor()); bindings.put("majorMinor", String.valueOf(version.getMajor()) + version.getMinor()); bindings.put("nextMajor", (version.getMajor() + 1) + ".0"); diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/release/ChangelogEntry.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/release/ChangelogEntry.java index 08b03b35ccd63..19b9ed2f274a4 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/release/ChangelogEntry.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/release/ChangelogEntry.java @@ -37,7 +37,6 @@ public class ChangelogEntry { private Highlight highlight; private Breaking breaking; private Deprecation deprecation; - private List versions; private static final ObjectMapper yamlMapper = new ObjectMapper(new YAMLFactory()); @@ -113,14 +112,6 @@ public void setDeprecation(Deprecation deprecation) { this.deprecation = deprecation; } - public List getVersions() { - return versions; - } - - public void setVersions(List versions) { - this.versions = versions; - } - @Override public boolean equals(Object o) { if (this == o) { @@ -136,20 +127,19 @@ public boolean equals(Object o) { && Objects.equals(type, that.type) && Objects.equals(summary, that.summary) && Objects.equals(highlight, that.highlight) - && Objects.equals(breaking, that.breaking) - && Objects.equals(versions, that.versions); + && Objects.equals(breaking, that.breaking); } @Override public int hashCode() { - return Objects.hash(pr, issues, area, type, summary, highlight, breaking, versions); + return Objects.hash(pr, issues, area, type, summary, highlight, breaking); } @Override public String toString() { return String.format( Locale.ROOT, - "ChangelogEntry{pr=%d, issues=%s, area='%s', type='%s', summary='%s', highlight=%s, breaking=%s, deprecation=%s versions=%s}", + "ChangelogEntry{pr=%d, issues=%s, area='%s', type='%s', summary='%s', highlight=%s, breaking=%s, deprecation=%s}", pr, issues, area, @@ -157,8 +147,7 @@ public String toString() { summary, highlight, breaking, - deprecation, - versions + deprecation ); } diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/release/GenerateReleaseNotesTask.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/release/GenerateReleaseNotesTask.java index 5d5e1edf9b99e..0769996c50a88 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/release/GenerateReleaseNotesTask.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/release/GenerateReleaseNotesTask.java @@ -8,9 +8,11 @@ package org.elasticsearch.gradle.internal.release; -import org.elasticsearch.gradle.Version; +import com.google.common.annotations.VisibleForTesting; + import org.elasticsearch.gradle.VersionProperties; import org.gradle.api.DefaultTask; +import org.gradle.api.GradleException; import org.gradle.api.file.ConfigurableFileCollection; import org.gradle.api.file.FileCollection; import org.gradle.api.file.RegularFile; @@ -22,12 +24,23 @@ import org.gradle.api.tasks.InputFiles; import org.gradle.api.tasks.OutputFile; import org.gradle.api.tasks.TaskAction; +import org.gradle.process.ExecOperations; -import javax.inject.Inject; +import java.io.File; import java.io.IOException; +import java.nio.file.Path; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Iterator; import java.util.List; -import java.util.function.Predicate; -import 
java.util.stream.Collectors; +import java.util.Map; +import java.util.Set; +import javax.inject.Inject; + +import static java.util.Comparator.naturalOrder; +import static java.util.stream.Collectors.toList; +import static java.util.stream.Collectors.toSet; /** * Orchestrates the steps required to generate or update various release notes files. @@ -47,8 +60,10 @@ public class GenerateReleaseNotesTask extends DefaultTask { private final RegularFileProperty releaseHighlightsFile; private final RegularFileProperty breakingChangesFile; + private final GitWrapper gitWrapper; + @Inject - public GenerateReleaseNotesTask(ObjectFactory objectFactory) { + public GenerateReleaseNotesTask(ObjectFactory objectFactory, ExecOperations execOperations) { changelogs = objectFactory.fileCollection(); releaseNotesIndexTemplate = objectFactory.fileProperty(); @@ -60,56 +75,194 @@ public GenerateReleaseNotesTask(ObjectFactory objectFactory) { releaseNotesFile = objectFactory.fileProperty(); releaseHighlightsFile = objectFactory.fileProperty(); breakingChangesFile = objectFactory.fileProperty(); + + gitWrapper = new GitWrapper(execOperations); } @TaskAction public void executeTask() throws IOException { + if (needsGitTags(VersionProperties.getElasticsearch())) { + findAndUpdateUpstreamRemote(gitWrapper); + } + LOGGER.info("Finding changelog files..."); - final Version checkoutVersion = VersionProperties.getElasticsearchVersion(); + final Map> filesByVersion = partitionFilesByVersion( + gitWrapper, + VersionProperties.getElasticsearch(), + this.changelogs.getFiles() + ); - final List entries = this.changelogs.getFiles() - .stream() - .map(ChangelogEntry::parse) - .filter( - // Only process changelogs that are included in this minor version series of ES. - // If this change was released in an earlier major or minor version of Elasticsearch, do not - // include it in the notes. An earlier patch version is OK, the release notes include changes - // for every patch release in a minor series. 
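// Note: the filter removed here keyed off each changelog's `versions:` list, which this patch also
// removes from ChangelogEntry and changelog-schema.json; the git-tag based partitionFilesByVersion
// added further down replaces that mechanism.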
- log -> { - final List versionsForChangelogFile = log.getVersions() - .stream() - .map(v -> Version.fromString(v, Version.Mode.RELAXED)) - .collect(Collectors.toList()); - - final Predicate includedInSameMinor = v -> v.getMajor() == checkoutVersion.getMajor() - && v.getMinor() == checkoutVersion.getMinor(); - - final Predicate includedInEarlierMajorOrMinor = v -> v.getMajor() < checkoutVersion.getMajor() - || (v.getMajor() == checkoutVersion.getMajor() && v.getMinor() < checkoutVersion.getMinor()); - - boolean includedInThisMinor = versionsForChangelogFile.stream().anyMatch(includedInSameMinor); - - if (includedInThisMinor) { - return versionsForChangelogFile.stream().noneMatch(includedInEarlierMajorOrMinor); - } else { - return false; - } - } - ) - .collect(Collectors.toList()); + final List entries = new ArrayList<>(); + final Map> changelogsByVersion = new HashMap<>(); + + filesByVersion.forEach((version, files) -> { + Set entriesForVersion = files.stream().map(ChangelogEntry::parse).collect(toSet()); + entries.addAll(entriesForVersion); + changelogsByVersion.put(version, entriesForVersion); + }); + + final Set versions = getVersions(gitWrapper, VersionProperties.getElasticsearch()); LOGGER.info("Updating release notes index..."); - ReleaseNotesIndexUpdater.update(this.releaseNotesIndexTemplate.get().getAsFile(), this.releaseNotesIndexFile.get().getAsFile()); + ReleaseNotesIndexGenerator.update( + versions, + this.releaseNotesIndexTemplate.get().getAsFile(), + this.releaseNotesIndexFile.get().getAsFile() + ); LOGGER.info("Generating release notes..."); - ReleaseNotesGenerator.update(this.releaseNotesTemplate.get().getAsFile(), this.releaseNotesFile.get().getAsFile(), entries); + ReleaseNotesGenerator.update( + this.releaseNotesTemplate.get().getAsFile(), + this.releaseNotesFile.get().getAsFile(), + changelogsByVersion + ); LOGGER.info("Generating release highlights..."); - ReleaseHighlightsGenerator.update(this.releaseHighlightsTemplate.get().getAsFile(), this.releaseHighlightsFile.get().getAsFile(), entries); + ReleaseHighlightsGenerator.update( + this.releaseHighlightsTemplate.get().getAsFile(), + this.releaseHighlightsFile.get().getAsFile(), + entries + ); LOGGER.info("Generating breaking changes / deprecations notes..."); - BreakingChangesGenerator.update(this.breakingChangesTemplate.get().getAsFile(), this.breakingChangesFile.get().getAsFile(), entries); + BreakingChangesGenerator.update( + this.breakingChangesTemplate.get().getAsFile(), + this.breakingChangesFile.get().getAsFile(), + entries + ); + } + + /** + * Find all tags in the `major.minor` series for the supplied version + * @param gitWrapper used to call `git` + * @param currentVersion the version to base the query upon + * @return all versions in the series + */ + @VisibleForTesting + static Set getVersions(GitWrapper gitWrapper, String currentVersion) { + QualifiedVersion v = QualifiedVersion.of(currentVersion); + Set versions = gitWrapper.listVersions("v" + v.getMajor() + '.' + v.getMinor() + ".*").collect(toSet()); + versions.add(v); + return versions; + } + + /** + * Group a set of files by the version in which they first appeared, up until the supplied version. Any files not + * present in an earlier version are assumed to have been introduced in the specified version. + * + *
This method works by finding all git tags prior to {@param versionString} in the same minor series, and + * examining the git tree for that tag. By doing this over each tag, it is possible to see how the contents + * of the changelog directory changed over time. + * + * @param gitWrapper used to call `git` + * @param versionString the "current" version. Does not require a tag in git. + * @param allFilesInCheckout the files to partition + * @return a mapping from version to the files added in that version. + */ + @VisibleForTesting + static Map> partitionFilesByVersion( + GitWrapper gitWrapper, + String versionString, + Set allFilesInCheckout + ) { + if (needsGitTags(versionString) == false) { + return Map.of(QualifiedVersion.of(versionString), allFilesInCheckout); + } + + QualifiedVersion currentVersion = QualifiedVersion.of(versionString); + + // Find all tags for this minor series, using a wildcard tag pattern. + String tagWildcard = "v%d.%d*".formatted(currentVersion.getMajor(), currentVersion.getMinor()); + + final List earlierVersions = gitWrapper.listVersions(tagWildcard) + // Only keep earlier versions, and if `currentVersion` is a prerelease, then only prereleases too. + .filter(each -> each.isBefore(currentVersion) && (currentVersion.hasQualifier() == each.hasQualifier())) + .sorted(naturalOrder()) + .collect(toList()); + + if (earlierVersions.isEmpty()) { + throw new GradleException("Failed to find git tags prior to [v" + currentVersion + "]"); + } + + Map> partitionedFiles = new HashMap<>(); + + Set mutableAllFilesInCheckout = new HashSet<>(allFilesInCheckout); + + // 1. For each earlier version + earlierVersions.forEach(earlierVersion -> { + // 2. Find all the changelog files it contained + Set filesInTreeForVersion = gitWrapper.listFiles("v" + earlierVersion, "docs/changelog") + .map(line -> Path.of(line).getFileName().toString()) + .collect(toSet()); + + Set filesForVersion = new HashSet<>(); + partitionedFiles.put(earlierVersion, filesForVersion); + + // 3. Find the `File` object for each one + final Iterator filesIterator = mutableAllFilesInCheckout.iterator(); + while (filesIterator.hasNext()) { + File nextFile = filesIterator.next(); + if (filesInTreeForVersion.contains(nextFile.getName())) { + // 4. And remove it so that it is associated with the earlier version + filesForVersion.add(nextFile); + filesIterator.remove(); + } + } + }); + + // 5. Associate whatever is left with the current version. + partitionedFiles.put(currentVersion, mutableAllFilesInCheckout); + + return partitionedFiles; + } + + /** + * Ensure the upstream git remote is up-to-date. The upstream is whatever git remote references `elastic/elasticsearch`. + * @param gitWrapper used to call `git` + */ + private static void findAndUpdateUpstreamRemote(GitWrapper gitWrapper) { + LOGGER.info("Finding upstream git remote"); + // We need to ensure the tags are up-to-date. 
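// (Context, using an assumed example of `git remote -v` output rather than real repository data:
//    origin    git@github.com:someuser/elasticsearch.git (fetch)
//    upstream  https://github.com/elastic/elasticsearch.git (fetch)
//  listRemotes(), added in GitWrapper below, keeps only the "(fetch)" lines and maps remote name
//  to URL, so the stream below simply picks whichever remote URL contains "elastic/elasticsearch".)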
Find the correct remote to use + String upstream = gitWrapper.listRemotes() + .entrySet() + .stream() + .filter(entry -> entry.getValue().contains("elastic/elasticsearch")) + .findFirst() + .map(Map.Entry::getKey) + .orElseThrow( + () -> new GradleException( + "I need to ensure the git tags are up-to-date, but I couldn't find a git remote for [elastic/elasticsearch]" + ) + ); + + LOGGER.info("Updating remote [{}]", upstream); + // Now update the remote, and make sure we update the tags too + gitWrapper.updateRemote(upstream); + + LOGGER.info("Updating tags from [{}]", upstream); + gitWrapper.updateTags(upstream); + } + + /** + * This methods checks the supplied version and answers {@code false} if the fetching of git + * tags can be skipped, or {@code true} otherwise. + *
+ * The first version in a minor series will never have any preceding versions, so there's no + * need to fetch tags and examine the repository state in the past. This applies when the + * version is a release version, a snapshot, or the first alpha version. Subsequent alphas, + * betas and release candidates need to check the previous prelease tags. + * + * @param versionString the version string to check + * @return whether fetching git tags is required + */ + @VisibleForTesting + static boolean needsGitTags(String versionString) { + if (versionString.endsWith(".0") || versionString.endsWith(".0-SNAPSHOT") || versionString.endsWith(".0-alpha1")) { + return false; + } + + return true; } @InputFiles diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/release/GitWrapper.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/release/GitWrapper.java new file mode 100644 index 0000000000000..9cfb649399a90 --- /dev/null +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/release/GitWrapper.java @@ -0,0 +1,89 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.gradle.internal.release; + +import org.gradle.process.ExecOperations; + +import java.io.ByteArrayOutputStream; +import java.nio.charset.StandardCharsets; +import java.util.Map; +import java.util.Objects; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +/** + * This class wraps certain {@code git} operations. This is partly for convenience, and partly so that these + * operations can be easily mocked in testing. + */ +public class GitWrapper { + + private final ExecOperations execOperations; + + public GitWrapper(ExecOperations execOperations) { + this.execOperations = execOperations; + } + + /** + * @return a mapping from remote names to remote URLs. + */ + public Map listRemotes() { + return runCommand("git", "remote", "-v").lines() + .filter(l -> l.contains("(fetch)")) + .map(line -> line.split("\\s+")) + .collect(Collectors.toMap(parts -> parts[0], parts -> parts[1])); + } + + String runCommand(String... args) { + final ByteArrayOutputStream stdout = new ByteArrayOutputStream(); + + execOperations.exec(spec -> { + // The redundant cast is to silence a compiler warning. + spec.setCommandLine((Object[]) args); + spec.setStandardOutput(stdout); + }); + + return stdout.toString(StandardCharsets.UTF_8); + } + + /** + * Updates the git repository from the specified remote + * @param remote the remote to use to update + */ + public void updateRemote(String remote) { + runCommand("git", "fetch", Objects.requireNonNull(remote)); + } + + /** + * Updates the git repository's tags from the specified remote + * @param remote the remote to use to update + */ + public void updateTags(String remote) { + runCommand("git", "fetch", "--tags", Objects.requireNonNull(remote)); + } + + /** + * Fetch all tags matching the specified pattern, returning them as {@link QualifiedVersion} instances. 
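// Usage sketch; the pattern and tag names here are illustrative assumptions, not output from a real checkout:
//   `git tag -l "v8.5*"` prints one matching tag name per line (v8.5.0, v8.5.1, v8.5.2, ...), and each
//   line is parsed with QualifiedVersion.of, so a caller could write, for example:
// Stream<QualifiedVersion> inSeries = gitWrapper.listVersions("v8.5*").sorted();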
+ * @param pattern the tag pattern to match + * @return matching versions + */ + public Stream listVersions(String pattern) { + return runCommand("git", "tag", "-l", pattern).lines().map(QualifiedVersion::of); + } + + /** + * Returns all files at the specified {@param path} for the state of the git repository at {@param ref}. + * + * @param ref the ref to use + * @param path the path to list + * @return A stream of file names. No path information is included. + */ + public Stream listFiles(String ref, String path) { + return runCommand("git", "ls-tree", "--name-only", "-r", ref, path).lines(); + } +} diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/release/QualifiedVersion.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/release/QualifiedVersion.java new file mode 100644 index 0000000000000..0cc579e68d52a --- /dev/null +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/release/QualifiedVersion.java @@ -0,0 +1,182 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.gradle.internal.release; + +import org.elasticsearch.gradle.Version; + +import java.util.Comparator; +import java.util.Objects; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +/** + * Encapsulates comparison and printing logic for an x.y.z version with optional qualifier. This class is very similar + * to {@link Version}, but it dissects the qualifier in such a way that is incompatible + * with how {@link Version} is used in the build. It also retains any qualifier (prerelease) information, and uses + * that information when comparing instances. + */ +public final class QualifiedVersion implements Comparable { + private final int major; + private final int minor; + private final int revision; + private final Qualifier qualifier; + + private static final Pattern pattern = Pattern.compile( + "^v? (\\d+) \\. (\\d+) \\. (\\d+) (?: - (alpha\\d+ | beta\\d+ | rc\\d+ | SNAPSHOT ) )? $", + Pattern.COMMENTS + ); + + private QualifiedVersion(int major, int minor, int revision, String qualifier) { + this.major = major; + this.minor = minor; + this.revision = revision; + this.qualifier = qualifier == null ? null : Qualifier.of(qualifier); + } + + /** + * Parses the supplied string into an object. + * @param s a version string in strict semver + * @return a new instance + */ + public static QualifiedVersion of(final String s) { + Objects.requireNonNull(s); + Matcher matcher = pattern.matcher(s); + if (matcher.matches() == false) { + throw new IllegalArgumentException("Invalid version format: '" + s + "'. Should be " + pattern); + } + + return new QualifiedVersion( + Integer.parseInt(matcher.group(1)), + Integer.parseInt(matcher.group(2)), + Integer.parseInt(matcher.group(3)), + matcher.group(4) + ); + } + + @Override + public String toString() { + return "%d.%d.%d%s".formatted(major, minor, revision, qualifier == null ? 
"" : "-" + qualifier); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + QualifiedVersion version = (QualifiedVersion) o; + return major == version.major + && minor == version.minor + && revision == version.revision + && Objects.equals(qualifier, version.qualifier); + } + + @Override + public int hashCode() { + return Objects.hash(major, minor, revision, qualifier); + } + + public int getMajor() { + return major; + } + + public int getMinor() { + return minor; + } + + public int getRevision() { + return revision; + } + + public boolean hasQualifier() { + return qualifier != null; + } + + public Qualifier getQualifier() { + return qualifier; + } + + public boolean isSnapshot() { + return this.qualifier != null && this.qualifier.level == QualifierLevel.SNAPSHOT; + } + + public QualifiedVersion withoutQualifier() { + return new QualifiedVersion(major, minor, revision, null); + } + + private static final Comparator COMPARATOR = Comparator.comparing((QualifiedVersion v) -> v.major) + .thenComparing(v -> v.minor) + .thenComparing(v -> v.revision) + .thenComparing((QualifiedVersion v) -> v.qualifier, Comparator.nullsLast(Comparator.naturalOrder())); + + @Override + public int compareTo(QualifiedVersion other) { + return COMPARATOR.compare(this, other); + } + + public boolean isBefore(QualifiedVersion other) { + return this.compareTo(other) < 0; + } + + private enum QualifierLevel { + alpha, + beta, + rc, + SNAPSHOT + } + + private static class Qualifier implements Comparable { + private final QualifierLevel level; + private final int number; + + private Qualifier(QualifierLevel level, int number) { + this.level = level; + this.number = number; + } + + private static final Comparator COMPARATOR = Comparator.comparing((Qualifier p) -> p.level).thenComparing(p -> p.number); + + @Override + public int compareTo(Qualifier other) { + return COMPARATOR.compare(this, other); + } + + private static Qualifier of(String qualifier) { + if ("SNAPSHOT".equals(qualifier)) { + return new Qualifier(QualifierLevel.SNAPSHOT, 0); + } + + Pattern pattern = Pattern.compile("^(alpha|beta|rc)(\\d+)$"); + Matcher matcher = pattern.matcher(qualifier); + if (matcher.find()) { + String level = matcher.group(1); + int number = Integer.parseInt(matcher.group(2)); + return new Qualifier(QualifierLevel.valueOf(level), number); + } else { + // This shouldn't happen - we check the format before this is called + throw new IllegalArgumentException("Invalid qualifier [" + qualifier + "] passed"); + } + } + + public String toString() { + return level == QualifierLevel.SNAPSHOT ? 
level.name() : level.name() + number; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Qualifier that = (Qualifier) o; + return number == that.number && level == that.level; + } + + @Override + public int hashCode() { + return Objects.hash(level, number); + } + } +} diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/release/ReleaseHighlightsGenerator.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/release/ReleaseHighlightsGenerator.java index 02b450aa22eea..97a210657f9c8 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/release/ReleaseHighlightsGenerator.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/release/ReleaseHighlightsGenerator.java @@ -12,12 +12,12 @@ import com.google.common.annotations.VisibleForTesting; -import org.elasticsearch.gradle.Version; import org.elasticsearch.gradle.VersionProperties; import java.io.File; import java.io.FileWriter; import java.io.IOException; +import java.io.Writer; import java.nio.file.Files; import java.util.ArrayList; import java.util.HashMap; @@ -32,12 +32,12 @@ public class ReleaseHighlightsGenerator { static void update(File templateFile, File outputFile, List entries) throws IOException { try (FileWriter output = new FileWriter(outputFile)) { - generateFile(VersionProperties.getElasticsearchVersion(), Files.readString(templateFile.toPath()), entries, output); + generateFile(QualifiedVersion.of(VersionProperties.getElasticsearch()), Files.readString(templateFile.toPath()), entries, output); } } @VisibleForTesting - static void generateFile(Version version, String templateFile, List entries, FileWriter outputWriter) + static void generateFile(QualifiedVersion version, String templateFile, List entries, Writer outputWriter) throws IOException { final List priorVersions = new ArrayList<>(); diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/release/ReleaseNotesGenerator.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/release/ReleaseNotesGenerator.java index 52995717a435a..f89b8e8514bac 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/release/ReleaseNotesGenerator.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/release/ReleaseNotesGenerator.java @@ -8,10 +8,10 @@ package org.elasticsearch.gradle.internal.release; -import com.google.common.annotations.VisibleForTesting; import groovy.text.SimpleTemplateEngine; -import org.elasticsearch.gradle.Version; -import org.elasticsearch.gradle.VersionProperties; + +import com.google.common.annotations.VisibleForTesting; + import org.gradle.api.GradleException; import java.io.File; @@ -23,10 +23,14 @@ import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.Set; import java.util.TreeMap; -import java.util.function.Predicate; import java.util.stream.Collectors; +import static java.util.Comparator.comparing; +import static java.util.stream.Collectors.groupingBy; +import static java.util.stream.Collectors.toList; + /** * Generates the release notes i.e. list of changes that have gone into this release. They are grouped by the * type of change, then by team area. 
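For orientation, the nested structure that buildChangelogBreakdown assembles in the hunk below can be sketched as follows; this is a minimal illustration reusing the QualifiedVersion and ChangelogEntry types introduced in this patch, assuming the usual java.util imports, and the variable name is invented:

    // newest release first, then change type ("breaking", "bug", ...), then team area
    Map<QualifiedVersion, Map<String, Map<String, List<ChangelogEntry>>>> breakdown =
        new TreeMap<>(Comparator.reverseOrder());
    // e.g. breakdown.get(QualifiedVersion.of("8.1.0")).get("enhancement").get("Search")
    // would hold the Search-area enhancements that shipped in 8.1.0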
@@ -44,21 +48,23 @@ public class ReleaseNotesGenerator { TYPE_LABELS.put("deprecation", "Deprecations"); TYPE_LABELS.put("enhancement", "Enhancements"); TYPE_LABELS.put("feature", "New features"); + TYPE_LABELS.put("new-aggregation", "New aggregation"); TYPE_LABELS.put("regression", "Regressions"); TYPE_LABELS.put("upgrade", "Upgrades"); } - static void update(File templateFile, File outputFile, List changelogs) throws IOException { + static void update(File templateFile, File outputFile, Map> changelogs) throws IOException { final String templateString = Files.readString(templateFile.toPath()); try (FileWriter output = new FileWriter(outputFile)) { - generateFile(VersionProperties.getElasticsearchVersion(), templateString, changelogs, output); + generateFile(templateString, changelogs, output); } } @VisibleForTesting - static void generateFile(Version version, String template, List changelogs, Writer outputWriter) throws IOException { - final var changelogsByVersionByTypeByArea = buildChangelogBreakdown(version, changelogs); + static void generateFile(String template, Map> changelogs, Writer outputWriter) + throws IOException { + final var changelogsByVersionByTypeByArea = buildChangelogBreakdown(changelogs); final Map bindings = new HashMap<>(); bindings.put("changelogsByVersionByTypeByArea", changelogsByVersionByTypeByArea); @@ -72,52 +78,40 @@ static void generateFile(Version version, String template, List } } - private static Map>>> buildChangelogBreakdown( - Version elasticsearchVersion, - List changelogs + private static Map>>> buildChangelogBreakdown( + Map> changelogsByVersion ) { - final Predicate includedInSameMinor = v -> v.getMajor() == elasticsearchVersion.getMajor() - && v.getMinor() == elasticsearchVersion.getMinor(); - - final Map>>> changelogsByVersionByTypeByArea = changelogs.stream() - .collect( - Collectors.groupingBy( - // Key changelog entries by the earlier version in which they were released - entry -> entry.getVersions() - .stream() - .map(v -> Version.fromString(v.replaceFirst("^v", ""))) - .filter(includedInSameMinor) - .sorted() - .findFirst() - .get(), - - // Generate a reverse-ordered map. Despite the IDE saying the type can be inferred, removing it - // causes the compiler to complain. - () -> new TreeMap>>>(Comparator.reverseOrder()), - - // Group changelogs entries by their change type - Collectors.groupingBy( + Map>>> changelogsByVersionByTypeByArea = new TreeMap<>( + Comparator.reverseOrder() + ); + + changelogsByVersion.forEach((version, changelogs) -> { + Map>> changelogsByTypeByArea = changelogs.stream() + .collect( + groupingBy( // Entries with breaking info are always put in the breaking section entry -> entry.getBreaking() == null ? entry.getType() : "breaking", TreeMap::new, // Group changelogs for each type by their team area - Collectors.groupingBy( + groupingBy( // `security` and `known-issue` areas don't need to supply an area entry -> entry.getType().equals("known-issue") || entry.getType().equals("security") ? "_all_" : entry.getArea(), TreeMap::new, - Collectors.toList() + toList() ) ) - ) - ); + ); + + changelogsByVersionByTypeByArea.put(version, changelogsByTypeByArea); + }); // Sort per-area changelogs by their summary text. 
Assumes that the underlying list is sortable changelogsByVersionByTypeByArea.forEach( (_version, byVersion) -> byVersion.forEach( (_type, byTeam) -> byTeam.forEach( - (_team, changelogsForTeam) -> changelogsForTeam.sort(Comparator.comparing(ChangelogEntry::getSummary)) + (_team, changelogsForTeam) -> changelogsForTeam.sort(comparing(ChangelogEntry::getSummary)) ) ) ); diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/release/ReleaseNotesIndexGenerator.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/release/ReleaseNotesIndexGenerator.java new file mode 100644 index 0000000000000..2f2e013027759 --- /dev/null +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/release/ReleaseNotesIndexGenerator.java @@ -0,0 +1,64 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.gradle.internal.release; + +import groovy.text.SimpleTemplateEngine; + +import com.google.common.annotations.VisibleForTesting; + +import java.io.File; +import java.io.FileWriter; +import java.io.IOException; +import java.io.Writer; +import java.nio.file.Files; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.TreeSet; +import java.util.stream.Collectors; + +import static java.util.Comparator.reverseOrder; + +/** + * This class ensures that the release notes index page has the appropriate anchors and include directives + * for the current repository version. + */ +public class ReleaseNotesIndexGenerator { + + static void update(Set versions, File indexTemplate, File indexFile) throws IOException { + try (FileWriter indexFileWriter = new FileWriter(indexFile)) { + generateFile(versions, Files.readString(indexTemplate.toPath()), indexFileWriter); + } + } + + @VisibleForTesting + static void generateFile(Set versionsSet, String indexTemplate, Writer outputWriter) throws IOException { + final Set versions = new TreeSet<>(reverseOrder()); + + // For the purpose of generating the index, snapshot versions are the same as released versions. Prerelease versions are not. + versionsSet.stream().map(v -> v.isSnapshot() ? v.withoutQualifier() : v).forEach(versions::add); + + final List includeVersions = versions.stream() + .map(v -> v.hasQualifier() ? v.toString() : v.getMajor() + "." 
+ v.getMinor()) + .distinct() + .collect(Collectors.toList()); + + final Map bindings = new HashMap<>(); + bindings.put("versions", versions); + bindings.put("includeVersions", includeVersions); + + try { + final SimpleTemplateEngine engine = new SimpleTemplateEngine(); + engine.createTemplate(indexTemplate).make(bindings).writeTo(outputWriter); + } catch (ClassNotFoundException e) { + throw new RuntimeException(e); + } + } +} diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/release/ReleaseNotesIndexUpdater.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/release/ReleaseNotesIndexUpdater.java deleted file mode 100644 index 5403d1e03f303..0000000000000 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/release/ReleaseNotesIndexUpdater.java +++ /dev/null @@ -1,97 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.gradle.internal.release; - -import groovy.text.SimpleTemplateEngine; - -import com.google.common.annotations.VisibleForTesting; - -import org.elasticsearch.gradle.Version; -import org.elasticsearch.gradle.VersionProperties; - -import java.io.File; -import java.io.FileWriter; -import java.io.IOException; -import java.io.Writer; -import java.nio.file.Files; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.stream.Collectors; - -/** - * This class ensures that the release notes index page has the appropriate anchors and include directives - * for the current repository version. It achieves this by parsing out the existing entries and writing - * out the file again. - */ -public class ReleaseNotesIndexUpdater { - - static void update(File indexTemplate, File indexFile) throws IOException { - final List existingIndexLines = Files.readAllLines(indexFile.toPath()); - try (FileWriter indexFileWriter = new FileWriter(indexFile)) { - generateFile( - VersionProperties.getElasticsearchVersion(), - existingIndexLines, - Files.readString(indexTemplate.toPath()), - indexFileWriter - ); - } - } - - @VisibleForTesting - static void generateFile(Version version, List existingIndexLines, String indexTemplate, Writer outputWriter) - throws IOException { - final List existingVersions = existingIndexLines.stream() - .filter(line -> line.startsWith("* < line.replace("* <>", "")) - .distinct() - .collect(Collectors.toList()); - - final List existingIncludes = existingIndexLines.stream() - .filter(line -> line.startsWith("include::")) - .map(line -> line.replace("include::release-notes/", "").replace(".asciidoc[]", "")) - .distinct() - .collect(Collectors.toList()); - - final String versionString = version.toString(); - - if (existingVersions.contains(versionString) == false) { - int insertionIndex = existingVersions.size() - 1; - while (insertionIndex > 0 && Version.fromString(existingVersions.get(insertionIndex)).before(version)) { - insertionIndex -= 1; - } - existingVersions.add(insertionIndex, versionString); - } - - final String includeString = version.getMajor() + "." 
+ version.getMinor(); - - if (existingIncludes.contains(includeString) == false) { - int insertionIndex = existingIncludes.size() - 1; - while (insertionIndex > 0 && Version.fromString(ensurePatchVersion(existingIncludes.get(insertionIndex))).before(version)) { - insertionIndex -= 1; - } - existingIncludes.add(insertionIndex, includeString); - } - - final Map bindings = new HashMap<>(); - bindings.put("existingVersions", existingVersions); - bindings.put("existingIncludes", existingIncludes); - - try { - final SimpleTemplateEngine engine = new SimpleTemplateEngine(); - engine.createTemplate(indexTemplate).make(bindings).writeTo(outputWriter); - } catch (ClassNotFoundException e) { - throw new RuntimeException(e); - } - } - - private static String ensurePatchVersion(String version) { - return version.matches("^\\d+\\.\\d+\\.\\d+.*$") ? version : version + ".0"; - } -} diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/release/ReleaseToolsPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/release/ReleaseToolsPlugin.java index d7d85504a0178..368df62b90921 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/release/ReleaseToolsPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/release/ReleaseToolsPlugin.java @@ -40,9 +40,11 @@ public ReleaseToolsPlugin(ProjectLayout projectLayout) { @Override public void apply(Project project) { - project.getPluginManager().apply(PrecommitTaskPlugin.class); + project.getPluginManager().apply(PrecommitTaskPlugin.class); final Directory projectDirectory = projectLayout.getProjectDirectory(); + final Version version = VersionProperties.getElasticsearchVersion(); + final FileTree yamlFiles = projectDirectory.dir("docs/changelog") .getAsFileTree() .matching(new PatternSet().include("**/*.yml", "**/*.yaml")); @@ -65,8 +67,6 @@ public void apply(Project project) { }); project.getTasks().register("generateReleaseNotes", GenerateReleaseNotesTask.class).configure(task -> { - final Version version = VersionProperties.getElasticsearchVersion(); - task.setGroup("Documentation"); task.setDescription("Generates release notes from changelog files held in this checkout"); task.setChangelogs(yamlFiles); @@ -92,6 +92,6 @@ public void apply(Project project) { task.dependsOn(validateChangelogsTask); }); - project.getTasks().named("precommit").configure(task -> task.dependsOn(validateChangelogsTask)); + project.getTasks().named("precommit").configure(task -> task.dependsOn(validateChangelogsTask)); } } diff --git a/build-tools-internal/src/main/resources/changelog-schema.json b/build-tools-internal/src/main/resources/changelog-schema.json index a2dfc5ecd306f..433c841ebbc8a 100644 --- a/build-tools-internal/src/main/resources/changelog-schema.json +++ b/build-tools-internal/src/main/resources/changelog-schema.json @@ -103,14 +103,6 @@ "type": "string", "minLength": 1 }, - "versions": { - "type": "array", - "items": { - "type": "string", - "pattern": "^v?\\d+\\.\\d+\\.\\d+$", - "minItems": 1 - } - }, "highlight": { "$ref": "#/definitions/Highlight" }, @@ -123,8 +115,7 @@ }, "required": [ "type", - "summary", - "versions" + "summary" ] }, "Highlight": { diff --git a/build-tools-internal/src/main/resources/templates/release-highlights.asciidoc b/build-tools-internal/src/main/resources/templates/release-highlights.asciidoc index 40b828d609745..6d349ac5c26c5 100644 --- a/build-tools-internal/src/main/resources/templates/release-highlights.asciidoc +++ 
b/build-tools-internal/src/main/resources/templates/release-highlights.asciidoc @@ -8,7 +8,7 @@ ifeval::[\\{release-state}\\"!=\\"unreleased\\"] For detailed information about this release, see the <> and <>. endif::[] -<% if (priorVersions.size > 0) { %> +<% if (priorVersions.size() > 0) { %> // Add previous release to the list Other versions: diff --git a/build-tools-internal/src/main/resources/templates/release-notes-index.asciidoc b/build-tools-internal/src/main/resources/templates/release-notes-index.asciidoc index 0b62b9b3f1e01..ba30aea3bf14e 100644 --- a/build-tools-internal/src/main/resources/templates/release-notes-index.asciidoc +++ b/build-tools-internal/src/main/resources/templates/release-notes-index.asciidoc @@ -6,7 +6,7 @@ This section summarizes the changes in each release. -<% existingVersions.each { print "* <>\n" } %> +<% versions.each { print "* <>\n" } %> -- -<% existingIncludes.each { print "include::release-notes/${ it }.asciidoc[]\n" } %> +<% includeVersions.each { print "include::release-notes/${ it }.asciidoc[]\n" } %> diff --git a/build-tools-internal/src/main/resources/templates/release-notes.asciidoc b/build-tools-internal/src/main/resources/templates/release-notes.asciidoc index 35384c8f4ce66..630af885aaaf5 100644 --- a/build-tools-internal/src/main/resources/templates/release-notes.asciidoc +++ b/build-tools-internal/src/main/resources/templates/release-notes.asciidoc @@ -1,13 +1,14 @@ <% for (version in changelogsByVersionByTypeByArea.keySet()) { -%>[[release-notes-$version]] -== {es} version $version -<% if (version.qualifier == "SNAPSHOT") { %> -coming[$version] +def unqualifiedVersion = version.withoutQualifier() +%>[[release-notes-$unqualifiedVersion]] +== {es} version ${unqualifiedVersion} +<% if (version.isSnapshot()) { %> +coming[$unqualifiedVersion] <% } %> Also see <>. <% if (changelogsByVersionByTypeByArea[version]["security"] != null) { %> [discrete] -[[security-updates-${version}]] +[[security-updates-${unqualifiedVersion}]] === Security updates <% for (change in changelogsByVersionByTypeByArea[version].remove("security").remove("_all_")) { @@ -16,7 +17,7 @@ Also see < [discrete] -[[known-issues-${version}]] +[[known-issues-${unqualifiedVersion}]] === Known issues <% for (change in changelogsByVersionByTypeByArea[version].remove("known-issue").remove("_all_")) { @@ -24,9 +25,9 @@ if (changelogsByVersionByTypeByArea[version]["known-issue"] != null) { %> } } for (changeType in changelogsByVersionByTypeByArea[version].keySet()) { %> -[[${ changeType }-${ version }]] +[[${ changeType }-${ unqualifiedVersion }]] [float] -=== ${ TYPE_LABELS[changeType] } +=== ${ TYPE_LABELS.getOrDefault(changeType, 'No mapping for TYPE_LABELS[' + changeType + ']') } <% for (team in changelogsByVersionByTypeByArea[version][changeType].keySet()) { print "\n${team}::\n"; @@ -41,5 +42,6 @@ for (changeType in changelogsByVersionByTypeByArea[version].keySet()) { %> } } } +print "\n\n" } %> diff --git a/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/release/BreakingChangesGeneratorTest.java b/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/release/BreakingChangesGeneratorTest.java new file mode 100644 index 0000000000000..007e18c84aa50 --- /dev/null +++ b/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/release/BreakingChangesGeneratorTest.java @@ -0,0 +1,83 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.gradle.internal.release; + +import org.junit.Test; + +import java.io.StringWriter; +import java.nio.charset.StandardCharsets; +import java.nio.file.Files; +import java.nio.file.Paths; +import java.util.List; +import java.util.Objects; + +import static org.hamcrest.Matchers.equalTo; +import static org.junit.Assert.assertThat; + +public class BreakingChangesGeneratorTest { + + /** + * Check that the breaking changes can be correctly generated. + */ + @Test + public void generateFile_rendersCorrectMarkup() throws Exception { + // given: + final String template = getResource("/templates/breaking-changes.asciidoc"); + final String expectedOutput = getResource( + "/org/elasticsearch/gradle/internal/release/BreakingChangesGeneratorTest.generateFile.asciidoc" + ); + final StringWriter writer = new StringWriter(); + final List entries = getEntries(); + + // when: + BreakingChangesGenerator.generateFile(QualifiedVersion.of("8.4.0-SNAPSHOT"), template, writer, entries); + final String actualOutput = writer.toString(); + + // then: + assertThat(actualOutput, equalTo(expectedOutput)); + } + + private List getEntries() { + ChangelogEntry entry1 = new ChangelogEntry(); + ChangelogEntry.Breaking breaking1 = new ChangelogEntry.Breaking(); + entry1.setBreaking(breaking1); + + breaking1.setNotable(true); + breaking1.setTitle("Breaking change number 1"); + breaking1.setArea("API"); + breaking1.setDetails("Breaking change details 1"); + breaking1.setImpact("Breaking change impact description 1"); + + ChangelogEntry entry2 = new ChangelogEntry(); + ChangelogEntry.Breaking breaking2 = new ChangelogEntry.Breaking(); + entry2.setBreaking(breaking2); + + breaking2.setNotable(true); + breaking2.setTitle("Breaking change number 2"); + breaking2.setArea("Cluster"); + breaking2.setDetails("Breaking change details 2"); + breaking2.setImpact("Breaking change impact description 2"); + + ChangelogEntry entry3 = new ChangelogEntry(); + ChangelogEntry.Breaking breaking3 = new ChangelogEntry.Breaking(); + entry3.setBreaking(breaking3); + + breaking3.setNotable(false); + breaking3.setTitle("Breaking change number 3"); + breaking3.setArea("Transform"); + breaking3.setDetails("Breaking change details 3"); + breaking3.setImpact("Breaking change impact description 3"); + + return List.of(entry1, entry2, entry3); + } + + private String getResource(String name) throws Exception { + return Files.readString(Paths.get(Objects.requireNonNull(this.getClass().getResource(name)).toURI()), StandardCharsets.UTF_8); + } +} diff --git a/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/release/GenerateReleaseNotesTaskTest.java b/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/release/GenerateReleaseNotesTaskTest.java new file mode 100644 index 0000000000000..12f2712549706 --- /dev/null +++ b/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/release/GenerateReleaseNotesTaskTest.java @@ -0,0 +1,307 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.gradle.internal.release; + +import org.elasticsearch.gradle.internal.test.GradleUnitTestCase; +import org.junit.Before; +import org.junit.Test; + +import java.io.File; +import java.util.Map; +import java.util.Set; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +import static org.hamcrest.Matchers.aMapWithSize; +import static org.hamcrest.Matchers.allOf; +import static org.hamcrest.Matchers.containsInAnyOrder; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasEntry; +import static org.hamcrest.Matchers.hasItem; +import static org.hamcrest.Matchers.hasKey; +import static org.hamcrest.Matchers.is; +import static org.mockito.Matchers.anyString; +import static org.mockito.Matchers.eq; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.verifyZeroInteractions; +import static org.mockito.Mockito.when; + +public class GenerateReleaseNotesTaskTest extends GradleUnitTestCase { + private GitWrapper gitWrapper; + + @Before + public void setup() { + this.gitWrapper = mock(GitWrapper.class); + } + + /** + * Check that the task does not update git tags if the current version is a snapshot of the first patch release. + */ + @Test + public void needsGitTags_withFirstSnapshot_returnsFalse() { + assertThat(GenerateReleaseNotesTask.needsGitTags("8.0.0-SNAPSHOT"), is(false)); + } + + /** + * Check that the task does update git tags if the current version is a snapshot after the first patch release. + */ + @Test + public void needsGitTags_withLaterSnapshot_returnsTrue() { + assertThat(GenerateReleaseNotesTask.needsGitTags("8.0.1-SNAPSHOT"), is(true)); + } + + /** + * Check that the task does not update git tags if the current version is the first patch release in a minor series. + */ + @Test + public void needsGitTags_withFirstPatchRelease_returnsFalse() { + assertThat(GenerateReleaseNotesTask.needsGitTags("8.0.0"), is(false)); + } + + /** + * Check that the task does update git tags if the current version is later than the first patch release in a minor series. + */ + @Test + public void needsGitTags_withLaterPatchRelease_returnsTrue() { + assertThat(GenerateReleaseNotesTask.needsGitTags("8.0.1"), is(true)); + } + + /** + * Check that the task does not update git tags if the current version is a first alpha prerelease. + */ + @Test + public void needsGitTags_withFirsAlphaRelease_returnsFalse() { + assertThat(GenerateReleaseNotesTask.needsGitTags("8.0.0-alpha1"), is(false)); + } + + /** + * Check that the task does update git tags if the current version is a prerelease after the first alpha. + */ + @Test + public void needsGitTags_withLaterAlphaRelease_returnsFalse() { + assertThat(GenerateReleaseNotesTask.needsGitTags("8.0.0-alpha2"), is(true)); + } + + /** + * Check that partitioning changelog files when the current version is a snapshot returns a map with a single entry. 
+ */ + @Test + public void partitionFiles_withSnapshot_returnsSingleMapping() { + // when: + Map> partitionedFiles = GenerateReleaseNotesTask.partitionFilesByVersion( + gitWrapper, + "8.0.0-SNAPSHOT", + Set.of(new File("docs/changelog/1234.yaml")) + ); + + // then: + assertThat(partitionedFiles, aMapWithSize(1)); + assertThat( + partitionedFiles, + hasEntry(equalTo(QualifiedVersion.of("8.0.0-SNAPSHOT")), hasItem(new File("docs/changelog/1234.yaml"))) + ); + verifyZeroInteractions(gitWrapper); + } + + /** + * Check that partitioning changelog files when the current version is the first release + * in a minor series returns a map with a single entry. + */ + @Test + public void partitionFiles_withFirstRevision_returnsSingleMapping() { + // when: + Map> partitionedFiles = GenerateReleaseNotesTask.partitionFilesByVersion( + gitWrapper, + "8.5.0", + Set.of(new File("docs/changelog/1234.yaml")) + ); + + // then: + assertThat(partitionedFiles, aMapWithSize(1)); + assertThat(partitionedFiles, hasEntry(equalTo(QualifiedVersion.of("8.5.0")), hasItem(new File("docs/changelog/1234.yaml")))); + verifyZeroInteractions(gitWrapper); + } + + /** + * Check that partitioning changelog files when the current version is the first alpha prerelease returns a map with a single entry. + */ + @Test + public void partitionFiles_withFirstAlpha_returnsSingleMapping() { + // when: + Map> partitionedFiles = GenerateReleaseNotesTask.partitionFilesByVersion( + gitWrapper, + "8.0.0-alpha1", + Set.of(new File("docs/changelog/1234.yaml")) + ); + + // then: + assertThat(partitionedFiles, aMapWithSize(1)); + assertThat(partitionedFiles, hasEntry(equalTo(QualifiedVersion.of("8.0.0-alpha1")), hasItem(new File("docs/changelog/1234.yaml")))); + verifyZeroInteractions(gitWrapper); + } + + /** + * Check that when deriving a lit of versions from git tags, the current unreleased version is included. + */ + @Test + public void getVersions_includesCurrentVersion() { + // given: + when(gitWrapper.listVersions(anyString())).thenReturn( + Stream.of("8.0.0-alpha1", "8.0.0-alpha2", "8.0.0-beta1", "8.0.0-beta2", "8.0.0-beta3", "8.0.0-rc1", "8.0.0") + .map(QualifiedVersion::of) + ); + + // when: + Set versions = GenerateReleaseNotesTask.getVersions(gitWrapper, "8.0.0-SNAPSHOT"); + + // then: + assertThat( + versions, + containsInAnyOrder( + Stream.of( + "8.0.0-alpha1", + "8.0.0-alpha2", + "8.0.0-beta1", + "8.0.0-beta2", + "8.0.0-beta3", + "8.0.0-rc1", + "8.0.0", + "8.0.0-SNAPSHOT" + ).map(QualifiedVersion::of).collect(Collectors.toList()).toArray(new QualifiedVersion[] {}) + ) + ); + } + + /** + * Check that the task partitions the list of files correctly by version for a prerelease. 
+ */ + @Test + public void partitionFiles_withPrerelease_correctlyGroupsByPrereleaseVersion() { + // given: + when(gitWrapper.listVersions(anyString())).thenReturn( + Stream.of("8.0.0-alpha1", "8.0.0-alpha2", "8.0.0-beta1", "8.0.0-beta2", "8.0.0-beta3", "8.0.0-rc1", "8.0.0") + .map(QualifiedVersion::of) + ); + when(gitWrapper.listFiles(eq("v8.0.0-alpha1"), anyString())).thenReturn( + Stream.of("docs/changelog/1_1234.yaml", "docs/changelog/1_5678.yaml") + ); + when(gitWrapper.listFiles(eq("v8.0.0-alpha2"), anyString())).thenReturn( + Stream.of("docs/changelog/2_1234.yaml", "docs/changelog/2_5678.yaml") + ); + + Set allFiles = Set.of( + new File("docs/changelog/1_1234.yaml"), + new File("docs/changelog/1_5678.yaml"), + new File("docs/changelog/2_1234.yaml"), + new File("docs/changelog/2_5678.yaml"), + new File("docs/changelog/3_1234.yaml"), + new File("docs/changelog/3_5678.yaml") + ); + + // when: + Map> partitionedFiles = GenerateReleaseNotesTask.partitionFilesByVersion(gitWrapper, "8.0.0-beta1", allFiles); + + // then: + verify(gitWrapper).listVersions("v8.0*"); + verify(gitWrapper).listFiles("v8.0.0-alpha1", "docs/changelog"); + verify(gitWrapper).listFiles("v8.0.0-alpha2", "docs/changelog"); + + assertThat( + partitionedFiles, + allOf( + aMapWithSize(3), + hasKey(QualifiedVersion.of("8.0.0-alpha1")), + hasKey(QualifiedVersion.of("8.0.0-alpha2")), + hasKey(QualifiedVersion.of("8.0.0-beta1")) + ) + ); + + assertThat( + partitionedFiles, + allOf( + hasEntry( + equalTo(QualifiedVersion.of("8.0.0-alpha1")), + containsInAnyOrder(new File("docs/changelog/1_1234.yaml"), new File("docs/changelog/1_5678.yaml")) + ), + hasEntry( + equalTo(QualifiedVersion.of("8.0.0-alpha2")), + containsInAnyOrder(new File("docs/changelog/2_1234.yaml"), new File("docs/changelog/2_5678.yaml")) + ), + hasEntry( + equalTo(QualifiedVersion.of("8.0.0-beta1")), + containsInAnyOrder(new File("docs/changelog/3_1234.yaml"), new File("docs/changelog/3_5678.yaml")) + ) + ) + ); + } + + /** + * Check that the task partitions the list of files correctly by version for a patch release. 
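// Background for this scenario, restating the partitionFilesByVersion logic shown earlier rather than
// adding new behaviour: for current version 8.0.2 the tag wildcard is "v8.0*", and since 8.0.2 has no
// qualifier the prerelease tags (8.0.0-alpha1, 8.0.0-rc1, ...) are filtered out, leaving only v8.0.0
// and v8.0.1 to be examined before the remaining files fall through to 8.0.2 itself.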
+ */ + @Test + public void partitionFiles_withPatchRelease_correctlyGroupsByPatchVersion() { + // given: + when(gitWrapper.listVersions(anyString())).thenReturn( + Stream.of("8.0.0-alpha1", "8.0.0-alpha2", "8.0.0-beta1", "8.0.0-rc1", "8.0.0", "8.0.1", "8.0.2", "8.1.0") + .map(QualifiedVersion::of) + ); + when(gitWrapper.listFiles(eq("v8.0.0"), anyString())).thenReturn( + Stream.of("docs/changelog/1_1234.yaml", "docs/changelog/1_5678.yaml") + ); + when(gitWrapper.listFiles(eq("v8.0.1"), anyString())).thenReturn( + Stream.of("docs/changelog/2_1234.yaml", "docs/changelog/2_5678.yaml") + ); + + Set allFiles = Set.of( + new File("docs/changelog/1_1234.yaml"), + new File("docs/changelog/1_5678.yaml"), + new File("docs/changelog/2_1234.yaml"), + new File("docs/changelog/2_5678.yaml"), + new File("docs/changelog/3_1234.yaml"), + new File("docs/changelog/3_5678.yaml") + ); + + // when: + Map> partitionedFiles = GenerateReleaseNotesTask.partitionFilesByVersion(gitWrapper, "8.0.2", allFiles); + + // then: + verify(gitWrapper).listVersions("v8.0*"); + verify(gitWrapper).listFiles("v8.0.0", "docs/changelog"); + verify(gitWrapper).listFiles("v8.0.1", "docs/changelog"); + + assertThat( + partitionedFiles, + allOf( + aMapWithSize(3), + hasKey(QualifiedVersion.of("8.0.0")), + hasKey(QualifiedVersion.of("8.0.1")), + hasKey(QualifiedVersion.of("8.0.2")) + ) + ); + + assertThat( + partitionedFiles, + allOf( + hasEntry( + equalTo(QualifiedVersion.of("8.0.0")), + containsInAnyOrder(new File("docs/changelog/1_1234.yaml"), new File("docs/changelog/1_5678.yaml")) + ), + hasEntry( + equalTo(QualifiedVersion.of("8.0.1")), + containsInAnyOrder(new File("docs/changelog/2_1234.yaml"), new File("docs/changelog/2_5678.yaml")) + ), + hasEntry( + equalTo(QualifiedVersion.of("8.0.2")), + containsInAnyOrder(new File("docs/changelog/3_1234.yaml"), new File("docs/changelog/3_5678.yaml")) + ) + ) + ); + } +} diff --git a/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/release/ReleaseHighlightsGeneratorTest.java b/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/release/ReleaseHighlightsGeneratorTest.java new file mode 100644 index 0000000000000..22bf411fe73b3 --- /dev/null +++ b/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/release/ReleaseHighlightsGeneratorTest.java @@ -0,0 +1,77 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.gradle.internal.release; + +import org.junit.Test; + +import java.io.StringWriter; +import java.nio.charset.StandardCharsets; +import java.nio.file.Files; +import java.nio.file.Paths; +import java.util.List; +import java.util.Objects; + +import static org.hamcrest.Matchers.equalTo; +import static org.junit.Assert.assertThat; + +public class ReleaseHighlightsGeneratorTest { + + /** + * Check that the release highlights can be correctly generated. 
+ */ + @Test + public void generateFile_rendersCorrectMarkup() throws Exception { + // given: + final String template = getResource("/templates/release-highlights.asciidoc"); + final String expectedOutput = getResource( + "/org/elasticsearch/gradle/internal/release/ReleaseHighlightsGeneratorTest.generateFile.asciidoc" + ); + final StringWriter writer = new StringWriter(); + final List entries = getEntries(); + + // when: + ReleaseHighlightsGenerator.generateFile(QualifiedVersion.of("8.4.0-SNAPSHOT"), template, entries, writer); + final String actualOutput = writer.toString(); + + // then: + assertThat(actualOutput, equalTo(expectedOutput)); + } + + private List getEntries() { + ChangelogEntry entry1 = new ChangelogEntry(); + ChangelogEntry.Highlight highlight1 = new ChangelogEntry.Highlight(); + entry1.setHighlight(highlight1); + + highlight1.setNotable(true); + highlight1.setTitle("Notable release highlight number 1"); + highlight1.setBody("Notable release body number 1"); + + ChangelogEntry entry2 = new ChangelogEntry(); + ChangelogEntry.Highlight highlight2 = new ChangelogEntry.Highlight(); + entry2.setHighlight(highlight2); + + highlight2.setNotable(true); + highlight2.setTitle("Notable release highlight number 2"); + highlight2.setBody("Notable release body number 2"); + + ChangelogEntry entry3 = new ChangelogEntry(); + ChangelogEntry.Highlight highlight3 = new ChangelogEntry.Highlight(); + entry3.setHighlight(highlight3); + + highlight3.setNotable(false); + highlight3.setTitle("Notable release highlight number 3"); + highlight3.setBody("Notable release body number 3"); + + return List.of(entry1, entry2, entry3); + } + + private String getResource(String name) throws Exception { + return Files.readString(Paths.get(Objects.requireNonNull(this.getClass().getResource(name)).toURI()), StandardCharsets.UTF_8); + } +} diff --git a/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/release/ReleaseNotesGeneratorTest.java b/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/release/ReleaseNotesGeneratorTest.java new file mode 100644 index 0000000000000..2700bb8a35d37 --- /dev/null +++ b/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/release/ReleaseNotesGeneratorTest.java @@ -0,0 +1,126 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.gradle.internal.release; + +import org.junit.Test; + +import java.io.StringWriter; +import java.nio.charset.StandardCharsets; +import java.nio.file.Files; +import java.nio.file.Paths; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.Set; + +import static org.hamcrest.Matchers.equalTo; +import static org.junit.Assert.assertThat; + +public class ReleaseNotesGeneratorTest { + + /** + * Check that the release notes can be correctly generated. 
+ */ + @Test + public void generateFile_rendersCorrectMarkup() throws Exception { + // given: + final String template = getResource("/templates/release-notes.asciidoc"); + final String expectedOutput = getResource( + "/org/elasticsearch/gradle/internal/release/ReleaseNotesGeneratorTest.generateFile.asciidoc" + ); + final StringWriter writer = new StringWriter(); + final Map> entries = getEntries(); + + // when: + ReleaseNotesGenerator.generateFile(template, entries, writer); + final String actualOutput = writer.toString(); + + // then: + assertThat(actualOutput, equalTo(expectedOutput)); + } + + private Map> getEntries() { + final Set entries_8_2_0 = new HashSet<>(); + entries_8_2_0.addAll(buildEntries(1, 2)); + entries_8_2_0.addAll(buildEntries(2, 2)); + entries_8_2_0.addAll(buildEntries(3, 2)); + + final Set entries_8_1_0 = new HashSet<>(); + entries_8_1_0.addAll(buildEntries(4, 2)); + entries_8_1_0.addAll(buildEntries(5, 2)); + entries_8_1_0.addAll(buildEntries(6, 2)); + + final Set entries_8_0_0 = new HashSet<>(); + entries_8_0_0.addAll(buildEntries(7, 2)); + entries_8_0_0.addAll(buildEntries(8, 2)); + entries_8_0_0.addAll(buildEntries(9, 2)); + + // Security issues are presented first in the notes + final ChangelogEntry securityEntry = new ChangelogEntry(); + securityEntry.setArea("Security"); + securityEntry.setType("security"); + securityEntry.setSummary("Test security issue"); + entries_8_2_0.add(securityEntry); + + // known issues are presented after security issues + final ChangelogEntry knownIssue = new ChangelogEntry(); + knownIssue.setArea("Search"); + knownIssue.setType("known-issue"); + knownIssue.setSummary("Test known issue"); + entries_8_1_0.add(knownIssue); + + final Map> result = new HashMap<>(); + + result.put(QualifiedVersion.of("8.2.0-SNAPSHOT"), entries_8_2_0); + result.put(QualifiedVersion.of("8.1.0"), entries_8_1_0); + result.put(QualifiedVersion.of("8.0.0"), entries_8_0_0); + + return result; + } + + private List buildEntries(int seed, int count) { + // Sample of possible areas from `changelog-schema.json` + final List areas = List.of("Aggregation", "Cluster", "Indices", "Mappings", "Search", "Security"); + // Possible change types, with `breaking`, `breaking-java`, `known-issue` and `security` removed. 
+ final List types = List.of("bug", "deprecation", "enhancement", "feature", "new-aggregation", "regression", "upgrade"); + + final String area = areas.get(seed % areas.size()); + final String type = types.get(seed % types.size()); + + final List entries = new ArrayList<>(count); + + int base = seed * 1000; + + for (int i = 0; i < count; i++) { + + final ChangelogEntry e = new ChangelogEntry(); + e.setPr(base++); + e.setArea(area); + e.setSummary("Test changelog entry " + seed + "_" + i); + e.setType(type); + + List issues = new ArrayList<>(count); + for (int j = 0; j <= i; j++) { + issues.add(base++); + } + e.setIssues(issues); + + entries.add(e); + } + + return entries; + } + + private String getResource(String name) throws Exception { + return Files.readString(Paths.get(Objects.requireNonNull(this.getClass().getResource(name)).toURI()), StandardCharsets.UTF_8); + } +} diff --git a/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/release/ReleaseNotesIndexGeneratorTest.java b/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/release/ReleaseNotesIndexGeneratorTest.java new file mode 100644 index 0000000000000..10ffcf41857ef --- /dev/null +++ b/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/release/ReleaseNotesIndexGeneratorTest.java @@ -0,0 +1,62 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.gradle.internal.release; + +import org.junit.Test; + +import java.io.StringWriter; +import java.nio.charset.StandardCharsets; +import java.nio.file.Files; +import java.nio.file.Paths; +import java.util.Objects; +import java.util.Set; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +import static org.hamcrest.Matchers.equalTo; +import static org.junit.Assert.assertThat; + +public class ReleaseNotesIndexGeneratorTest { + + /** + * Check that a release notes index can be generated. 
+ */ + @Test + public void generateFile_rendersCorrectMarkup() throws Exception { + // given: + final Set versions = Stream.of( + "8.0.0-alpha1", + "8.0.0-beta2", + "8.0.0-rc3", + "8.0.0", + "8.0.1", + "8.0.2", + "8.1.0", + "8.1.1", + "8.2.0-SNAPSHOT" + ).map(QualifiedVersion::of).collect(Collectors.toSet()); + + final String template = getResource("/templates/release-notes-index.asciidoc"); + final String expectedOutput = getResource( + "/org/elasticsearch/gradle/internal/release/ReleaseNotesIndexGeneratorTest.generateFile.asciidoc" + ); + final StringWriter writer = new StringWriter(); + + // when: + ReleaseNotesIndexGenerator.generateFile(versions, template, writer); + final String actualOutput = writer.toString(); + + // then: + assertThat(actualOutput, equalTo(expectedOutput)); + } + + private String getResource(String name) throws Exception { + return Files.readString(Paths.get(Objects.requireNonNull(this.getClass().getResource(name)).toURI()), StandardCharsets.UTF_8); + } +} diff --git a/build-tools-internal/src/test/resources/org/elasticsearch/gradle/internal/release/BreakingChangesGeneratorTest.generateFile.asciidoc b/build-tools-internal/src/test/resources/org/elasticsearch/gradle/internal/release/BreakingChangesGeneratorTest.generateFile.asciidoc new file mode 100644 index 0000000000000..4a61c2de4016f --- /dev/null +++ b/build-tools-internal/src/test/resources/org/elasticsearch/gradle/internal/release/BreakingChangesGeneratorTest.generateFile.asciidoc @@ -0,0 +1,81 @@ +[[migrating-8.4]] +== Migrating to 8.4 +++++ +8.4 +++++ + +This section discusses the changes that you need to be aware of when migrating +your application to {es} 8.4. + +See also <> and <>. + +coming[8.4.0-SNAPSHOT] + +//NOTE: The notable-breaking-changes tagged regions are re-used in the +//Installation and Upgrade Guide + +[discrete] +[[breaking-changes-8.4]] +=== Breaking changes + +The following changes in {es} 8.4 might affect your applications +and prevent them from operating normally. +Before upgrading to 8.4 review these changes and take the described steps +to mitigate the impact. + +NOTE: Breaking changes introduced in minor versions are +normally limited to security and bug fixes. +Significant changes in behavior are deprecated in a minor release and +the old behavior is supported until the next major release. +To find out if you are using any deprecated functionality, +enable <>. 
+ +// tag::notable-breaking-changes[] +[discrete] +[[breaking_84_api]] +==== API + +[[breaking_change_number_1]] +.Breaking change number 1 +[%collapsible] +==== +*Details* + +Breaking change details 1 + +*Impact* + +Breaking change impact description 1 +==== +// end::notable-breaking-changes[] + +// tag::notable-breaking-changes[] +[discrete] +[[breaking_84_cluster]] +==== Cluster + +[[breaking_change_number_2]] +.Breaking change number 2 +[%collapsible] +==== +*Details* + +Breaking change details 2 + +*Impact* + +Breaking change impact description 2 +==== +// end::notable-breaking-changes[] + +[discrete] +[[breaking_84_transform]] +==== Transform + +[[breaking_change_number_3]] +.Breaking change number 3 +[%collapsible] +==== +*Details* + +Breaking change details 3 + +*Impact* + +Breaking change impact description 3 +==== + diff --git a/build-tools-internal/src/test/resources/org/elasticsearch/gradle/internal/release/ReleaseHighlightsGeneratorTest.generateFile.asciidoc b/build-tools-internal/src/test/resources/org/elasticsearch/gradle/internal/release/ReleaseHighlightsGeneratorTest.generateFile.asciidoc new file mode 100644 index 0000000000000..25438cbe72b8a --- /dev/null +++ b/build-tools-internal/src/test/resources/org/elasticsearch/gradle/internal/release/ReleaseHighlightsGeneratorTest.generateFile.asciidoc @@ -0,0 +1,40 @@ +[[release-highlights]] +== What's new in {minor-version} + +coming::[{minor-version}] + +Here are the highlights of what's new and improved in {es} {minor-version}! +ifeval::[\{release-state}\"!=\"unreleased\"] +For detailed information about this release, see the <> and +<>. +endif::[] + +// Add previous release to the list +Other versions: + +{ref-bare}/8.4/release-highlights.html[8.4] +| {ref-bare}/8.3/release-highlights.html[8.3] +| {ref-bare}/8.2/release-highlights.html[8.2] +| {ref-bare}/8.1/release-highlights.html[8.1] +| {ref-bare}/8.0/release-highlights.html[8.0] + +// tag::notable-highlights[] + +[discrete] +[[notable_release_highlight_number_1]] +=== Notable release highlight number 1 +Notable release body number 1 + +[discrete] +[[notable_release_highlight_number_2]] +=== Notable release highlight number 2 +Notable release body number 2 + +// end::notable-highlights[] + + +[discrete] +[[notable_release_highlight_number_3]] +=== Notable release highlight number 3 +Notable release body number 3 + diff --git a/build-tools-internal/src/test/resources/org/elasticsearch/gradle/internal/release/ReleaseNotesGeneratorTest.generateFile.asciidoc b/build-tools-internal/src/test/resources/org/elasticsearch/gradle/internal/release/ReleaseNotesGeneratorTest.generateFile.asciidoc new file mode 100644 index 0000000000000..d81a3d8df7f49 --- /dev/null +++ b/build-tools-internal/src/test/resources/org/elasticsearch/gradle/internal/release/ReleaseNotesGeneratorTest.generateFile.asciidoc @@ -0,0 +1,105 @@ +[[release-notes-8.2.0]] +== {es} version 8.2.0 + +coming[8.2.0] + +Also see <>. 
+ +[discrete] +[[security-updates-8.2.0]] +=== Security updates + +* Test security issue + +[[deprecation-8.2.0]] +[float] +=== Deprecations + +Cluster:: +* Test changelog entry 1_0 {es-pull}1000[#1000] (issue: {es-issue}1001[#1001]) +* Test changelog entry 1_1 {es-pull}1002[#1002] (issues: {es-issue}1003[#1003], {es-issue}1004[#1004]) + +[[enhancement-8.2.0]] +[float] +=== Enhancements + +Indices:: +* Test changelog entry 2_0 {es-pull}2000[#2000] (issue: {es-issue}2001[#2001]) +* Test changelog entry 2_1 {es-pull}2002[#2002] (issues: {es-issue}2003[#2003], {es-issue}2004[#2004]) + +[[feature-8.2.0]] +[float] +=== New features + +Mappings:: +* Test changelog entry 3_0 {es-pull}3000[#3000] (issue: {es-issue}3001[#3001]) +* Test changelog entry 3_1 {es-pull}3002[#3002] (issues: {es-issue}3003[#3003], {es-issue}3004[#3004]) + + +[[release-notes-8.1.0]] +== {es} version 8.1.0 + +Also see <>. + +[discrete] +[[known-issues-8.1.0]] +=== Known issues + +* Test known issue + +[[new-aggregation-8.1.0]] +[float] +=== New aggregation + +Search:: +* Test changelog entry 4_0 {es-pull}4000[#4000] (issue: {es-issue}4001[#4001]) +* Test changelog entry 4_1 {es-pull}4002[#4002] (issues: {es-issue}4003[#4003], {es-issue}4004[#4004]) + +[[regression-8.1.0]] +[float] +=== Regressions + +Security:: +* Test changelog entry 5_0 {es-pull}5000[#5000] (issue: {es-issue}5001[#5001]) +* Test changelog entry 5_1 {es-pull}5002[#5002] (issues: {es-issue}5003[#5003], {es-issue}5004[#5004]) + +[[upgrade-8.1.0]] +[float] +=== Upgrades + +Aggregation:: +* Test changelog entry 6_0 {es-pull}6000[#6000] (issue: {es-issue}6001[#6001]) +* Test changelog entry 6_1 {es-pull}6002[#6002] (issues: {es-issue}6003[#6003], {es-issue}6004[#6004]) + + +[[release-notes-8.0.0]] +== {es} version 8.0.0 + +Also see <>. + +[[bug-8.0.0]] +[float] +=== Bug fixes + +Cluster:: +* Test changelog entry 7_0 {es-pull}7000[#7000] (issue: {es-issue}7001[#7001]) +* Test changelog entry 7_1 {es-pull}7002[#7002] (issues: {es-issue}7003[#7003], {es-issue}7004[#7004]) + +[[deprecation-8.0.0]] +[float] +=== Deprecations + +Indices:: +* Test changelog entry 8_0 {es-pull}8000[#8000] (issue: {es-issue}8001[#8001]) +* Test changelog entry 8_1 {es-pull}8002[#8002] (issues: {es-issue}8003[#8003], {es-issue}8004[#8004]) + +[[enhancement-8.0.0]] +[float] +=== Enhancements + +Mappings:: +* Test changelog entry 9_0 {es-pull}9000[#9000] (issue: {es-issue}9001[#9001]) +* Test changelog entry 9_1 {es-pull}9002[#9002] (issues: {es-issue}9003[#9003], {es-issue}9004[#9004]) + + + diff --git a/build-tools-internal/src/test/resources/org/elasticsearch/gradle/internal/release/ReleaseNotesIndexGeneratorTest.generateFile.asciidoc b/build-tools-internal/src/test/resources/org/elasticsearch/gradle/internal/release/ReleaseNotesIndexGeneratorTest.generateFile.asciidoc new file mode 100644 index 0000000000000..64c07d930930f --- /dev/null +++ b/build-tools-internal/src/test/resources/org/elasticsearch/gradle/internal/release/ReleaseNotesIndexGeneratorTest.generateFile.asciidoc @@ -0,0 +1,27 @@ +[[es-release-notes]] += Release notes + +[partintro] +-- + +This section summarizes the changes in each release. 
+ +* <> +* <> +* <> +* <> +* <> +* <> +* <> +* <> +* <> + +-- + +include::release-notes/8.2.asciidoc[] +include::release-notes/8.1.asciidoc[] +include::release-notes/8.0.asciidoc[] +include::release-notes/8.0.0-rc3.asciidoc[] +include::release-notes/8.0.0-beta2.asciidoc[] +include::release-notes/8.0.0-alpha1.asciidoc[] + diff --git a/docs/changelog/70635.yaml b/docs/changelog/70635.yaml index d877a7bbba0d5..e118ae31c27cb 100644 --- a/docs/changelog/70635.yaml +++ b/docs/changelog/70635.yaml @@ -3,6 +3,3 @@ summary: Tighten up write permissions in Docker image area: Packaging type: enhancement issues: [] -versions: - - v8.0.0 - - v7.15.0 diff --git a/docs/changelog/75981.yaml b/docs/changelog/75981.yaml index 8b7d8a03136d6..ac1018f8ecb0c 100644 --- a/docs/changelog/75981.yaml +++ b/docs/changelog/75981.yaml @@ -3,7 +3,3 @@ summary: Bump bundled JDK to 16.0.2 area: Packaging type: upgrade issues: [] -versions: - - v8.0.0 - - v7.14.1 - - v7.15.0 diff --git a/docs/changelog/76192.yaml b/docs/changelog/76192.yaml index 6d0f3d7262065..4639d84e122e6 100644 --- a/docs/changelog/76192.yaml +++ b/docs/changelog/76192.yaml @@ -5,6 +5,3 @@ type: enhancement issues: - 76148 - 74327 -versions: - - v8.0.0 - - v7.15.0 From eec1a181c9347e5a61bcc01926078b7b38443ac3 Mon Sep 17 00:00:00 2001 From: Rene Groeschke Date: Wed, 1 Sep 2021 09:32:01 +0200 Subject: [PATCH 051/128] Document general gradle build guidelines (#76551) This introduces a general document to track guidelines for working on and with the elasticsearch gradle build Co-authored-by: Rory Hunter --- BUILDING.md | 120 ++++++++++++++++++++++++++++++++++++++++++++++++ CONTRIBUTING.md | 2 + 2 files changed, 122 insertions(+) create mode 100644 BUILDING.md diff --git a/BUILDING.md b/BUILDING.md new file mode 100644 index 0000000000000..0d2534c0bf07a --- /dev/null +++ b/BUILDING.md @@ -0,0 +1,120 @@ +Building Elasticsearch with Gradle +============================= + +Elasticsearch is built using the [Gradle](https://gradle.org/) open source build tools. + +This document provides a general guidelines for using and working on the elasticsearch build logic. + +## Build logic organisation + +The Elasticsearch project contains 3 build-related projects that are included into the Elasticsearch build as a [composite build](https://docs.gradle.org/current/userguide/composite_builds.html). + +### `build-conventions` + +This project contains build conventions that are applied to all elasticsearch projects. + +### `build-tools` + +This project contains all build logic that we publish for third party elasticsearch plugin authors. +We provide the following plugins: + +- `elasticsearch.esplugin` - A gradle plugin for building an elasticsearch plugin. +- `elasticsearch.testclusters` - A gradle plugin for setting up es clusters for testing within a build. + +This project is published as part of the elasticsearch release and accessible by +`org.elasticsearch.gradle:build-tools:`. +These build tools are also used by the `elasticsearch-hadoop` project maintained by elastic. + +### `build-tools-internal` + +This project contains all elasticsearch project specific build logic that is not meant to be shared +with other internal or external projects. + +## Build guidelines + +This is an intentionally small set of guidelines to build users and authors +to ensure we keep the build consistent. We also publish elasticsearch build logic +as `build-tools` to be usuable by thirdparty elasticsearch plugin authors. This is +also used by other elastic teams like `elasticsearch-hadoop`. 
+Breaking changes should therefore be avoided and an appropriate deprecation cycle +should be followed. + +### Stay up to date + +The elasticsearch build usually uses the latest Gradle GA release. We stay as close to the +latest Gradle releases as possible. In certain cases an update is blocked by a breaking behaviour +in Gradle. We're usually in contact with the gradle team here or working on a fix +in our build logic to resolve this. + +**The Elasticsearch build will fail if any deprecated Gradle API is used.** + +### Make a change in the build + +There are a few guidelines to follow that should make your life easier to make changes to the elasticsearch build. +Please add a member of the `es-delivery` team as a reviewer if you're making non-trivial changes to the build. + +#### Custom Plugin and Task implementations + +Build logic that is used across multiple subprojects should considered to be moved into a Gradle plugin with according Gradle task implmentation. +Elasticsearch specific build logic is located in the `build-tools-internal` subproject including integration tests. + +- Gradle plugins and Tasks should be written in Java +- We use a groovy and spock for setting up Gradle integration tests. + (see https://github.com/elastic/elasticsearch/blob/master/build-tools/src/testFixtures/groovy/org/elasticsearch/gradle/fixtures/AbstractGradleFuncTest.groovy) + +#### Declaring tasks + +The elasticsearch build makes use of the [task avoidance API](https://docs.gradle.org/current/userguide/task_configuration_avoidance.html) to keep the configuration time of the build low. + +When declaring tasks (in build scripts or custom plugins) this means that we want to _register_ a task like: + + tasks.register('someTask') { ... } + +instead of eagerly _creating_ the task: + + task someTask { ... } + +The major difference between these two syntaxes is, that the configuration block of an registered task will only be executed when the task is actually created due to the build requires that task to run. The configuration block of an eagerly created tasks will be executed immediately. + +By actually doing less in the gradle configuration time as only creating tasks that are requested as part of the build and by only running the configurations for those requested tasks, using the task avoidance api contributes a major part in keeping our build fast. + +#### Adding additional integration tests + +Additional integration tests for a certain elasticsearch modules that are specific to certain cluster configuration can be declared in a separate so called `qa` subproject of your module. + +The benefit of a dedicated project for these tests are: +- `qa` projects are dedicated two specific usecases and easier to maintain +- It keeps the specific test logic separated from the common test logic. +- You can run those tests in parallel to other projects of the build. + +#### Using test fixtures + +Sometimes we want to share test fixtures to setup the code under test across multiple projects. There are basically two ways doing so. + +Ideally we would use the build-in [java-test-fixtures](https://docs.gradle.org/current/userguide/java_testing.html#sec:java_test_fixtures) gradle plugin. +This plugin relies on having a separate sourceSet for the test fixtures code. + +In the elasticsearch codebase we have test fixtures and actual tests within the same sourceSet. Therefore we introduced the `elasticsearch.internal-test-artifact` plugin to provides another build artifact of your project based on the `test` sourceSet. 
+ + +This artifact can be resolved by the consumer project as shown in the example below: + +``` +dependencies { + //add the test fixtures of `:providing-project` to testImplementation configuration. + testImplementation(testArtifact(project(":fixture-providing-project'))) +} +``` + +This test artifact mechanism makes use of the concept of [component capabilities](https://docs.gradle.org/current/userguide/component_capabilities.html) +similar to how the gradle build-in `java-test-fixtures` plugin works. + +`testArtifact` is a shortcut declared in the elasticsearch build. Alternatively you can declare the dependency via + +``` +dependencies { + testImplementation(project(":fixture-providing-project')) { + requireCapabilities("org.elasticsearch.gradle:fixture-providing-project-test-artifacts") + } +} +``` diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 3e08516f86814..73457580c1b29 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -69,6 +69,8 @@ cycle. * Lines that are not part of your change should not be edited (e.g. don't format unchanged lines, don't reorder existing imports) * Add the appropriate [license headers](#license-headers) to any new files +* For contributions involving the elasticsearch build you can find (details about the build setup in the +* [BUILDING](BUILDING.md) file ### Submitting your changes From 5ffada50f67aa47fbc03c0d9b8aaeefa0fff9179 Mon Sep 17 00:00:00 2001 From: Rene Groeschke Date: Wed, 1 Sep 2021 10:16:28 +0200 Subject: [PATCH 052/128] Fix YamlRestTestPluginFuncTest on Windows (#77063) * Fix YamlRestTestPluginFuncTest on Windows - need to normalize output before comparison - should fix #77060 * Minor cleanup after review --- .../gradle/test/YamlRestTestPluginFuncTest.groovy | 12 +++++------- 1 file changed, 5 insertions(+), 7 deletions(-) diff --git a/build-tools/src/integTest/groovy/org/elasticsearch/gradle/test/YamlRestTestPluginFuncTest.groovy b/build-tools/src/integTest/groovy/org/elasticsearch/gradle/test/YamlRestTestPluginFuncTest.groovy index af3d7f4e71c92..30aad8054b08f 100644 --- a/build-tools/src/integTest/groovy/org/elasticsearch/gradle/test/YamlRestTestPluginFuncTest.groovy +++ b/build-tools/src/integTest/groovy/org/elasticsearch/gradle/test/YamlRestTestPluginFuncTest.groovy @@ -24,16 +24,14 @@ class YamlRestTestPluginFuncTest extends AbstractGradleFuncTest { when: def result = gradleRunner("dependencies").build() - + def output = normalized(result.output) then: - result.output.contains(""" + output.contains(""" restTestSpecs -\\--- org.elasticsearch:rest-api-spec:${VersionProperties.elasticsearch} FAILED -""") - result.output.contains(""" +/--- org.elasticsearch:rest-api-spec:${VersionProperties.elasticsearch} FAILED""") + output.contains(normalized(""" yamlRestTestImplementation - Implementation only dependencies for source set 'yaml rest test'. (n) -\\--- org.elasticsearch.test:framework:8.0.0-SNAPSHOT (n) -""") +/--- org.elasticsearch.test:framework:${VersionProperties.elasticsearch} (n)""")) } def "yamlRestTest does nothing when there are no tests"() { From e827ec607867340b4470843681b76011e691a6e1 Mon Sep 17 00:00:00 2001 From: Rory Hunter Date: Wed, 1 Sep 2021 09:29:01 +0100 Subject: [PATCH 053/128] Handle cgroups v2 in `OsProbe` (#76883) Closes #76812. `OsProbe` was only capable of handle cgroup data in the v1 format. However, Debian 11 uses cgroups v2 by default, and Elasticsearch isn't capable of reporting any cgroup information. Therefore, add support for the v2 layout. 
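For context, the two layouts are easy to tell apart from `/proc/self/cgroup`: under cgroups v2 it contains a single `0::/<group>` line, whereas v1 lists one entry per controller (`cpu`, `cpuacct`, `memory`, ...). A minimal sketch of that check, using only standard JDK file APIs (an illustration of the idea, not the code added by this patch):

```java
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.List;

class CgroupLayoutSketch {
    /** Returns true if the process appears to be running under the unified cgroups v2 hierarchy. */
    static boolean isCgroupsV2() throws IOException {
        List<String> lines = Files.readAllLines(Path.of("/proc/self/cgroup"));
        // v2 exposes a single unified hierarchy, e.g. "0::/system.slice/foo.service";
        // v1 lists one line per controller, e.g. "9:cpu,cpuacct:/..." plus others.
        return lines.size() == 1 && lines.get(0).startsWith("0::");
    }
}
```

The change below goes further than this sketch: when the unified hierarchy is detected it reads `cpu.max`, `cpu.stat`, `memory.max` and `memory.current` from `/sys/fs/cgroup` instead of the per-controller v1 files.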
--- .../packaging/test/DockerTests.java | 1 - .../org/elasticsearch/monitor/os/OsProbe.java | 263 ++++++++++++++---- .../elasticsearch/bootstrap/security.policy | 6 + .../monitor/os/OsProbeTests.java | 125 +++++---- 4 files changed, 292 insertions(+), 103 deletions(-) diff --git a/qa/os/src/test/java/org/elasticsearch/packaging/test/DockerTests.java b/qa/os/src/test/java/org/elasticsearch/packaging/test/DockerTests.java index d5fd91a427bac..45689f0fed691 100644 --- a/qa/os/src/test/java/org/elasticsearch/packaging/test/DockerTests.java +++ b/qa/os/src/test/java/org/elasticsearch/packaging/test/DockerTests.java @@ -849,7 +849,6 @@ public void test131InitProcessHasCorrectPID() { /** * Check that Elasticsearch reports per-node cgroup information. */ - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/76812") public void test140CgroupOsStatsAreAvailable() throws Exception { waitForElasticsearch(installation, USERNAME, PASSWORD); diff --git a/server/src/main/java/org/elasticsearch/monitor/os/OsProbe.java b/server/src/main/java/org/elasticsearch/monitor/os/OsProbe.java index a20b7c7e27396..87a068f8ba7ff 100644 --- a/server/src/main/java/org/elasticsearch/monitor/os/OsProbe.java +++ b/server/src/main/java/org/elasticsearch/monitor/os/OsProbe.java @@ -30,25 +30,31 @@ import java.util.regex.Matcher; import java.util.regex.Pattern; import java.util.stream.Collectors; +import java.util.stream.Stream; /** * The {@link OsProbe} class retrieves information about the physical and swap size of the machine * memory, as well as the system load average and cpu load. * - * In some exceptional cases, it's possible the underlying native methods used by + *
<p>In some exceptional cases, it's possible the underlying native methods used by
 * {@link #getFreePhysicalMemorySize()}, {@link #getTotalPhysicalMemorySize()},
 * {@link #getFreeSwapSpaceSize()}, and {@link #getTotalSwapSpaceSize()} can return a
 * negative value. Because of this, we prevent those methods from returning negative values,
 * returning 0 instead.
 *
- * The OS can report a negative number in a number of cases:
- * - Non-supported OSes (HP-UX, or AIX)
- * - A failure of macOS to initialize host statistics
- * - An OS that does not support the {@code _SC_PHYS_PAGES} or {@code _SC_PAGE_SIZE} flags for the {@code sysconf()} linux kernel call
- * - An overflow of the product of {@code _SC_PHYS_PAGES} and {@code _SC_PAGE_SIZE}
- * - An error case retrieving these values from a linux kernel
- * - A non-standard libc implementation not implementing the required values
- * For a more exhaustive explanation, see https://github.com/elastic/elasticsearch/pull/42725
+ * <p>The OS can report a negative number in a number of cases:
+ *
+ * <ul>
+ *   <li>Non-supported OSes (HP-UX, or AIX)
+ *   <li>A failure of macOS to initialize host statistics
+ *   <li>An OS that does not support the {@code _SC_PHYS_PAGES} or {@code _SC_PAGE_SIZE} flags for the {@code sysconf()} linux kernel call
+ *   <li>An overflow of the product of {@code _SC_PHYS_PAGES} and {@code _SC_PAGE_SIZE}
+ *   <li>An error case retrieving these values from a linux kernel
+ *   <li>A non-standard libc implementation not implementing the required values
+ * </ul>
+ *
+ * <p>
For a more exhaustive explanation, see https://github.com/elastic/elasticsearch/pull/42725 */ public class OsProbe { @@ -178,7 +184,7 @@ final double[] getSystemLoadAverage() { final String procLoadAvg = readProcLoadavg(); assert procLoadAvg.matches("(\\d+\\.\\d+\\s+){3}\\d+/\\d+\\s+\\d+"); final String[] fields = procLoadAvg.split("\\s+"); - return new double[]{Double.parseDouble(fields[0]), Double.parseDouble(fields[1]), Double.parseDouble(fields[2])}; + return new double[] { Double.parseDouble(fields[0]), Double.parseDouble(fields[1]), Double.parseDouble(fields[2]) }; } catch (final IOException e) { if (logger.isDebugEnabled()) { logger.debug("error reading /proc/loadavg", e); @@ -192,7 +198,7 @@ final double[] getSystemLoadAverage() { } try { final double oneMinuteLoadAverage = (double) getSystemLoadAverage.invoke(osMxBean); - return new double[]{oneMinuteLoadAverage >= 0 ? oneMinuteLoadAverage : -1, -1, -1}; + return new double[] { oneMinuteLoadAverage >= 0 ? oneMinuteLoadAverage : -1, -1, -1 }; } catch (IllegalAccessException | InvocationTargetException e) { if (logger.isDebugEnabled()) { logger.debug("error reading one minute load average from operating system", e); @@ -318,6 +324,23 @@ String readSysFsCgroupCpuAcctCpuAcctUsage(final String controlGroup) throws IOEx return readSingleLine(PathUtils.get("/sys/fs/cgroup/cpuacct", controlGroup, "cpuacct.usage")); } + private long[] getCgroupV2CpuLimit(String controlGroup) throws IOException { + String entry = readCgroupV2CpuLimit(controlGroup); + String[] parts = entry.split("\\s+"); + assert parts.length == 2 : "Expected 2 fields in [cpu.max]"; + + long[] values = new long[2]; + + values[0] = "max".equals(parts[0]) ? -1L : Long.parseLong(parts[0]); + values[1] = Long.parseLong(parts[1]); + return values; + } + + @SuppressForbidden(reason = "access /sys/fs/cgroup/cpu.max") + String readCgroupV2CpuLimit(String controlGroup) throws IOException { + return readSingleLine(PathUtils.get("/sys/fs/cgroup/", controlGroup, "cpu.max")); + } + /** * The total period of time in microseconds for how frequently the Elasticsearch control group's access to CPU resources will be * reallocated. @@ -454,6 +477,35 @@ String readSysFsCgroupMemoryLimitInBytes(final String controlGroup) throws IOExc return readSingleLine(PathUtils.get("/sys/fs/cgroup/memory", controlGroup, "memory.limit_in_bytes")); } + /** + * The maximum amount of user memory (including file cache). + * If there is no limit then some Linux versions return the maximum value that can be stored in an + * unsigned 64 bit number, and this will overflow a long, hence the result type is String. + * (The alternative would have been BigInteger but then it would not be possible to index + * the OS stats document into Elasticsearch without losing information, as BigInteger is + * not a supported Elasticsearch type.) + * + * @param controlGroup the control group for the Elasticsearch process for the {@code memory} subsystem + * @return the maximum amount of user memory (including file cache) + * @throws IOException if an I/O exception occurs reading {@code memory.limit_in_bytes} for the control group + */ + private String getCgroupV2MemoryLimitInBytes(final String controlGroup) throws IOException { + return readSysFsCgroupV2MemoryLimitInBytes(controlGroup); + } + + /** + * Returns the line from {@code memory.max} for the control group to which the Elasticsearch process belongs for the + * {@code memory} subsystem. This line represents the maximum amount of user memory (including file cache). 
+ * + * @param controlGroup the control group to which the Elasticsearch process belongs for the {@code memory} subsystem + * @return the line from {@code memory.max} + * @throws IOException if an I/O exception occurs reading {@code memory.max} for the control group + */ + @SuppressForbidden(reason = "access /sys/fs/cgroup/memory.max") + String readSysFsCgroupV2MemoryLimitInBytes(final String controlGroup) throws IOException { + return readSingleLine(PathUtils.get("/sys/fs/cgroup/", controlGroup, "memory.max")); + } + /** * The total current memory usage by processes in the cgroup (in bytes). * If there is no limit then some Linux versions return the maximum value that can be stored in an @@ -483,6 +535,35 @@ String readSysFsCgroupMemoryUsageInBytes(final String controlGroup) throws IOExc return readSingleLine(PathUtils.get("/sys/fs/cgroup/memory", controlGroup, "memory.usage_in_bytes")); } + /** + * The total current memory usage by processes in the cgroup (in bytes). + * If there is no limit then some Linux versions return the maximum value that can be stored in an + * unsigned 64 bit number, and this will overflow a long, hence the result type is String. + * (The alternative would have been BigInteger but then it would not be possible to index + * the OS stats document into Elasticsearch without losing information, as BigInteger is + * not a supported Elasticsearch type.) + * + * @param controlGroup the control group for the Elasticsearch process for the {@code memory} subsystem + * @return the total current memory usage by processes in the cgroup (in bytes) + * @throws IOException if an I/O exception occurs reading {@code memory.current} for the control group + */ + private String getCgroupV2MemoryUsageInBytes(final String controlGroup) throws IOException { + return readSysFsCgroupV2MemoryUsageInBytes(controlGroup); + } + + /** + * Returns the line from {@code memory.current} for the control group to which the Elasticsearch process belongs for the + * {@code memory} subsystem. This line represents the total current memory usage by processes in the cgroup (in bytes). + * + * @param controlGroup the control group to which the Elasticsearch process belongs for the {@code memory} subsystem + * @return the line from {@code memory.current} + * @throws IOException if an I/O exception occurs reading {@code memory.current} for the control group + */ + @SuppressForbidden(reason = "access /sys/fs/cgroup/memory.current") + String readSysFsCgroupV2MemoryUsageInBytes(final String controlGroup) throws IOException { + return readSingleLine(PathUtils.get("/sys/fs/cgroup/", controlGroup, "memory.current")); + } + /** * Checks if cgroup stats are available by checking for the existence of {@code /proc/self/cgroup}, {@code /sys/fs/cgroup/cpu}, * {@code /sys/fs/cgroup/cpuacct} and {@code /sys/fs/cgroup/memory}. 
@@ -490,20 +571,51 @@ String readSysFsCgroupMemoryUsageInBytes(final String controlGroup) throws IOExc * @return {@code true} if the stats are available, otherwise {@code false} */ @SuppressForbidden(reason = "access /proc/self/cgroup, /sys/fs/cgroup/cpu, /sys/fs/cgroup/cpuacct and /sys/fs/cgroup/memory") - boolean areCgroupStatsAvailable() { + boolean areCgroupStatsAvailable() throws IOException { if (Files.exists(PathUtils.get("/proc/self/cgroup")) == false) { return false; } - if (Files.exists(PathUtils.get("/sys/fs/cgroup/cpu")) == false) { - return false; - } - if (Files.exists(PathUtils.get("/sys/fs/cgroup/cpuacct")) == false) { - return false; + + List lines = readProcSelfCgroup(); + + // cgroup v2 + if (lines.size() == 1 && lines.get(0).startsWith("0::")) { + return Stream.of("/sys/fs/cgroup/cpu.stat", "/sys/fs/cgroup/memory.stat").allMatch(path -> Files.exists(PathUtils.get(path))); } - if (Files.exists(PathUtils.get("/sys/fs/cgroup/memory")) == false) { - return false; + + return Stream.of("/sys/fs/cgroup/cpu", "/sys/fs/cgroup/cpuacct", "/sys/fs/cgroup/memory") + .allMatch(path -> Files.exists(PathUtils.get(path))); + } + + /** + * The CPU statistics for all tasks in the Elasticsearch control group. + * + * @param controlGroup the control group to which the Elasticsearch process belongs for the {@code memory} subsystem + * @return the CPU statistics + * @throws IOException if an I/O exception occurs reading {@code cpu.stat} for the control group + */ + private Map getCgroupV2CpuStats(String controlGroup) throws IOException { + final List lines = readCgroupV2CpuStats(controlGroup); + final Map stats = new HashMap<>(); + + for (String line : lines) { + String[] parts = line.split("\\s+"); + assert parts.length == 2 : "Corrupt cpu.stat line: [" + line + "]"; + stats.put(parts[0], Long.parseLong(parts[1])); } - return true; + + final List expectedKeys = List.of("nr_periods", "nr_throttled", "system_usec", "throttled_usec", "usage_usec", "user_usec"); + expectedKeys.forEach(key -> { + assert stats.containsKey(key) : key; + assert stats.get(key) != -1 : stats.get(key); + }); + + return stats; + } + + @SuppressForbidden(reason = "access /sys/fs/cgroup/cpu.stat") + List readCgroupV2CpuStats(final String controlGroup) throws IOException { + return Files.readAllLines(PathUtils.get("/sys/fs/cgroup", controlGroup, "cpu.stat")); } /** @@ -515,45 +627,79 @@ private OsStats.Cgroup getCgroup() { try { if (areCgroupStatsAvailable() == false) { return null; - } else { - final Map controllerMap = getControlGroups(); - assert controllerMap.isEmpty() == false; + } + + final Map controllerMap = getControlGroups(); + assert controllerMap.isEmpty() == false; - final String cpuAcctControlGroup = controllerMap.get("cpuacct"); + final String cpuAcctControlGroup; + final long cgroupCpuAcctUsageNanos; + final long cgroupCpuAcctCpuCfsPeriodMicros; + final long cgroupCpuAcctCpuCfsQuotaMicros; + final String cpuControlGroup; + final OsStats.Cgroup.CpuStat cpuStat; + final String memoryControlGroup; + final String cgroupMemoryLimitInBytes; + final String cgroupMemoryUsageInBytes; + + if (controllerMap.size() == 1 && controllerMap.containsKey("")) { + // There's a single hierarchy for all controllers + cpuControlGroup = cpuAcctControlGroup = memoryControlGroup = controllerMap.get(""); + + // `cpuacct` was merged with `cpu` in v2 + final Map cpuStatsMap = getCgroupV2CpuStats(cpuControlGroup); + + cgroupCpuAcctUsageNanos = cpuStatsMap.get("usage_usec"); + + long[] cpuLimits = getCgroupV2CpuLimit(cpuControlGroup); + 
cgroupCpuAcctCpuCfsQuotaMicros = cpuLimits[0]; + cgroupCpuAcctCpuCfsPeriodMicros = cpuLimits[1]; + + cpuStat = new OsStats.Cgroup.CpuStat( + cpuStatsMap.get("nr_periods"), + cpuStatsMap.get("nr_throttled"), + cpuStatsMap.get("throttled_usec") + ); + + cgroupMemoryLimitInBytes = getCgroupV2MemoryLimitInBytes(memoryControlGroup); + cgroupMemoryUsageInBytes = getCgroupV2MemoryUsageInBytes(memoryControlGroup); + } else { + cpuAcctControlGroup = controllerMap.get("cpuacct"); if (cpuAcctControlGroup == null) { logger.debug("no [cpuacct] data found in cgroup stats"); return null; } - final long cgroupCpuAcctUsageNanos = getCgroupCpuAcctUsageNanos(cpuAcctControlGroup); + cgroupCpuAcctUsageNanos = getCgroupCpuAcctUsageNanos(cpuAcctControlGroup); - final String cpuControlGroup = controllerMap.get("cpu"); + cpuControlGroup = controllerMap.get("cpu"); if (cpuControlGroup == null) { logger.debug("no [cpu] data found in cgroup stats"); return null; } - final long cgroupCpuAcctCpuCfsPeriodMicros = getCgroupCpuAcctCpuCfsPeriodMicros(cpuControlGroup); - final long cgroupCpuAcctCpuCfsQuotaMicros = getCgroupCpuAcctCpuCfsQuotaMicros(cpuControlGroup); - final OsStats.Cgroup.CpuStat cpuStat = getCgroupCpuAcctCpuStat(cpuControlGroup); + cgroupCpuAcctCpuCfsPeriodMicros = getCgroupCpuAcctCpuCfsPeriodMicros(cpuControlGroup); + cgroupCpuAcctCpuCfsQuotaMicros = getCgroupCpuAcctCpuCfsQuotaMicros(cpuControlGroup); + cpuStat = getCgroupCpuAcctCpuStat(cpuControlGroup); - final String memoryControlGroup = controllerMap.get("memory"); + memoryControlGroup = controllerMap.get("memory"); if (memoryControlGroup == null) { logger.debug("no [memory] data found in cgroup stats"); return null; } - final String cgroupMemoryLimitInBytes = getCgroupMemoryLimitInBytes(memoryControlGroup); - final String cgroupMemoryUsageInBytes = getCgroupMemoryUsageInBytes(memoryControlGroup); - - return new OsStats.Cgroup( - cpuAcctControlGroup, - cgroupCpuAcctUsageNanos, - cpuControlGroup, - cgroupCpuAcctCpuCfsPeriodMicros, - cgroupCpuAcctCpuCfsQuotaMicros, - cpuStat, - memoryControlGroup, - cgroupMemoryLimitInBytes, - cgroupMemoryUsageInBytes); + cgroupMemoryLimitInBytes = getCgroupMemoryLimitInBytes(memoryControlGroup); + cgroupMemoryUsageInBytes = getCgroupMemoryUsageInBytes(memoryControlGroup); } + + return new OsStats.Cgroup( + cpuAcctControlGroup, + cgroupCpuAcctUsageNanos, + cpuControlGroup, + cgroupCpuAcctCpuCfsPeriodMicros, + cgroupCpuAcctCpuCfsQuotaMicros, + cpuStat, + memoryControlGroup, + cgroupMemoryLimitInBytes, + cgroupMemoryUsageInBytes + ); } catch (final IOException e) { logger.debug("error reading control group stats", e); return null; @@ -576,13 +722,14 @@ public static OsProbe getInstance() { OsInfo osInfo(long refreshInterval, int allocatedProcessors) throws IOException { return new OsInfo( - refreshInterval, - Runtime.getRuntime().availableProcessors(), - allocatedProcessors, - Constants.OS_NAME, - getPrettyName(), - Constants.OS_ARCH, - Constants.OS_VERSION); + refreshInterval, + Runtime.getRuntime().availableProcessors(), + allocatedProcessors, + Constants.OS_NAME, + getPrettyName(), + Constants.OS_ARCH, + Constants.OS_VERSION + ); } private String getPrettyName() throws IOException { @@ -594,11 +741,13 @@ private String getPrettyName() throws IOException { * wrapped in single- or double-quotes. 
*/ final List etcOsReleaseLines = readOsRelease(); - final List prettyNameLines = - etcOsReleaseLines.stream().filter(line -> line.startsWith("PRETTY_NAME")).collect(Collectors.toList()); + final List prettyNameLines = etcOsReleaseLines.stream() + .filter(line -> line.startsWith("PRETTY_NAME")) + .collect(Collectors.toList()); assert prettyNameLines.size() <= 1 : prettyNameLines; - final Optional maybePrettyNameLine = - prettyNameLines.size() == 1 ? Optional.of(prettyNameLines.get(0)) : Optional.empty(); + final Optional maybePrettyNameLine = prettyNameLines.size() == 1 + ? Optional.of(prettyNameLines.get(0)) + : Optional.empty(); if (maybePrettyNameLine.isPresent()) { // we trim since some OS contain trailing space, for example, Oracle Linux Server 6.9 has a trailing space after the quote final String trimmedPrettyNameLine = maybePrettyNameLine.get().trim(); @@ -695,11 +844,15 @@ boolean isDebian8() throws IOException { return Constants.LINUX && getPrettyName().equals("Debian GNU/Linux 8 (jessie)"); } + OsStats.Cgroup getCgroup(boolean isLinux) { + return isLinux ? getCgroup() : null; + } + public OsStats osStats() { final OsStats.Cpu cpu = new OsStats.Cpu(getSystemCpuPercent(), getSystemLoadAverage()); final OsStats.Mem mem = new OsStats.Mem(getTotalPhysicalMemorySize(), getFreePhysicalMemorySize()); final OsStats.Swap swap = new OsStats.Swap(getTotalSwapSpaceSize(), getFreeSwapSpaceSize()); - final OsStats.Cgroup cgroup = Constants.LINUX ? getCgroup() : null; + final OsStats.Cgroup cgroup = getCgroup(Constants.LINUX); return new OsStats(System.currentTimeMillis(), cpu, mem, swap, cgroup); } diff --git a/server/src/main/resources/org/elasticsearch/bootstrap/security.policy b/server/src/main/resources/org/elasticsearch/bootstrap/security.policy index 8ffc0d0eea47d..a410a7b5debc6 100644 --- a/server/src/main/resources/org/elasticsearch/bootstrap/security.policy +++ b/server/src/main/resources/org/elasticsearch/bootstrap/security.policy @@ -153,6 +153,12 @@ grant { permission java.io.FilePermission "/sys/fs/cgroup/memory", "read"; permission java.io.FilePermission "/sys/fs/cgroup/memory/-", "read"; + // control group v2 stats on linux + permission java.io.FilePermission "/sys/fs/cgroup/cpu.max", "read"; + permission java.io.FilePermission "/sys/fs/cgroup/cpu.stat", "read"; + permission java.io.FilePermission "/sys/fs/cgroup/memory.current", "read"; + permission java.io.FilePermission "/sys/fs/cgroup/memory.max", "read"; + // system memory on Linux systems affected by JDK bug (#66629) permission java.io.FilePermission "/proc/meminfo", "read"; }; diff --git a/server/src/test/java/org/elasticsearch/monitor/os/OsProbeTests.java b/server/src/test/java/org/elasticsearch/monitor/os/OsProbeTests.java index 8be3723d72cc3..ac802cf738500 100644 --- a/server/src/test/java/org/elasticsearch/monitor/os/OsProbeTests.java +++ b/server/src/test/java/org/elasticsearch/monitor/os/OsProbeTests.java @@ -43,7 +43,7 @@ public void testOsInfo() throws IOException { final OsProbe osProbe = new OsProbe() { @Override - List readOsRelease() throws IOException { + List readOsRelease() { assert Constants.LINUX : Constants.OS_NAME; if (prettyName != null) { final String quote = randomFrom("\"", "'", ""); @@ -78,8 +78,10 @@ public void testOsStats() { OsStats stats = osProbe.osStats(); assertNotNull(stats); assertThat(stats.getTimestamp(), greaterThan(0L)); - assertThat(stats.getCpu().getPercent(), anyOf(equalTo((short) -1), - is(both(greaterThanOrEqualTo((short) 0)).and(lessThanOrEqualTo((short) 100))))); + assertThat( + 
stats.getCpu().getPercent(), + anyOf(equalTo((short) -1), is(both(greaterThanOrEqualTo((short) 0)).and(lessThanOrEqualTo((short) 100)))) + ); double[] loadAverage = stats.getCpu().getLoadAverage(); if (loadAverage != null) { assertThat(loadAverage.length, equalTo(3)); @@ -173,16 +175,14 @@ String readProcLoadavg() { } public void testCgroupProbe() { - assumeTrue("test runs on Linux only", Constants.LINUX); - - final boolean areCgroupStatsAvailable = randomBoolean(); + final int availableCgroupsVersion = randomFrom(0, 1, 2); final String hierarchy = randomAlphaOfLength(16); - final OsProbe probe = buildStubOsProbe(areCgroupStatsAvailable, hierarchy); + final OsProbe probe = buildStubOsProbe(availableCgroupsVersion, hierarchy); final OsStats.Cgroup cgroup = probe.osStats().getCgroup(); - if (areCgroupStatsAvailable) { + if (availableCgroupsVersion > 0) { assertNotNull(cgroup); assertThat(cgroup.getCpuAcctControlGroup(), equalTo("/" + hierarchy)); assertThat(cgroup.getCpuAcctUsageNanos(), equalTo(364869866063112L)); @@ -200,17 +200,14 @@ public void testCgroupProbe() { } public void testCgroupProbeWithMissingCpuAcct() { - assumeTrue("test runs on Linux only", Constants.LINUX); - final String hierarchy = randomAlphaOfLength(16); // This cgroup data is missing a line about cpuacct - List procSelfCgroupLines = getProcSelfGroupLines(hierarchy) - .stream() + List procSelfCgroupLines = getProcSelfGroupLines(1, hierarchy).stream() .map(line -> line.replaceFirst(",cpuacct", "")) .collect(Collectors.toList()); - final OsProbe probe = buildStubOsProbe(true, hierarchy, procSelfCgroupLines); + final OsProbe probe = buildStubOsProbe(1, hierarchy, procSelfCgroupLines); final OsStats.Cgroup cgroup = probe.osStats().getCgroup(); @@ -218,18 +215,14 @@ public void testCgroupProbeWithMissingCpuAcct() { } public void testCgroupProbeWithMissingCpu() { - assumeTrue("test runs on Linux only", Constants.LINUX); - final String hierarchy = randomAlphaOfLength(16); // This cgroup data is missing a line about cpu - List procSelfCgroupLines = getProcSelfGroupLines(hierarchy) - .stream() + List procSelfCgroupLines = getProcSelfGroupLines(1, hierarchy).stream() .map(line -> line.replaceFirst(":cpu,", ":")) .collect(Collectors.toList()); - - final OsProbe probe = buildStubOsProbe(true, hierarchy, procSelfCgroupLines); + final OsProbe probe = buildStubOsProbe(1, hierarchy, procSelfCgroupLines); final OsStats.Cgroup cgroup = probe.osStats().getCgroup(); @@ -237,17 +230,14 @@ public void testCgroupProbeWithMissingCpu() { } public void testCgroupProbeWithMissingMemory() { - assumeTrue("test runs on Linux only", Constants.LINUX); - final String hierarchy = randomAlphaOfLength(16); // This cgroup data is missing a line about memory - List procSelfCgroupLines = getProcSelfGroupLines(hierarchy) - .stream() + List procSelfCgroupLines = getProcSelfGroupLines(1, hierarchy).stream() .filter(line -> line.contains(":memory:") == false) .collect(Collectors.toList()); - final OsProbe probe = buildStubOsProbe(true, hierarchy, procSelfCgroupLines); + final OsProbe probe = buildStubOsProbe(1, hierarchy, procSelfCgroupLines); final OsStats.Cgroup cgroup = probe.osStats().getCgroup(); @@ -255,6 +245,8 @@ public void testCgroupProbeWithMissingMemory() { } public void testGetTotalMemFromProcMeminfo() throws Exception { + int cgroupsVersion = randomFrom(1, 2); + // missing MemTotal line var meminfoLines = Arrays.asList( "MemFree: 8467692 kB", @@ -265,7 +257,7 @@ public void testGetTotalMemFromProcMeminfo() throws Exception { "Active: 43637908 kB", 
"Inactive: 8130280 kB" ); - OsProbe probe = buildStubOsProbe(true, "", List.of(), meminfoLines); + OsProbe probe = buildStubOsProbe(cgroupsVersion, "", List.of(), meminfoLines); assertThat(probe.getTotalMemFromProcMeminfo(), equalTo(0L)); // MemTotal line with invalid value @@ -279,7 +271,7 @@ public void testGetTotalMemFromProcMeminfo() throws Exception { "Active: 43637908 kB", "Inactive: 8130280 kB" ); - probe = buildStubOsProbe(true, "", List.of(), meminfoLines); + probe = buildStubOsProbe(cgroupsVersion, "", List.of(), meminfoLines); assertThat(probe.getTotalMemFromProcMeminfo(), equalTo(0L)); // MemTotal line with invalid unit @@ -293,7 +285,7 @@ public void testGetTotalMemFromProcMeminfo() throws Exception { "Active: 43637908 kB", "Inactive: 8130280 kB" ); - probe = buildStubOsProbe(true, "", List.of(), meminfoLines); + probe = buildStubOsProbe(cgroupsVersion, "", List.of(), meminfoLines); assertThat(probe.getTotalMemFromProcMeminfo(), equalTo(0L)); // MemTotal line with random valid value @@ -308,7 +300,7 @@ public void testGetTotalMemFromProcMeminfo() throws Exception { "Active: 43637908 kB", "Inactive: 8130280 kB" ); - probe = buildStubOsProbe(true, "", List.of(), meminfoLines); + probe = buildStubOsProbe(cgroupsVersion, "", List.of(), meminfoLines); assertThat(probe.getTotalMemFromProcMeminfo(), equalTo(memTotalInKb * 1024L)); } @@ -319,7 +311,13 @@ public void testGetTotalMemoryOnDebian8() throws Exception { assertThat(osProbe.getTotalPhysicalMemorySize(), greaterThan(0L)); } - private static List getProcSelfGroupLines(String hierarchy) { + private static List getProcSelfGroupLines(int cgroupsVersion, String hierarchy) { + // It doesn't really matter if cgroupsVersion == 0 here + + if (cgroupsVersion == 2) { + return List.of("0::/" + hierarchy); + } + return Arrays.asList( "10:freezer:/", "9:net_cls,net_prio:/", @@ -331,32 +329,40 @@ private static List getProcSelfGroupLines(String hierarchy) { "3:perf_event:/", "2:cpu,cpuacct,cpuset:/" + hierarchy, "1:name=systemd:/user.slice/user-1000.slice/session-2359.scope", - "0::/cgroup2"); + "0::/cgroup2" + ); } - private static OsProbe buildStubOsProbe(final boolean areCgroupStatsAvailable, final String hierarchy) { - List procSelfCgroupLines = getProcSelfGroupLines(hierarchy); + private static OsProbe buildStubOsProbe(final int availableCgroupsVersion, final String hierarchy) { + List procSelfCgroupLines = getProcSelfGroupLines(availableCgroupsVersion, hierarchy); - return buildStubOsProbe(areCgroupStatsAvailable, hierarchy, procSelfCgroupLines); + return buildStubOsProbe(availableCgroupsVersion, hierarchy, procSelfCgroupLines); } /** * Builds a test instance of OsProbe. Methods that ordinarily read from the filesystem are overridden to return values based upon * the arguments to this method. * - * @param areCgroupStatsAvailable whether or not cgroup data is available. Normally OsProbe establishes this for itself. + * @param availableCgroupsVersion what version of cgroups are available, 1 or 2, or 0 for no cgroups. Normally OsProbe establishes this + * for itself. * @param hierarchy a mock value used to generate a cgroup hierarchy. 
* @param procSelfCgroupLines the lines that will be used as the content of /proc/self/cgroup * @param procMeminfoLines lines that will be used as the content of /proc/meminfo * @return a test instance */ private static OsProbe buildStubOsProbe( - final boolean areCgroupStatsAvailable, + final int availableCgroupsVersion, final String hierarchy, List procSelfCgroupLines, List procMeminfoLines ) { return new OsProbe() { + @Override + OsStats.Cgroup getCgroup(boolean isLinux) { + // Pretend we're always on Linux so that we can run the cgroup tests + return super.getCgroup(true); + } + @Override List readProcSelfCgroup() { return procSelfCgroupLines; @@ -382,10 +388,7 @@ String readSysFsCgroupCpuAcctCpuAcctCfsQuota(String controlGroup) { @Override List readSysFsCgroupCpuAcctCpuStat(String controlGroup) { - return Arrays.asList( - "nr_periods 17992", - "nr_throttled 1311", - "throttled_time 139298645489"); + return Arrays.asList("nr_periods 17992", "nr_throttled 1311", "throttled_time 139298645489"); } @Override @@ -403,22 +406,50 @@ String readSysFsCgroupMemoryUsageInBytes(String controlGroup) { @Override boolean areCgroupStatsAvailable() { - return areCgroupStatsAvailable; + return availableCgroupsVersion > 0; } @Override - List readProcMeminfo() throws IOException { + List readProcMeminfo() { return procMeminfoLines; } + + @Override + String readSysFsCgroupV2MemoryLimitInBytes(String controlGroup) { + assertThat(controlGroup, equalTo("/" + hierarchy)); + // This is the highest value that can be stored in an unsigned 64 bit number, hence too big for long + return "18446744073709551615"; + } + + @Override + String readSysFsCgroupV2MemoryUsageInBytes(String controlGroup) { + assertThat(controlGroup, equalTo("/" + hierarchy)); + return "4796416"; + } + + @Override + List readCgroupV2CpuStats(String controlGroup) { + assertThat(controlGroup, equalTo("/" + hierarchy)); + return List.of( + "usage_usec 364869866063112", + "user_usec 34636", + "system_usec 9896", + "nr_periods 17992", + "nr_throttled 1311", + "throttled_usec 139298645489" + ); + } + + @Override + String readCgroupV2CpuLimit(String controlGroup) { + assertThat(controlGroup, equalTo("/" + hierarchy)); + return "50000 100000"; + } }; } - private static OsProbe buildStubOsProbe( - final boolean areCgroupStatsAvailable, - final String hierarchy, - List procSelfCgroupLines - ) { - return buildStubOsProbe(areCgroupStatsAvailable, hierarchy, procSelfCgroupLines, List.of()); + private static OsProbe buildStubOsProbe(final int availableCgroupsVersion, final String hierarchy, List procSelfCgroupLines) { + return buildStubOsProbe(availableCgroupsVersion, hierarchy, procSelfCgroupLines, List.of()); } } From 54a9c3ba058733efbe4663b1dd56435c87e85b0c Mon Sep 17 00:00:00 2001 From: Rory Hunter Date: Wed, 1 Sep 2021 09:42:45 +0100 Subject: [PATCH 054/128] Revert "Handle cgroups v2 in `OsProbe` (#76883)" This reverts commit e827ec607867340b4470843681b76011e691a6e1. 
--- .../packaging/test/DockerTests.java | 1 + .../org/elasticsearch/monitor/os/OsProbe.java | 263 ++++-------------- .../elasticsearch/bootstrap/security.policy | 6 - .../monitor/os/OsProbeTests.java | 125 ++++----- 4 files changed, 103 insertions(+), 292 deletions(-) diff --git a/qa/os/src/test/java/org/elasticsearch/packaging/test/DockerTests.java b/qa/os/src/test/java/org/elasticsearch/packaging/test/DockerTests.java index 45689f0fed691..d5fd91a427bac 100644 --- a/qa/os/src/test/java/org/elasticsearch/packaging/test/DockerTests.java +++ b/qa/os/src/test/java/org/elasticsearch/packaging/test/DockerTests.java @@ -849,6 +849,7 @@ public void test131InitProcessHasCorrectPID() { /** * Check that Elasticsearch reports per-node cgroup information. */ + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/76812") public void test140CgroupOsStatsAreAvailable() throws Exception { waitForElasticsearch(installation, USERNAME, PASSWORD); diff --git a/server/src/main/java/org/elasticsearch/monitor/os/OsProbe.java b/server/src/main/java/org/elasticsearch/monitor/os/OsProbe.java index 87a068f8ba7ff..a20b7c7e27396 100644 --- a/server/src/main/java/org/elasticsearch/monitor/os/OsProbe.java +++ b/server/src/main/java/org/elasticsearch/monitor/os/OsProbe.java @@ -30,31 +30,25 @@ import java.util.regex.Matcher; import java.util.regex.Pattern; import java.util.stream.Collectors; -import java.util.stream.Stream; /** * The {@link OsProbe} class retrieves information about the physical and swap size of the machine * memory, as well as the system load average and cpu load. * - *
<p>In some exceptional cases, it's possible the underlying native methods used by
+ * In some exceptional cases, it's possible the underlying native methods used by
 * {@link #getFreePhysicalMemorySize()}, {@link #getTotalPhysicalMemorySize()},
 * {@link #getFreeSwapSpaceSize()}, and {@link #getTotalSwapSpaceSize()} can return a
 * negative value. Because of this, we prevent those methods from returning negative values,
 * returning 0 instead.
 *
- * <p>The OS can report a negative number in a number of cases:
- *
- * <ul>
- *   <li>Non-supported OSes (HP-UX, or AIX)
- *   <li>A failure of macOS to initialize host statistics
- *   <li>An OS that does not support the {@code _SC_PHYS_PAGES} or {@code _SC_PAGE_SIZE} flags for the {@code sysconf()} linux kernel call
- *   <li>An overflow of the product of {@code _SC_PHYS_PAGES} and {@code _SC_PAGE_SIZE}
- *   <li>An error case retrieving these values from a linux kernel
- *   <li>A non-standard libc implementation not implementing the required values
- * </ul>
- *
- * <p>
For a more exhaustive explanation, see https://github.com/elastic/elasticsearch/pull/42725 + * The OS can report a negative number in a number of cases: + * - Non-supported OSes (HP-UX, or AIX) + * - A failure of macOS to initialize host statistics + * - An OS that does not support the {@code _SC_PHYS_PAGES} or {@code _SC_PAGE_SIZE} flags for the {@code sysconf()} linux kernel call + * - An overflow of the product of {@code _SC_PHYS_PAGES} and {@code _SC_PAGE_SIZE} + * - An error case retrieving these values from a linux kernel + * - A non-standard libc implementation not implementing the required values + * For a more exhaustive explanation, see https://github.com/elastic/elasticsearch/pull/42725 */ public class OsProbe { @@ -184,7 +178,7 @@ final double[] getSystemLoadAverage() { final String procLoadAvg = readProcLoadavg(); assert procLoadAvg.matches("(\\d+\\.\\d+\\s+){3}\\d+/\\d+\\s+\\d+"); final String[] fields = procLoadAvg.split("\\s+"); - return new double[] { Double.parseDouble(fields[0]), Double.parseDouble(fields[1]), Double.parseDouble(fields[2]) }; + return new double[]{Double.parseDouble(fields[0]), Double.parseDouble(fields[1]), Double.parseDouble(fields[2])}; } catch (final IOException e) { if (logger.isDebugEnabled()) { logger.debug("error reading /proc/loadavg", e); @@ -198,7 +192,7 @@ final double[] getSystemLoadAverage() { } try { final double oneMinuteLoadAverage = (double) getSystemLoadAverage.invoke(osMxBean); - return new double[] { oneMinuteLoadAverage >= 0 ? oneMinuteLoadAverage : -1, -1, -1 }; + return new double[]{oneMinuteLoadAverage >= 0 ? oneMinuteLoadAverage : -1, -1, -1}; } catch (IllegalAccessException | InvocationTargetException e) { if (logger.isDebugEnabled()) { logger.debug("error reading one minute load average from operating system", e); @@ -324,23 +318,6 @@ String readSysFsCgroupCpuAcctCpuAcctUsage(final String controlGroup) throws IOEx return readSingleLine(PathUtils.get("/sys/fs/cgroup/cpuacct", controlGroup, "cpuacct.usage")); } - private long[] getCgroupV2CpuLimit(String controlGroup) throws IOException { - String entry = readCgroupV2CpuLimit(controlGroup); - String[] parts = entry.split("\\s+"); - assert parts.length == 2 : "Expected 2 fields in [cpu.max]"; - - long[] values = new long[2]; - - values[0] = "max".equals(parts[0]) ? -1L : Long.parseLong(parts[0]); - values[1] = Long.parseLong(parts[1]); - return values; - } - - @SuppressForbidden(reason = "access /sys/fs/cgroup/cpu.max") - String readCgroupV2CpuLimit(String controlGroup) throws IOException { - return readSingleLine(PathUtils.get("/sys/fs/cgroup/", controlGroup, "cpu.max")); - } - /** * The total period of time in microseconds for how frequently the Elasticsearch control group's access to CPU resources will be * reallocated. @@ -477,35 +454,6 @@ String readSysFsCgroupMemoryLimitInBytes(final String controlGroup) throws IOExc return readSingleLine(PathUtils.get("/sys/fs/cgroup/memory", controlGroup, "memory.limit_in_bytes")); } - /** - * The maximum amount of user memory (including file cache). - * If there is no limit then some Linux versions return the maximum value that can be stored in an - * unsigned 64 bit number, and this will overflow a long, hence the result type is String. - * (The alternative would have been BigInteger but then it would not be possible to index - * the OS stats document into Elasticsearch without losing information, as BigInteger is - * not a supported Elasticsearch type.) 
- * - * @param controlGroup the control group for the Elasticsearch process for the {@code memory} subsystem - * @return the maximum amount of user memory (including file cache) - * @throws IOException if an I/O exception occurs reading {@code memory.limit_in_bytes} for the control group - */ - private String getCgroupV2MemoryLimitInBytes(final String controlGroup) throws IOException { - return readSysFsCgroupV2MemoryLimitInBytes(controlGroup); - } - - /** - * Returns the line from {@code memory.max} for the control group to which the Elasticsearch process belongs for the - * {@code memory} subsystem. This line represents the maximum amount of user memory (including file cache). - * - * @param controlGroup the control group to which the Elasticsearch process belongs for the {@code memory} subsystem - * @return the line from {@code memory.max} - * @throws IOException if an I/O exception occurs reading {@code memory.max} for the control group - */ - @SuppressForbidden(reason = "access /sys/fs/cgroup/memory.max") - String readSysFsCgroupV2MemoryLimitInBytes(final String controlGroup) throws IOException { - return readSingleLine(PathUtils.get("/sys/fs/cgroup/", controlGroup, "memory.max")); - } - /** * The total current memory usage by processes in the cgroup (in bytes). * If there is no limit then some Linux versions return the maximum value that can be stored in an @@ -535,35 +483,6 @@ String readSysFsCgroupMemoryUsageInBytes(final String controlGroup) throws IOExc return readSingleLine(PathUtils.get("/sys/fs/cgroup/memory", controlGroup, "memory.usage_in_bytes")); } - /** - * The total current memory usage by processes in the cgroup (in bytes). - * If there is no limit then some Linux versions return the maximum value that can be stored in an - * unsigned 64 bit number, and this will overflow a long, hence the result type is String. - * (The alternative would have been BigInteger but then it would not be possible to index - * the OS stats document into Elasticsearch without losing information, as BigInteger is - * not a supported Elasticsearch type.) - * - * @param controlGroup the control group for the Elasticsearch process for the {@code memory} subsystem - * @return the total current memory usage by processes in the cgroup (in bytes) - * @throws IOException if an I/O exception occurs reading {@code memory.current} for the control group - */ - private String getCgroupV2MemoryUsageInBytes(final String controlGroup) throws IOException { - return readSysFsCgroupV2MemoryUsageInBytes(controlGroup); - } - - /** - * Returns the line from {@code memory.current} for the control group to which the Elasticsearch process belongs for the - * {@code memory} subsystem. This line represents the total current memory usage by processes in the cgroup (in bytes). - * - * @param controlGroup the control group to which the Elasticsearch process belongs for the {@code memory} subsystem - * @return the line from {@code memory.current} - * @throws IOException if an I/O exception occurs reading {@code memory.current} for the control group - */ - @SuppressForbidden(reason = "access /sys/fs/cgroup/memory.current") - String readSysFsCgroupV2MemoryUsageInBytes(final String controlGroup) throws IOException { - return readSingleLine(PathUtils.get("/sys/fs/cgroup/", controlGroup, "memory.current")); - } - /** * Checks if cgroup stats are available by checking for the existence of {@code /proc/self/cgroup}, {@code /sys/fs/cgroup/cpu}, * {@code /sys/fs/cgroup/cpuacct} and {@code /sys/fs/cgroup/memory}. 
@@ -571,51 +490,20 @@ String readSysFsCgroupV2MemoryUsageInBytes(final String controlGroup) throws IOE * @return {@code true} if the stats are available, otherwise {@code false} */ @SuppressForbidden(reason = "access /proc/self/cgroup, /sys/fs/cgroup/cpu, /sys/fs/cgroup/cpuacct and /sys/fs/cgroup/memory") - boolean areCgroupStatsAvailable() throws IOException { + boolean areCgroupStatsAvailable() { if (Files.exists(PathUtils.get("/proc/self/cgroup")) == false) { return false; } - - List lines = readProcSelfCgroup(); - - // cgroup v2 - if (lines.size() == 1 && lines.get(0).startsWith("0::")) { - return Stream.of("/sys/fs/cgroup/cpu.stat", "/sys/fs/cgroup/memory.stat").allMatch(path -> Files.exists(PathUtils.get(path))); + if (Files.exists(PathUtils.get("/sys/fs/cgroup/cpu")) == false) { + return false; } - - return Stream.of("/sys/fs/cgroup/cpu", "/sys/fs/cgroup/cpuacct", "/sys/fs/cgroup/memory") - .allMatch(path -> Files.exists(PathUtils.get(path))); - } - - /** - * The CPU statistics for all tasks in the Elasticsearch control group. - * - * @param controlGroup the control group to which the Elasticsearch process belongs for the {@code memory} subsystem - * @return the CPU statistics - * @throws IOException if an I/O exception occurs reading {@code cpu.stat} for the control group - */ - private Map getCgroupV2CpuStats(String controlGroup) throws IOException { - final List lines = readCgroupV2CpuStats(controlGroup); - final Map stats = new HashMap<>(); - - for (String line : lines) { - String[] parts = line.split("\\s+"); - assert parts.length == 2 : "Corrupt cpu.stat line: [" + line + "]"; - stats.put(parts[0], Long.parseLong(parts[1])); + if (Files.exists(PathUtils.get("/sys/fs/cgroup/cpuacct")) == false) { + return false; } - - final List expectedKeys = List.of("nr_periods", "nr_throttled", "system_usec", "throttled_usec", "usage_usec", "user_usec"); - expectedKeys.forEach(key -> { - assert stats.containsKey(key) : key; - assert stats.get(key) != -1 : stats.get(key); - }); - - return stats; - } - - @SuppressForbidden(reason = "access /sys/fs/cgroup/cpu.stat") - List readCgroupV2CpuStats(final String controlGroup) throws IOException { - return Files.readAllLines(PathUtils.get("/sys/fs/cgroup", controlGroup, "cpu.stat")); + if (Files.exists(PathUtils.get("/sys/fs/cgroup/memory")) == false) { + return false; + } + return true; } /** @@ -627,79 +515,45 @@ private OsStats.Cgroup getCgroup() { try { if (areCgroupStatsAvailable() == false) { return null; - } - - final Map controllerMap = getControlGroups(); - assert controllerMap.isEmpty() == false; - - final String cpuAcctControlGroup; - final long cgroupCpuAcctUsageNanos; - final long cgroupCpuAcctCpuCfsPeriodMicros; - final long cgroupCpuAcctCpuCfsQuotaMicros; - final String cpuControlGroup; - final OsStats.Cgroup.CpuStat cpuStat; - final String memoryControlGroup; - final String cgroupMemoryLimitInBytes; - final String cgroupMemoryUsageInBytes; - - if (controllerMap.size() == 1 && controllerMap.containsKey("")) { - // There's a single hierarchy for all controllers - cpuControlGroup = cpuAcctControlGroup = memoryControlGroup = controllerMap.get(""); - - // `cpuacct` was merged with `cpu` in v2 - final Map cpuStatsMap = getCgroupV2CpuStats(cpuControlGroup); - - cgroupCpuAcctUsageNanos = cpuStatsMap.get("usage_usec"); - - long[] cpuLimits = getCgroupV2CpuLimit(cpuControlGroup); - cgroupCpuAcctCpuCfsQuotaMicros = cpuLimits[0]; - cgroupCpuAcctCpuCfsPeriodMicros = cpuLimits[1]; - - cpuStat = new OsStats.Cgroup.CpuStat( - 
cpuStatsMap.get("nr_periods"), - cpuStatsMap.get("nr_throttled"), - cpuStatsMap.get("throttled_usec") - ); - - cgroupMemoryLimitInBytes = getCgroupV2MemoryLimitInBytes(memoryControlGroup); - cgroupMemoryUsageInBytes = getCgroupV2MemoryUsageInBytes(memoryControlGroup); } else { - cpuAcctControlGroup = controllerMap.get("cpuacct"); + final Map controllerMap = getControlGroups(); + assert controllerMap.isEmpty() == false; + + final String cpuAcctControlGroup = controllerMap.get("cpuacct"); if (cpuAcctControlGroup == null) { logger.debug("no [cpuacct] data found in cgroup stats"); return null; } - cgroupCpuAcctUsageNanos = getCgroupCpuAcctUsageNanos(cpuAcctControlGroup); + final long cgroupCpuAcctUsageNanos = getCgroupCpuAcctUsageNanos(cpuAcctControlGroup); - cpuControlGroup = controllerMap.get("cpu"); + final String cpuControlGroup = controllerMap.get("cpu"); if (cpuControlGroup == null) { logger.debug("no [cpu] data found in cgroup stats"); return null; } - cgroupCpuAcctCpuCfsPeriodMicros = getCgroupCpuAcctCpuCfsPeriodMicros(cpuControlGroup); - cgroupCpuAcctCpuCfsQuotaMicros = getCgroupCpuAcctCpuCfsQuotaMicros(cpuControlGroup); - cpuStat = getCgroupCpuAcctCpuStat(cpuControlGroup); + final long cgroupCpuAcctCpuCfsPeriodMicros = getCgroupCpuAcctCpuCfsPeriodMicros(cpuControlGroup); + final long cgroupCpuAcctCpuCfsQuotaMicros = getCgroupCpuAcctCpuCfsQuotaMicros(cpuControlGroup); + final OsStats.Cgroup.CpuStat cpuStat = getCgroupCpuAcctCpuStat(cpuControlGroup); - memoryControlGroup = controllerMap.get("memory"); + final String memoryControlGroup = controllerMap.get("memory"); if (memoryControlGroup == null) { logger.debug("no [memory] data found in cgroup stats"); return null; } - cgroupMemoryLimitInBytes = getCgroupMemoryLimitInBytes(memoryControlGroup); - cgroupMemoryUsageInBytes = getCgroupMemoryUsageInBytes(memoryControlGroup); + final String cgroupMemoryLimitInBytes = getCgroupMemoryLimitInBytes(memoryControlGroup); + final String cgroupMemoryUsageInBytes = getCgroupMemoryUsageInBytes(memoryControlGroup); + + return new OsStats.Cgroup( + cpuAcctControlGroup, + cgroupCpuAcctUsageNanos, + cpuControlGroup, + cgroupCpuAcctCpuCfsPeriodMicros, + cgroupCpuAcctCpuCfsQuotaMicros, + cpuStat, + memoryControlGroup, + cgroupMemoryLimitInBytes, + cgroupMemoryUsageInBytes); } - - return new OsStats.Cgroup( - cpuAcctControlGroup, - cgroupCpuAcctUsageNanos, - cpuControlGroup, - cgroupCpuAcctCpuCfsPeriodMicros, - cgroupCpuAcctCpuCfsQuotaMicros, - cpuStat, - memoryControlGroup, - cgroupMemoryLimitInBytes, - cgroupMemoryUsageInBytes - ); } catch (final IOException e) { logger.debug("error reading control group stats", e); return null; @@ -722,14 +576,13 @@ public static OsProbe getInstance() { OsInfo osInfo(long refreshInterval, int allocatedProcessors) throws IOException { return new OsInfo( - refreshInterval, - Runtime.getRuntime().availableProcessors(), - allocatedProcessors, - Constants.OS_NAME, - getPrettyName(), - Constants.OS_ARCH, - Constants.OS_VERSION - ); + refreshInterval, + Runtime.getRuntime().availableProcessors(), + allocatedProcessors, + Constants.OS_NAME, + getPrettyName(), + Constants.OS_ARCH, + Constants.OS_VERSION); } private String getPrettyName() throws IOException { @@ -741,13 +594,11 @@ private String getPrettyName() throws IOException { * wrapped in single- or double-quotes. 
*/ final List etcOsReleaseLines = readOsRelease(); - final List prettyNameLines = etcOsReleaseLines.stream() - .filter(line -> line.startsWith("PRETTY_NAME")) - .collect(Collectors.toList()); + final List prettyNameLines = + etcOsReleaseLines.stream().filter(line -> line.startsWith("PRETTY_NAME")).collect(Collectors.toList()); assert prettyNameLines.size() <= 1 : prettyNameLines; - final Optional maybePrettyNameLine = prettyNameLines.size() == 1 - ? Optional.of(prettyNameLines.get(0)) - : Optional.empty(); + final Optional maybePrettyNameLine = + prettyNameLines.size() == 1 ? Optional.of(prettyNameLines.get(0)) : Optional.empty(); if (maybePrettyNameLine.isPresent()) { // we trim since some OS contain trailing space, for example, Oracle Linux Server 6.9 has a trailing space after the quote final String trimmedPrettyNameLine = maybePrettyNameLine.get().trim(); @@ -844,15 +695,11 @@ boolean isDebian8() throws IOException { return Constants.LINUX && getPrettyName().equals("Debian GNU/Linux 8 (jessie)"); } - OsStats.Cgroup getCgroup(boolean isLinux) { - return isLinux ? getCgroup() : null; - } - public OsStats osStats() { final OsStats.Cpu cpu = new OsStats.Cpu(getSystemCpuPercent(), getSystemLoadAverage()); final OsStats.Mem mem = new OsStats.Mem(getTotalPhysicalMemorySize(), getFreePhysicalMemorySize()); final OsStats.Swap swap = new OsStats.Swap(getTotalSwapSpaceSize(), getFreeSwapSpaceSize()); - final OsStats.Cgroup cgroup = getCgroup(Constants.LINUX); + final OsStats.Cgroup cgroup = Constants.LINUX ? getCgroup() : null; return new OsStats(System.currentTimeMillis(), cpu, mem, swap, cgroup); } diff --git a/server/src/main/resources/org/elasticsearch/bootstrap/security.policy b/server/src/main/resources/org/elasticsearch/bootstrap/security.policy index a410a7b5debc6..8ffc0d0eea47d 100644 --- a/server/src/main/resources/org/elasticsearch/bootstrap/security.policy +++ b/server/src/main/resources/org/elasticsearch/bootstrap/security.policy @@ -153,12 +153,6 @@ grant { permission java.io.FilePermission "/sys/fs/cgroup/memory", "read"; permission java.io.FilePermission "/sys/fs/cgroup/memory/-", "read"; - // control group v2 stats on linux - permission java.io.FilePermission "/sys/fs/cgroup/cpu.max", "read"; - permission java.io.FilePermission "/sys/fs/cgroup/cpu.stat", "read"; - permission java.io.FilePermission "/sys/fs/cgroup/memory.current", "read"; - permission java.io.FilePermission "/sys/fs/cgroup/memory.max", "read"; - // system memory on Linux systems affected by JDK bug (#66629) permission java.io.FilePermission "/proc/meminfo", "read"; }; diff --git a/server/src/test/java/org/elasticsearch/monitor/os/OsProbeTests.java b/server/src/test/java/org/elasticsearch/monitor/os/OsProbeTests.java index ac802cf738500..8be3723d72cc3 100644 --- a/server/src/test/java/org/elasticsearch/monitor/os/OsProbeTests.java +++ b/server/src/test/java/org/elasticsearch/monitor/os/OsProbeTests.java @@ -43,7 +43,7 @@ public void testOsInfo() throws IOException { final OsProbe osProbe = new OsProbe() { @Override - List readOsRelease() { + List readOsRelease() throws IOException { assert Constants.LINUX : Constants.OS_NAME; if (prettyName != null) { final String quote = randomFrom("\"", "'", ""); @@ -78,10 +78,8 @@ public void testOsStats() { OsStats stats = osProbe.osStats(); assertNotNull(stats); assertThat(stats.getTimestamp(), greaterThan(0L)); - assertThat( - stats.getCpu().getPercent(), - anyOf(equalTo((short) -1), is(both(greaterThanOrEqualTo((short) 0)).and(lessThanOrEqualTo((short) 100)))) - ); + 
assertThat(stats.getCpu().getPercent(), anyOf(equalTo((short) -1), + is(both(greaterThanOrEqualTo((short) 0)).and(lessThanOrEqualTo((short) 100))))); double[] loadAverage = stats.getCpu().getLoadAverage(); if (loadAverage != null) { assertThat(loadAverage.length, equalTo(3)); @@ -175,14 +173,16 @@ String readProcLoadavg() { } public void testCgroupProbe() { - final int availableCgroupsVersion = randomFrom(0, 1, 2); + assumeTrue("test runs on Linux only", Constants.LINUX); + + final boolean areCgroupStatsAvailable = randomBoolean(); final String hierarchy = randomAlphaOfLength(16); - final OsProbe probe = buildStubOsProbe(availableCgroupsVersion, hierarchy); + final OsProbe probe = buildStubOsProbe(areCgroupStatsAvailable, hierarchy); final OsStats.Cgroup cgroup = probe.osStats().getCgroup(); - if (availableCgroupsVersion > 0) { + if (areCgroupStatsAvailable) { assertNotNull(cgroup); assertThat(cgroup.getCpuAcctControlGroup(), equalTo("/" + hierarchy)); assertThat(cgroup.getCpuAcctUsageNanos(), equalTo(364869866063112L)); @@ -200,14 +200,17 @@ public void testCgroupProbe() { } public void testCgroupProbeWithMissingCpuAcct() { + assumeTrue("test runs on Linux only", Constants.LINUX); + final String hierarchy = randomAlphaOfLength(16); // This cgroup data is missing a line about cpuacct - List procSelfCgroupLines = getProcSelfGroupLines(1, hierarchy).stream() + List procSelfCgroupLines = getProcSelfGroupLines(hierarchy) + .stream() .map(line -> line.replaceFirst(",cpuacct", "")) .collect(Collectors.toList()); - final OsProbe probe = buildStubOsProbe(1, hierarchy, procSelfCgroupLines); + final OsProbe probe = buildStubOsProbe(true, hierarchy, procSelfCgroupLines); final OsStats.Cgroup cgroup = probe.osStats().getCgroup(); @@ -215,14 +218,18 @@ public void testCgroupProbeWithMissingCpuAcct() { } public void testCgroupProbeWithMissingCpu() { + assumeTrue("test runs on Linux only", Constants.LINUX); + final String hierarchy = randomAlphaOfLength(16); // This cgroup data is missing a line about cpu - List procSelfCgroupLines = getProcSelfGroupLines(1, hierarchy).stream() + List procSelfCgroupLines = getProcSelfGroupLines(hierarchy) + .stream() .map(line -> line.replaceFirst(":cpu,", ":")) .collect(Collectors.toList()); - final OsProbe probe = buildStubOsProbe(1, hierarchy, procSelfCgroupLines); + + final OsProbe probe = buildStubOsProbe(true, hierarchy, procSelfCgroupLines); final OsStats.Cgroup cgroup = probe.osStats().getCgroup(); @@ -230,14 +237,17 @@ public void testCgroupProbeWithMissingCpu() { } public void testCgroupProbeWithMissingMemory() { + assumeTrue("test runs on Linux only", Constants.LINUX); + final String hierarchy = randomAlphaOfLength(16); // This cgroup data is missing a line about memory - List procSelfCgroupLines = getProcSelfGroupLines(1, hierarchy).stream() + List procSelfCgroupLines = getProcSelfGroupLines(hierarchy) + .stream() .filter(line -> line.contains(":memory:") == false) .collect(Collectors.toList()); - final OsProbe probe = buildStubOsProbe(1, hierarchy, procSelfCgroupLines); + final OsProbe probe = buildStubOsProbe(true, hierarchy, procSelfCgroupLines); final OsStats.Cgroup cgroup = probe.osStats().getCgroup(); @@ -245,8 +255,6 @@ public void testCgroupProbeWithMissingMemory() { } public void testGetTotalMemFromProcMeminfo() throws Exception { - int cgroupsVersion = randomFrom(1, 2); - // missing MemTotal line var meminfoLines = Arrays.asList( "MemFree: 8467692 kB", @@ -257,7 +265,7 @@ public void testGetTotalMemFromProcMeminfo() throws Exception { "Active: 
43637908 kB", "Inactive: 8130280 kB" ); - OsProbe probe = buildStubOsProbe(cgroupsVersion, "", List.of(), meminfoLines); + OsProbe probe = buildStubOsProbe(true, "", List.of(), meminfoLines); assertThat(probe.getTotalMemFromProcMeminfo(), equalTo(0L)); // MemTotal line with invalid value @@ -271,7 +279,7 @@ public void testGetTotalMemFromProcMeminfo() throws Exception { "Active: 43637908 kB", "Inactive: 8130280 kB" ); - probe = buildStubOsProbe(cgroupsVersion, "", List.of(), meminfoLines); + probe = buildStubOsProbe(true, "", List.of(), meminfoLines); assertThat(probe.getTotalMemFromProcMeminfo(), equalTo(0L)); // MemTotal line with invalid unit @@ -285,7 +293,7 @@ public void testGetTotalMemFromProcMeminfo() throws Exception { "Active: 43637908 kB", "Inactive: 8130280 kB" ); - probe = buildStubOsProbe(cgroupsVersion, "", List.of(), meminfoLines); + probe = buildStubOsProbe(true, "", List.of(), meminfoLines); assertThat(probe.getTotalMemFromProcMeminfo(), equalTo(0L)); // MemTotal line with random valid value @@ -300,7 +308,7 @@ public void testGetTotalMemFromProcMeminfo() throws Exception { "Active: 43637908 kB", "Inactive: 8130280 kB" ); - probe = buildStubOsProbe(cgroupsVersion, "", List.of(), meminfoLines); + probe = buildStubOsProbe(true, "", List.of(), meminfoLines); assertThat(probe.getTotalMemFromProcMeminfo(), equalTo(memTotalInKb * 1024L)); } @@ -311,13 +319,7 @@ public void testGetTotalMemoryOnDebian8() throws Exception { assertThat(osProbe.getTotalPhysicalMemorySize(), greaterThan(0L)); } - private static List getProcSelfGroupLines(int cgroupsVersion, String hierarchy) { - // It doesn't really matter if cgroupsVersion == 0 here - - if (cgroupsVersion == 2) { - return List.of("0::/" + hierarchy); - } - + private static List getProcSelfGroupLines(String hierarchy) { return Arrays.asList( "10:freezer:/", "9:net_cls,net_prio:/", @@ -329,40 +331,32 @@ private static List getProcSelfGroupLines(int cgroupsVersion, String hie "3:perf_event:/", "2:cpu,cpuacct,cpuset:/" + hierarchy, "1:name=systemd:/user.slice/user-1000.slice/session-2359.scope", - "0::/cgroup2" - ); + "0::/cgroup2"); } - private static OsProbe buildStubOsProbe(final int availableCgroupsVersion, final String hierarchy) { - List procSelfCgroupLines = getProcSelfGroupLines(availableCgroupsVersion, hierarchy); + private static OsProbe buildStubOsProbe(final boolean areCgroupStatsAvailable, final String hierarchy) { + List procSelfCgroupLines = getProcSelfGroupLines(hierarchy); - return buildStubOsProbe(availableCgroupsVersion, hierarchy, procSelfCgroupLines); + return buildStubOsProbe(areCgroupStatsAvailable, hierarchy, procSelfCgroupLines); } /** * Builds a test instance of OsProbe. Methods that ordinarily read from the filesystem are overridden to return values based upon * the arguments to this method. * - * @param availableCgroupsVersion what version of cgroups are available, 1 or 2, or 0 for no cgroups. Normally OsProbe establishes this - * for itself. + * @param areCgroupStatsAvailable whether or not cgroup data is available. Normally OsProbe establishes this for itself. * @param hierarchy a mock value used to generate a cgroup hierarchy. 
* @param procSelfCgroupLines the lines that will be used as the content of /proc/self/cgroup * @param procMeminfoLines lines that will be used as the content of /proc/meminfo * @return a test instance */ private static OsProbe buildStubOsProbe( - final int availableCgroupsVersion, + final boolean areCgroupStatsAvailable, final String hierarchy, List procSelfCgroupLines, List procMeminfoLines ) { return new OsProbe() { - @Override - OsStats.Cgroup getCgroup(boolean isLinux) { - // Pretend we're always on Linux so that we can run the cgroup tests - return super.getCgroup(true); - } - @Override List readProcSelfCgroup() { return procSelfCgroupLines; @@ -388,7 +382,10 @@ String readSysFsCgroupCpuAcctCpuAcctCfsQuota(String controlGroup) { @Override List readSysFsCgroupCpuAcctCpuStat(String controlGroup) { - return Arrays.asList("nr_periods 17992", "nr_throttled 1311", "throttled_time 139298645489"); + return Arrays.asList( + "nr_periods 17992", + "nr_throttled 1311", + "throttled_time 139298645489"); } @Override @@ -406,50 +403,22 @@ String readSysFsCgroupMemoryUsageInBytes(String controlGroup) { @Override boolean areCgroupStatsAvailable() { - return availableCgroupsVersion > 0; + return areCgroupStatsAvailable; } @Override - List readProcMeminfo() { + List readProcMeminfo() throws IOException { return procMeminfoLines; } - - @Override - String readSysFsCgroupV2MemoryLimitInBytes(String controlGroup) { - assertThat(controlGroup, equalTo("/" + hierarchy)); - // This is the highest value that can be stored in an unsigned 64 bit number, hence too big for long - return "18446744073709551615"; - } - - @Override - String readSysFsCgroupV2MemoryUsageInBytes(String controlGroup) { - assertThat(controlGroup, equalTo("/" + hierarchy)); - return "4796416"; - } - - @Override - List readCgroupV2CpuStats(String controlGroup) { - assertThat(controlGroup, equalTo("/" + hierarchy)); - return List.of( - "usage_usec 364869866063112", - "user_usec 34636", - "system_usec 9896", - "nr_periods 17992", - "nr_throttled 1311", - "throttled_usec 139298645489" - ); - } - - @Override - String readCgroupV2CpuLimit(String controlGroup) { - assertThat(controlGroup, equalTo("/" + hierarchy)); - return "50000 100000"; - } }; } - private static OsProbe buildStubOsProbe(final int availableCgroupsVersion, final String hierarchy, List procSelfCgroupLines) { - return buildStubOsProbe(availableCgroupsVersion, hierarchy, procSelfCgroupLines, List.of()); + private static OsProbe buildStubOsProbe( + final boolean areCgroupStatsAvailable, + final String hierarchy, + List procSelfCgroupLines + ) { + return buildStubOsProbe(areCgroupStatsAvailable, hierarchy, procSelfCgroupLines, List.of()); } } From 46dd25210086529487aa5198422e4a0a26d51730 Mon Sep 17 00:00:00 2001 From: Ignacio Vera Date: Wed, 1 Sep 2021 11:55:14 +0200 Subject: [PATCH 055/128] Replace Lucene DataInput/DataOutput with Elasticsearch StreamInput/StreamOutput when reading/writing Histogram doc values (#77100) This commit replaces lucene DataInput / DataOutput with Elasticsearch StreamInput / StreamOutput abstractions. 
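To make the switch concrete, here is a minimal, stand-alone sketch of the doc-value round-trip the patch adopts, using only the StreamInput/StreamOutput calls that appear in the diff below (writeVInt/writeLong on BytesStreamOutput, reset/readVInt/readLong/available on ByteArrayStreamInput). The class name, the sample buckets, and the printing are illustrative additions, not part of the change.

import java.io.IOException;

import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.io.stream.ByteArrayStreamInput;
import org.elasticsearch.common.io.stream.BytesStreamOutput;

// Each non-empty bucket is written as a vInt count followed by the raw long bits of its value.
public class HistogramEncodingSketch {
    public static void main(String[] args) throws IOException {
        double[] values = { 0.1, 0.25, 3.0 };
        int[] counts = { 4, 0, 12 };

        BytesStreamOutput out = new BytesStreamOutput();
        for (int i = 0; i < values.length; i++) {
            if (counts[i] > 0) { // buckets with count == 0 are not stored
                out.writeVInt(counts[i]);
                out.writeLong(Double.doubleToRawLongBits(values[i]));
            }
        }
        BytesRef docValue = out.bytes().toBytesRef();

        // Read the encoded bytes back, stopping once the stream is exhausted.
        ByteArrayStreamInput in = new ByteArrayStreamInput();
        in.reset(docValue.bytes, docValue.offset, docValue.length);
        while (in.available() > 0) {
            int count = in.readVInt();
            double value = Double.longBitsToDouble(in.readLong());
            System.out.println(count + " x " + value);
        }
    }
}

The sketch mirrors the mapper's behaviour: zero-count buckets are skipped on write, and reading continues while bytes remain available.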
--- .../mapper/HistogramFieldMapper.java | 26 +++++++++---------- 1 file changed, 13 insertions(+), 13 deletions(-) diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/mapper/HistogramFieldMapper.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/mapper/HistogramFieldMapper.java index 7d91492aeaced..a7f37721899cb 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/mapper/HistogramFieldMapper.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/mapper/HistogramFieldMapper.java @@ -16,10 +16,10 @@ import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.Query; import org.apache.lucene.search.SortField; -import org.apache.lucene.store.ByteArrayDataInput; -import org.apache.lucene.store.ByteBuffersDataOutput; import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.Explicit; +import org.elasticsearch.common.io.stream.ByteArrayStreamInput; +import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.xcontent.XContentParser; @@ -341,7 +341,7 @@ public void parse(DocumentParserContext context) throws IOException { + "]" ); } - ByteBuffersDataOutput dataOutput = new ByteBuffersDataOutput(); + BytesStreamOutput streamOutput = new BytesStreamOutput(); for (int i = 0; i < values.size(); i++) { int count = counts.get(i); if (count < 0) { @@ -350,11 +350,11 @@ public void parse(DocumentParserContext context) throws IOException { ); } else if (count > 0) { // we do not add elements with count == 0 - dataOutput.writeVInt(count); - dataOutput.writeLong(Double.doubleToRawLongBits(values.get(i))); + streamOutput.writeVInt(count); + streamOutput.writeLong(Double.doubleToRawLongBits(values.get(i))); } } - BytesRef docValue = new BytesRef(dataOutput.toArrayCopy(), 0, Math.toIntExact(dataOutput.size())); + BytesRef docValue = streamOutput.bytes().toBytesRef(); Field field = new BinaryDocValuesField(name(), docValue); if (context.doc().getByKey(fieldType().name()) != null) { throw new IllegalArgumentException( @@ -386,25 +386,25 @@ private static class InternalHistogramValue extends HistogramValue { double value; int count; boolean isExhausted; - ByteArrayDataInput dataInput; + ByteArrayStreamInput streamInput; InternalHistogramValue() { - dataInput = new ByteArrayDataInput(); + streamInput = new ByteArrayStreamInput(); } /** reset the value for the histogram */ void reset(BytesRef bytesRef) { - dataInput.reset(bytesRef.bytes, bytesRef.offset, bytesRef.length); + streamInput.reset(bytesRef.bytes, bytesRef.offset, bytesRef.length); isExhausted = false; value = 0; count = 0; } @Override - public boolean next() { - if (dataInput.eof() == false) { - count = dataInput.readVInt(); - value = Double.longBitsToDouble(dataInput.readLong()); + public boolean next() throws IOException { + if (streamInput.available() > 0) { + count = streamInput.readVInt(); + value = Double.longBitsToDouble(streamInput.readLong()); return true; } isExhausted = true; From 54d4737b23f0bb468ace66b249f272d7fea1df59 Mon Sep 17 00:00:00 2001 From: Rory Hunter Date: Wed, 1 Sep 2021 11:46:49 +0100 Subject: [PATCH 056/128] Reapply "Handle cgroups v2 in `OsProbe` (#76883)" (#77106) Re-apply #76883. Somehow a line was missed from security.policy. 
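Before the diff, a brief, self-contained sketch of the two cgroup v2 details the re-applied probe relies on: a unified (v2-only) hierarchy shows up as a single "0::<group>" line in /proc/self/cgroup, and cpu.max carries "<quota> <period>" in microseconds, where the literal "max" means no quota and is reported as -1. Method names and sample values here are illustrative only, not the probe's actual API.

import java.util.List;

public class CgroupV2Sketch {

    // A v2-only (unified) hierarchy is a single line of the form "0::<group>".
    static boolean isCgroupV2(List<String> procSelfCgroupLines) {
        return procSelfCgroupLines.size() == 1 && procSelfCgroupLines.get(0).startsWith("0::");
    }

    // Returns { quotaMicros, periodMicros }; "max" as the first field means unlimited (-1).
    static long[] parseCpuMax(String cpuMaxLine) {
        String[] parts = cpuMaxLine.trim().split("\\s+");
        long quota = "max".equals(parts[0]) ? -1L : Long.parseLong(parts[0]);
        long period = Long.parseLong(parts[1]);
        return new long[] { quota, period };
    }

    public static void main(String[] args) {
        System.out.println(isCgroupV2(List.of("0::/my-service")));   // true
        long[] limits = parseCpuMax("50000 100000");
        System.out.println(limits[0] + " / " + limits[1]);           // 50000 / 100000
        System.out.println(parseCpuMax("max 100000")[0]);            // -1
    }
}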
--- .../packaging/test/DockerTests.java | 1 - .../org/elasticsearch/monitor/os/OsProbe.java | 263 ++++++++++++++---- .../elasticsearch/bootstrap/security.policy | 7 + .../monitor/os/OsProbeTests.java | 125 +++++---- 4 files changed, 293 insertions(+), 103 deletions(-) diff --git a/qa/os/src/test/java/org/elasticsearch/packaging/test/DockerTests.java b/qa/os/src/test/java/org/elasticsearch/packaging/test/DockerTests.java index d5fd91a427bac..45689f0fed691 100644 --- a/qa/os/src/test/java/org/elasticsearch/packaging/test/DockerTests.java +++ b/qa/os/src/test/java/org/elasticsearch/packaging/test/DockerTests.java @@ -849,7 +849,6 @@ public void test131InitProcessHasCorrectPID() { /** * Check that Elasticsearch reports per-node cgroup information. */ - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/76812") public void test140CgroupOsStatsAreAvailable() throws Exception { waitForElasticsearch(installation, USERNAME, PASSWORD); diff --git a/server/src/main/java/org/elasticsearch/monitor/os/OsProbe.java b/server/src/main/java/org/elasticsearch/monitor/os/OsProbe.java index a20b7c7e27396..87a068f8ba7ff 100644 --- a/server/src/main/java/org/elasticsearch/monitor/os/OsProbe.java +++ b/server/src/main/java/org/elasticsearch/monitor/os/OsProbe.java @@ -30,25 +30,31 @@ import java.util.regex.Matcher; import java.util.regex.Pattern; import java.util.stream.Collectors; +import java.util.stream.Stream; /** * The {@link OsProbe} class retrieves information about the physical and swap size of the machine * memory, as well as the system load average and cpu load. * - * In some exceptional cases, it's possible the underlying native methods used by + *

In some exceptional cases, it's possible the underlying native methods used by * {@link #getFreePhysicalMemorySize()}, {@link #getTotalPhysicalMemorySize()}, * {@link #getFreeSwapSpaceSize()}, and {@link #getTotalSwapSpaceSize()} can return a * negative value. Because of this, we prevent those methods from returning negative values, * returning 0 instead. * - * The OS can report a negative number in a number of cases: - * - Non-supported OSes (HP-UX, or AIX) - * - A failure of macOS to initialize host statistics - * - An OS that does not support the {@code _SC_PHYS_PAGES} or {@code _SC_PAGE_SIZE} flags for the {@code sysconf()} linux kernel call - * - An overflow of the product of {@code _SC_PHYS_PAGES} and {@code _SC_PAGE_SIZE} - * - An error case retrieving these values from a linux kernel - * - A non-standard libc implementation not implementing the required values - * For a more exhaustive explanation, see https://github.com/elastic/elasticsearch/pull/42725 + *

The OS can report a negative number in a number of cases:
+ *
+ *
+ *
  • Non-supported OSes (HP-UX, or AIX) + *
  • A failure of macOS to initialize host statistics + *
  • An OS that does not support the {@code _SC_PHYS_PAGES} or {@code _SC_PAGE_SIZE} flags for the {@code sysconf()} linux kernel call + *
  • An overflow of the product of {@code _SC_PHYS_PAGES} and {@code _SC_PAGE_SIZE} + *
  • An error case retrieving these values from a linux kernel + *
  • A non-standard libc implementation not implementing the required values + *
+ *
+ *
For a more exhaustive explanation, see https://github.com/elastic/elasticsearch/pull/42725 */ public class OsProbe { @@ -178,7 +184,7 @@ final double[] getSystemLoadAverage() { final String procLoadAvg = readProcLoadavg(); assert procLoadAvg.matches("(\\d+\\.\\d+\\s+){3}\\d+/\\d+\\s+\\d+"); final String[] fields = procLoadAvg.split("\\s+"); - return new double[]{Double.parseDouble(fields[0]), Double.parseDouble(fields[1]), Double.parseDouble(fields[2])}; + return new double[] { Double.parseDouble(fields[0]), Double.parseDouble(fields[1]), Double.parseDouble(fields[2]) }; } catch (final IOException e) { if (logger.isDebugEnabled()) { logger.debug("error reading /proc/loadavg", e); @@ -192,7 +198,7 @@ final double[] getSystemLoadAverage() { } try { final double oneMinuteLoadAverage = (double) getSystemLoadAverage.invoke(osMxBean); - return new double[]{oneMinuteLoadAverage >= 0 ? oneMinuteLoadAverage : -1, -1, -1}; + return new double[] { oneMinuteLoadAverage >= 0 ? oneMinuteLoadAverage : -1, -1, -1 }; } catch (IllegalAccessException | InvocationTargetException e) { if (logger.isDebugEnabled()) { logger.debug("error reading one minute load average from operating system", e); @@ -318,6 +324,23 @@ String readSysFsCgroupCpuAcctCpuAcctUsage(final String controlGroup) throws IOEx return readSingleLine(PathUtils.get("/sys/fs/cgroup/cpuacct", controlGroup, "cpuacct.usage")); } + private long[] getCgroupV2CpuLimit(String controlGroup) throws IOException { + String entry = readCgroupV2CpuLimit(controlGroup); + String[] parts = entry.split("\\s+"); + assert parts.length == 2 : "Expected 2 fields in [cpu.max]"; + + long[] values = new long[2]; + + values[0] = "max".equals(parts[0]) ? -1L : Long.parseLong(parts[0]); + values[1] = Long.parseLong(parts[1]); + return values; + } + + @SuppressForbidden(reason = "access /sys/fs/cgroup/cpu.max") + String readCgroupV2CpuLimit(String controlGroup) throws IOException { + return readSingleLine(PathUtils.get("/sys/fs/cgroup/", controlGroup, "cpu.max")); + } + /** * The total period of time in microseconds for how frequently the Elasticsearch control group's access to CPU resources will be * reallocated. @@ -454,6 +477,35 @@ String readSysFsCgroupMemoryLimitInBytes(final String controlGroup) throws IOExc return readSingleLine(PathUtils.get("/sys/fs/cgroup/memory", controlGroup, "memory.limit_in_bytes")); } + /** + * The maximum amount of user memory (including file cache). + * If there is no limit then some Linux versions return the maximum value that can be stored in an + * unsigned 64 bit number, and this will overflow a long, hence the result type is String. + * (The alternative would have been BigInteger but then it would not be possible to index + * the OS stats document into Elasticsearch without losing information, as BigInteger is + * not a supported Elasticsearch type.) + * + * @param controlGroup the control group for the Elasticsearch process for the {@code memory} subsystem + * @return the maximum amount of user memory (including file cache) + * @throws IOException if an I/O exception occurs reading {@code memory.limit_in_bytes} for the control group + */ + private String getCgroupV2MemoryLimitInBytes(final String controlGroup) throws IOException { + return readSysFsCgroupV2MemoryLimitInBytes(controlGroup); + } + + /** + * Returns the line from {@code memory.max} for the control group to which the Elasticsearch process belongs for the + * {@code memory} subsystem. This line represents the maximum amount of user memory (including file cache). 
+ * + * @param controlGroup the control group to which the Elasticsearch process belongs for the {@code memory} subsystem + * @return the line from {@code memory.max} + * @throws IOException if an I/O exception occurs reading {@code memory.max} for the control group + */ + @SuppressForbidden(reason = "access /sys/fs/cgroup/memory.max") + String readSysFsCgroupV2MemoryLimitInBytes(final String controlGroup) throws IOException { + return readSingleLine(PathUtils.get("/sys/fs/cgroup/", controlGroup, "memory.max")); + } + /** * The total current memory usage by processes in the cgroup (in bytes). * If there is no limit then some Linux versions return the maximum value that can be stored in an @@ -483,6 +535,35 @@ String readSysFsCgroupMemoryUsageInBytes(final String controlGroup) throws IOExc return readSingleLine(PathUtils.get("/sys/fs/cgroup/memory", controlGroup, "memory.usage_in_bytes")); } + /** + * The total current memory usage by processes in the cgroup (in bytes). + * If there is no limit then some Linux versions return the maximum value that can be stored in an + * unsigned 64 bit number, and this will overflow a long, hence the result type is String. + * (The alternative would have been BigInteger but then it would not be possible to index + * the OS stats document into Elasticsearch without losing information, as BigInteger is + * not a supported Elasticsearch type.) + * + * @param controlGroup the control group for the Elasticsearch process for the {@code memory} subsystem + * @return the total current memory usage by processes in the cgroup (in bytes) + * @throws IOException if an I/O exception occurs reading {@code memory.current} for the control group + */ + private String getCgroupV2MemoryUsageInBytes(final String controlGroup) throws IOException { + return readSysFsCgroupV2MemoryUsageInBytes(controlGroup); + } + + /** + * Returns the line from {@code memory.current} for the control group to which the Elasticsearch process belongs for the + * {@code memory} subsystem. This line represents the total current memory usage by processes in the cgroup (in bytes). + * + * @param controlGroup the control group to which the Elasticsearch process belongs for the {@code memory} subsystem + * @return the line from {@code memory.current} + * @throws IOException if an I/O exception occurs reading {@code memory.current} for the control group + */ + @SuppressForbidden(reason = "access /sys/fs/cgroup/memory.current") + String readSysFsCgroupV2MemoryUsageInBytes(final String controlGroup) throws IOException { + return readSingleLine(PathUtils.get("/sys/fs/cgroup/", controlGroup, "memory.current")); + } + /** * Checks if cgroup stats are available by checking for the existence of {@code /proc/self/cgroup}, {@code /sys/fs/cgroup/cpu}, * {@code /sys/fs/cgroup/cpuacct} and {@code /sys/fs/cgroup/memory}. 
@@ -490,20 +571,51 @@ String readSysFsCgroupMemoryUsageInBytes(final String controlGroup) throws IOExc * @return {@code true} if the stats are available, otherwise {@code false} */ @SuppressForbidden(reason = "access /proc/self/cgroup, /sys/fs/cgroup/cpu, /sys/fs/cgroup/cpuacct and /sys/fs/cgroup/memory") - boolean areCgroupStatsAvailable() { + boolean areCgroupStatsAvailable() throws IOException { if (Files.exists(PathUtils.get("/proc/self/cgroup")) == false) { return false; } - if (Files.exists(PathUtils.get("/sys/fs/cgroup/cpu")) == false) { - return false; - } - if (Files.exists(PathUtils.get("/sys/fs/cgroup/cpuacct")) == false) { - return false; + + List lines = readProcSelfCgroup(); + + // cgroup v2 + if (lines.size() == 1 && lines.get(0).startsWith("0::")) { + return Stream.of("/sys/fs/cgroup/cpu.stat", "/sys/fs/cgroup/memory.stat").allMatch(path -> Files.exists(PathUtils.get(path))); } - if (Files.exists(PathUtils.get("/sys/fs/cgroup/memory")) == false) { - return false; + + return Stream.of("/sys/fs/cgroup/cpu", "/sys/fs/cgroup/cpuacct", "/sys/fs/cgroup/memory") + .allMatch(path -> Files.exists(PathUtils.get(path))); + } + + /** + * The CPU statistics for all tasks in the Elasticsearch control group. + * + * @param controlGroup the control group to which the Elasticsearch process belongs for the {@code memory} subsystem + * @return the CPU statistics + * @throws IOException if an I/O exception occurs reading {@code cpu.stat} for the control group + */ + private Map getCgroupV2CpuStats(String controlGroup) throws IOException { + final List lines = readCgroupV2CpuStats(controlGroup); + final Map stats = new HashMap<>(); + + for (String line : lines) { + String[] parts = line.split("\\s+"); + assert parts.length == 2 : "Corrupt cpu.stat line: [" + line + "]"; + stats.put(parts[0], Long.parseLong(parts[1])); } - return true; + + final List expectedKeys = List.of("nr_periods", "nr_throttled", "system_usec", "throttled_usec", "usage_usec", "user_usec"); + expectedKeys.forEach(key -> { + assert stats.containsKey(key) : key; + assert stats.get(key) != -1 : stats.get(key); + }); + + return stats; + } + + @SuppressForbidden(reason = "access /sys/fs/cgroup/cpu.stat") + List readCgroupV2CpuStats(final String controlGroup) throws IOException { + return Files.readAllLines(PathUtils.get("/sys/fs/cgroup", controlGroup, "cpu.stat")); } /** @@ -515,45 +627,79 @@ private OsStats.Cgroup getCgroup() { try { if (areCgroupStatsAvailable() == false) { return null; - } else { - final Map controllerMap = getControlGroups(); - assert controllerMap.isEmpty() == false; + } + + final Map controllerMap = getControlGroups(); + assert controllerMap.isEmpty() == false; - final String cpuAcctControlGroup = controllerMap.get("cpuacct"); + final String cpuAcctControlGroup; + final long cgroupCpuAcctUsageNanos; + final long cgroupCpuAcctCpuCfsPeriodMicros; + final long cgroupCpuAcctCpuCfsQuotaMicros; + final String cpuControlGroup; + final OsStats.Cgroup.CpuStat cpuStat; + final String memoryControlGroup; + final String cgroupMemoryLimitInBytes; + final String cgroupMemoryUsageInBytes; + + if (controllerMap.size() == 1 && controllerMap.containsKey("")) { + // There's a single hierarchy for all controllers + cpuControlGroup = cpuAcctControlGroup = memoryControlGroup = controllerMap.get(""); + + // `cpuacct` was merged with `cpu` in v2 + final Map cpuStatsMap = getCgroupV2CpuStats(cpuControlGroup); + + cgroupCpuAcctUsageNanos = cpuStatsMap.get("usage_usec"); + + long[] cpuLimits = getCgroupV2CpuLimit(cpuControlGroup); + 
cgroupCpuAcctCpuCfsQuotaMicros = cpuLimits[0]; + cgroupCpuAcctCpuCfsPeriodMicros = cpuLimits[1]; + + cpuStat = new OsStats.Cgroup.CpuStat( + cpuStatsMap.get("nr_periods"), + cpuStatsMap.get("nr_throttled"), + cpuStatsMap.get("throttled_usec") + ); + + cgroupMemoryLimitInBytes = getCgroupV2MemoryLimitInBytes(memoryControlGroup); + cgroupMemoryUsageInBytes = getCgroupV2MemoryUsageInBytes(memoryControlGroup); + } else { + cpuAcctControlGroup = controllerMap.get("cpuacct"); if (cpuAcctControlGroup == null) { logger.debug("no [cpuacct] data found in cgroup stats"); return null; } - final long cgroupCpuAcctUsageNanos = getCgroupCpuAcctUsageNanos(cpuAcctControlGroup); + cgroupCpuAcctUsageNanos = getCgroupCpuAcctUsageNanos(cpuAcctControlGroup); - final String cpuControlGroup = controllerMap.get("cpu"); + cpuControlGroup = controllerMap.get("cpu"); if (cpuControlGroup == null) { logger.debug("no [cpu] data found in cgroup stats"); return null; } - final long cgroupCpuAcctCpuCfsPeriodMicros = getCgroupCpuAcctCpuCfsPeriodMicros(cpuControlGroup); - final long cgroupCpuAcctCpuCfsQuotaMicros = getCgroupCpuAcctCpuCfsQuotaMicros(cpuControlGroup); - final OsStats.Cgroup.CpuStat cpuStat = getCgroupCpuAcctCpuStat(cpuControlGroup); + cgroupCpuAcctCpuCfsPeriodMicros = getCgroupCpuAcctCpuCfsPeriodMicros(cpuControlGroup); + cgroupCpuAcctCpuCfsQuotaMicros = getCgroupCpuAcctCpuCfsQuotaMicros(cpuControlGroup); + cpuStat = getCgroupCpuAcctCpuStat(cpuControlGroup); - final String memoryControlGroup = controllerMap.get("memory"); + memoryControlGroup = controllerMap.get("memory"); if (memoryControlGroup == null) { logger.debug("no [memory] data found in cgroup stats"); return null; } - final String cgroupMemoryLimitInBytes = getCgroupMemoryLimitInBytes(memoryControlGroup); - final String cgroupMemoryUsageInBytes = getCgroupMemoryUsageInBytes(memoryControlGroup); - - return new OsStats.Cgroup( - cpuAcctControlGroup, - cgroupCpuAcctUsageNanos, - cpuControlGroup, - cgroupCpuAcctCpuCfsPeriodMicros, - cgroupCpuAcctCpuCfsQuotaMicros, - cpuStat, - memoryControlGroup, - cgroupMemoryLimitInBytes, - cgroupMemoryUsageInBytes); + cgroupMemoryLimitInBytes = getCgroupMemoryLimitInBytes(memoryControlGroup); + cgroupMemoryUsageInBytes = getCgroupMemoryUsageInBytes(memoryControlGroup); } + + return new OsStats.Cgroup( + cpuAcctControlGroup, + cgroupCpuAcctUsageNanos, + cpuControlGroup, + cgroupCpuAcctCpuCfsPeriodMicros, + cgroupCpuAcctCpuCfsQuotaMicros, + cpuStat, + memoryControlGroup, + cgroupMemoryLimitInBytes, + cgroupMemoryUsageInBytes + ); } catch (final IOException e) { logger.debug("error reading control group stats", e); return null; @@ -576,13 +722,14 @@ public static OsProbe getInstance() { OsInfo osInfo(long refreshInterval, int allocatedProcessors) throws IOException { return new OsInfo( - refreshInterval, - Runtime.getRuntime().availableProcessors(), - allocatedProcessors, - Constants.OS_NAME, - getPrettyName(), - Constants.OS_ARCH, - Constants.OS_VERSION); + refreshInterval, + Runtime.getRuntime().availableProcessors(), + allocatedProcessors, + Constants.OS_NAME, + getPrettyName(), + Constants.OS_ARCH, + Constants.OS_VERSION + ); } private String getPrettyName() throws IOException { @@ -594,11 +741,13 @@ private String getPrettyName() throws IOException { * wrapped in single- or double-quotes. 
*/ final List etcOsReleaseLines = readOsRelease(); - final List prettyNameLines = - etcOsReleaseLines.stream().filter(line -> line.startsWith("PRETTY_NAME")).collect(Collectors.toList()); + final List prettyNameLines = etcOsReleaseLines.stream() + .filter(line -> line.startsWith("PRETTY_NAME")) + .collect(Collectors.toList()); assert prettyNameLines.size() <= 1 : prettyNameLines; - final Optional maybePrettyNameLine = - prettyNameLines.size() == 1 ? Optional.of(prettyNameLines.get(0)) : Optional.empty(); + final Optional maybePrettyNameLine = prettyNameLines.size() == 1 + ? Optional.of(prettyNameLines.get(0)) + : Optional.empty(); if (maybePrettyNameLine.isPresent()) { // we trim since some OS contain trailing space, for example, Oracle Linux Server 6.9 has a trailing space after the quote final String trimmedPrettyNameLine = maybePrettyNameLine.get().trim(); @@ -695,11 +844,15 @@ boolean isDebian8() throws IOException { return Constants.LINUX && getPrettyName().equals("Debian GNU/Linux 8 (jessie)"); } + OsStats.Cgroup getCgroup(boolean isLinux) { + return isLinux ? getCgroup() : null; + } + public OsStats osStats() { final OsStats.Cpu cpu = new OsStats.Cpu(getSystemCpuPercent(), getSystemLoadAverage()); final OsStats.Mem mem = new OsStats.Mem(getTotalPhysicalMemorySize(), getFreePhysicalMemorySize()); final OsStats.Swap swap = new OsStats.Swap(getTotalSwapSpaceSize(), getFreeSwapSpaceSize()); - final OsStats.Cgroup cgroup = Constants.LINUX ? getCgroup() : null; + final OsStats.Cgroup cgroup = getCgroup(Constants.LINUX); return new OsStats(System.currentTimeMillis(), cpu, mem, swap, cgroup); } diff --git a/server/src/main/resources/org/elasticsearch/bootstrap/security.policy b/server/src/main/resources/org/elasticsearch/bootstrap/security.policy index 8ffc0d0eea47d..56d9f45c61a16 100644 --- a/server/src/main/resources/org/elasticsearch/bootstrap/security.policy +++ b/server/src/main/resources/org/elasticsearch/bootstrap/security.policy @@ -153,6 +153,13 @@ grant { permission java.io.FilePermission "/sys/fs/cgroup/memory", "read"; permission java.io.FilePermission "/sys/fs/cgroup/memory/-", "read"; + // control group v2 stats on linux + permission java.io.FilePermission "/sys/fs/cgroup/cpu.max", "read"; + permission java.io.FilePermission "/sys/fs/cgroup/cpu.stat", "read"; + permission java.io.FilePermission "/sys/fs/cgroup/memory.current", "read"; + permission java.io.FilePermission "/sys/fs/cgroup/memory.max", "read"; + permission java.io.FilePermission "/sys/fs/cgroup/memory.stat", "read"; + // system memory on Linux systems affected by JDK bug (#66629) permission java.io.FilePermission "/proc/meminfo", "read"; }; diff --git a/server/src/test/java/org/elasticsearch/monitor/os/OsProbeTests.java b/server/src/test/java/org/elasticsearch/monitor/os/OsProbeTests.java index 8be3723d72cc3..ac802cf738500 100644 --- a/server/src/test/java/org/elasticsearch/monitor/os/OsProbeTests.java +++ b/server/src/test/java/org/elasticsearch/monitor/os/OsProbeTests.java @@ -43,7 +43,7 @@ public void testOsInfo() throws IOException { final OsProbe osProbe = new OsProbe() { @Override - List readOsRelease() throws IOException { + List readOsRelease() { assert Constants.LINUX : Constants.OS_NAME; if (prettyName != null) { final String quote = randomFrom("\"", "'", ""); @@ -78,8 +78,10 @@ public void testOsStats() { OsStats stats = osProbe.osStats(); assertNotNull(stats); assertThat(stats.getTimestamp(), greaterThan(0L)); - assertThat(stats.getCpu().getPercent(), anyOf(equalTo((short) -1), - 
is(both(greaterThanOrEqualTo((short) 0)).and(lessThanOrEqualTo((short) 100))))); + assertThat( + stats.getCpu().getPercent(), + anyOf(equalTo((short) -1), is(both(greaterThanOrEqualTo((short) 0)).and(lessThanOrEqualTo((short) 100)))) + ); double[] loadAverage = stats.getCpu().getLoadAverage(); if (loadAverage != null) { assertThat(loadAverage.length, equalTo(3)); @@ -173,16 +175,14 @@ String readProcLoadavg() { } public void testCgroupProbe() { - assumeTrue("test runs on Linux only", Constants.LINUX); - - final boolean areCgroupStatsAvailable = randomBoolean(); + final int availableCgroupsVersion = randomFrom(0, 1, 2); final String hierarchy = randomAlphaOfLength(16); - final OsProbe probe = buildStubOsProbe(areCgroupStatsAvailable, hierarchy); + final OsProbe probe = buildStubOsProbe(availableCgroupsVersion, hierarchy); final OsStats.Cgroup cgroup = probe.osStats().getCgroup(); - if (areCgroupStatsAvailable) { + if (availableCgroupsVersion > 0) { assertNotNull(cgroup); assertThat(cgroup.getCpuAcctControlGroup(), equalTo("/" + hierarchy)); assertThat(cgroup.getCpuAcctUsageNanos(), equalTo(364869866063112L)); @@ -200,17 +200,14 @@ public void testCgroupProbe() { } public void testCgroupProbeWithMissingCpuAcct() { - assumeTrue("test runs on Linux only", Constants.LINUX); - final String hierarchy = randomAlphaOfLength(16); // This cgroup data is missing a line about cpuacct - List procSelfCgroupLines = getProcSelfGroupLines(hierarchy) - .stream() + List procSelfCgroupLines = getProcSelfGroupLines(1, hierarchy).stream() .map(line -> line.replaceFirst(",cpuacct", "")) .collect(Collectors.toList()); - final OsProbe probe = buildStubOsProbe(true, hierarchy, procSelfCgroupLines); + final OsProbe probe = buildStubOsProbe(1, hierarchy, procSelfCgroupLines); final OsStats.Cgroup cgroup = probe.osStats().getCgroup(); @@ -218,18 +215,14 @@ public void testCgroupProbeWithMissingCpuAcct() { } public void testCgroupProbeWithMissingCpu() { - assumeTrue("test runs on Linux only", Constants.LINUX); - final String hierarchy = randomAlphaOfLength(16); // This cgroup data is missing a line about cpu - List procSelfCgroupLines = getProcSelfGroupLines(hierarchy) - .stream() + List procSelfCgroupLines = getProcSelfGroupLines(1, hierarchy).stream() .map(line -> line.replaceFirst(":cpu,", ":")) .collect(Collectors.toList()); - - final OsProbe probe = buildStubOsProbe(true, hierarchy, procSelfCgroupLines); + final OsProbe probe = buildStubOsProbe(1, hierarchy, procSelfCgroupLines); final OsStats.Cgroup cgroup = probe.osStats().getCgroup(); @@ -237,17 +230,14 @@ public void testCgroupProbeWithMissingCpu() { } public void testCgroupProbeWithMissingMemory() { - assumeTrue("test runs on Linux only", Constants.LINUX); - final String hierarchy = randomAlphaOfLength(16); // This cgroup data is missing a line about memory - List procSelfCgroupLines = getProcSelfGroupLines(hierarchy) - .stream() + List procSelfCgroupLines = getProcSelfGroupLines(1, hierarchy).stream() .filter(line -> line.contains(":memory:") == false) .collect(Collectors.toList()); - final OsProbe probe = buildStubOsProbe(true, hierarchy, procSelfCgroupLines); + final OsProbe probe = buildStubOsProbe(1, hierarchy, procSelfCgroupLines); final OsStats.Cgroup cgroup = probe.osStats().getCgroup(); @@ -255,6 +245,8 @@ public void testCgroupProbeWithMissingMemory() { } public void testGetTotalMemFromProcMeminfo() throws Exception { + int cgroupsVersion = randomFrom(1, 2); + // missing MemTotal line var meminfoLines = Arrays.asList( "MemFree: 8467692 kB", @@ -265,7 
+257,7 @@ public void testGetTotalMemFromProcMeminfo() throws Exception { "Active: 43637908 kB", "Inactive: 8130280 kB" ); - OsProbe probe = buildStubOsProbe(true, "", List.of(), meminfoLines); + OsProbe probe = buildStubOsProbe(cgroupsVersion, "", List.of(), meminfoLines); assertThat(probe.getTotalMemFromProcMeminfo(), equalTo(0L)); // MemTotal line with invalid value @@ -279,7 +271,7 @@ public void testGetTotalMemFromProcMeminfo() throws Exception { "Active: 43637908 kB", "Inactive: 8130280 kB" ); - probe = buildStubOsProbe(true, "", List.of(), meminfoLines); + probe = buildStubOsProbe(cgroupsVersion, "", List.of(), meminfoLines); assertThat(probe.getTotalMemFromProcMeminfo(), equalTo(0L)); // MemTotal line with invalid unit @@ -293,7 +285,7 @@ public void testGetTotalMemFromProcMeminfo() throws Exception { "Active: 43637908 kB", "Inactive: 8130280 kB" ); - probe = buildStubOsProbe(true, "", List.of(), meminfoLines); + probe = buildStubOsProbe(cgroupsVersion, "", List.of(), meminfoLines); assertThat(probe.getTotalMemFromProcMeminfo(), equalTo(0L)); // MemTotal line with random valid value @@ -308,7 +300,7 @@ public void testGetTotalMemFromProcMeminfo() throws Exception { "Active: 43637908 kB", "Inactive: 8130280 kB" ); - probe = buildStubOsProbe(true, "", List.of(), meminfoLines); + probe = buildStubOsProbe(cgroupsVersion, "", List.of(), meminfoLines); assertThat(probe.getTotalMemFromProcMeminfo(), equalTo(memTotalInKb * 1024L)); } @@ -319,7 +311,13 @@ public void testGetTotalMemoryOnDebian8() throws Exception { assertThat(osProbe.getTotalPhysicalMemorySize(), greaterThan(0L)); } - private static List getProcSelfGroupLines(String hierarchy) { + private static List getProcSelfGroupLines(int cgroupsVersion, String hierarchy) { + // It doesn't really matter if cgroupsVersion == 0 here + + if (cgroupsVersion == 2) { + return List.of("0::/" + hierarchy); + } + return Arrays.asList( "10:freezer:/", "9:net_cls,net_prio:/", @@ -331,32 +329,40 @@ private static List getProcSelfGroupLines(String hierarchy) { "3:perf_event:/", "2:cpu,cpuacct,cpuset:/" + hierarchy, "1:name=systemd:/user.slice/user-1000.slice/session-2359.scope", - "0::/cgroup2"); + "0::/cgroup2" + ); } - private static OsProbe buildStubOsProbe(final boolean areCgroupStatsAvailable, final String hierarchy) { - List procSelfCgroupLines = getProcSelfGroupLines(hierarchy); + private static OsProbe buildStubOsProbe(final int availableCgroupsVersion, final String hierarchy) { + List procSelfCgroupLines = getProcSelfGroupLines(availableCgroupsVersion, hierarchy); - return buildStubOsProbe(areCgroupStatsAvailable, hierarchy, procSelfCgroupLines); + return buildStubOsProbe(availableCgroupsVersion, hierarchy, procSelfCgroupLines); } /** * Builds a test instance of OsProbe. Methods that ordinarily read from the filesystem are overridden to return values based upon * the arguments to this method. * - * @param areCgroupStatsAvailable whether or not cgroup data is available. Normally OsProbe establishes this for itself. + * @param availableCgroupsVersion what version of cgroups are available, 1 or 2, or 0 for no cgroups. Normally OsProbe establishes this + * for itself. * @param hierarchy a mock value used to generate a cgroup hierarchy. 
* @param procSelfCgroupLines the lines that will be used as the content of /proc/self/cgroup * @param procMeminfoLines lines that will be used as the content of /proc/meminfo * @return a test instance */ private static OsProbe buildStubOsProbe( - final boolean areCgroupStatsAvailable, + final int availableCgroupsVersion, final String hierarchy, List procSelfCgroupLines, List procMeminfoLines ) { return new OsProbe() { + @Override + OsStats.Cgroup getCgroup(boolean isLinux) { + // Pretend we're always on Linux so that we can run the cgroup tests + return super.getCgroup(true); + } + @Override List readProcSelfCgroup() { return procSelfCgroupLines; @@ -382,10 +388,7 @@ String readSysFsCgroupCpuAcctCpuAcctCfsQuota(String controlGroup) { @Override List readSysFsCgroupCpuAcctCpuStat(String controlGroup) { - return Arrays.asList( - "nr_periods 17992", - "nr_throttled 1311", - "throttled_time 139298645489"); + return Arrays.asList("nr_periods 17992", "nr_throttled 1311", "throttled_time 139298645489"); } @Override @@ -403,22 +406,50 @@ String readSysFsCgroupMemoryUsageInBytes(String controlGroup) { @Override boolean areCgroupStatsAvailable() { - return areCgroupStatsAvailable; + return availableCgroupsVersion > 0; } @Override - List readProcMeminfo() throws IOException { + List readProcMeminfo() { return procMeminfoLines; } + + @Override + String readSysFsCgroupV2MemoryLimitInBytes(String controlGroup) { + assertThat(controlGroup, equalTo("/" + hierarchy)); + // This is the highest value that can be stored in an unsigned 64 bit number, hence too big for long + return "18446744073709551615"; + } + + @Override + String readSysFsCgroupV2MemoryUsageInBytes(String controlGroup) { + assertThat(controlGroup, equalTo("/" + hierarchy)); + return "4796416"; + } + + @Override + List readCgroupV2CpuStats(String controlGroup) { + assertThat(controlGroup, equalTo("/" + hierarchy)); + return List.of( + "usage_usec 364869866063112", + "user_usec 34636", + "system_usec 9896", + "nr_periods 17992", + "nr_throttled 1311", + "throttled_usec 139298645489" + ); + } + + @Override + String readCgroupV2CpuLimit(String controlGroup) { + assertThat(controlGroup, equalTo("/" + hierarchy)); + return "50000 100000"; + } }; } - private static OsProbe buildStubOsProbe( - final boolean areCgroupStatsAvailable, - final String hierarchy, - List procSelfCgroupLines - ) { - return buildStubOsProbe(areCgroupStatsAvailable, hierarchy, procSelfCgroupLines, List.of()); + private static OsProbe buildStubOsProbe(final int availableCgroupsVersion, final String hierarchy, List procSelfCgroupLines) { + return buildStubOsProbe(availableCgroupsVersion, hierarchy, procSelfCgroupLines, List.of()); } } From 0920e21445dbec22f3ea95a4ed3c7536a04452b4 Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Wed, 1 Sep 2021 13:01:58 +0200 Subject: [PATCH 057/128] Implement Sort By Repository Name in Get Snapshots API (#77049) This one is the last sort column not yet implemented but used by Kibana. 
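As a quick illustration of the new sort key described below (sort by repository name, breaking ties by snapshot name), here is a small, self-contained comparator sketch. The SnapshotSummary record and its field names are hypothetical stand-ins for entries of the get-snapshots response, not Elasticsearch types.

import java.util.Comparator;
import java.util.List;

public class RepositorySortSketch {

    // Hypothetical stand-in for one entry of the get-snapshots response.
    record SnapshotSummary(String repository, String snapshotName) {}

    // Primary key: repository name; tie-breaker: snapshot name.
    static final Comparator<SnapshotSummary> BY_REPOSITORY =
        Comparator.comparing(SnapshotSummary::repository).thenComparing(SnapshotSummary::snapshotName);

    public static void main(String[] args) {
        List<SnapshotSummary> snapshots = List.of(
            new SnapshotSummary("test-repo-b", "snap-1"),
            new SnapshotSummary("test-repo-a", "snap-2"),
            new SnapshotSummary("test-repo-a", "snap-1")
        );
        snapshots.stream().sorted(BY_REPOSITORY)
            .forEach(s -> System.out.println(s.repository() + "/" + s.snapshotName()));
        // prints: test-repo-a/snap-1, test-repo-a/snap-2, test-repo-b/snap-1
    }
}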
--- .../apis/get-snapshot-api.asciidoc | 3 ++ .../http/snapshots/RestGetSnapshotsIT.java | 5 +++ .../snapshots/GetSnapshotsIT.java | 38 ++++++++++++++----- .../snapshots/get/GetSnapshotsRequest.java | 17 +++++++-- .../get/TransportGetSnapshotsAction.java | 19 ++++++++++ .../AbstractSnapshotIntegTestCase.java | 3 ++ 6 files changed, 72 insertions(+), 13 deletions(-) diff --git a/docs/reference/snapshot-restore/apis/get-snapshot-api.asciidoc b/docs/reference/snapshot-restore/apis/get-snapshot-api.asciidoc index bb104f44a979a..a708a20bd8b24 100644 --- a/docs/reference/snapshot-restore/apis/get-snapshot-api.asciidoc +++ b/docs/reference/snapshot-restore/apis/get-snapshot-api.asciidoc @@ -118,6 +118,9 @@ Allows setting a sort order for the result. Defaults to `start_time`, i.e. sorti `name`:: Sort snapshots by their name. +`repository`:: + Sort snapshots by their repository name and break ties by snapshot name. + `index_count`:: Sort snapshots by the number of indices they contain and break ties by snapshot name. diff --git a/qa/smoke-test-http/src/test/java/org/elasticsearch/http/snapshots/RestGetSnapshotsIT.java b/qa/smoke-test-http/src/test/java/org/elasticsearch/http/snapshots/RestGetSnapshotsIT.java index 1ce13280f594f..3cdc32029babd 100644 --- a/qa/smoke-test-http/src/test/java/org/elasticsearch/http/snapshots/RestGetSnapshotsIT.java +++ b/qa/smoke-test-http/src/test/java/org/elasticsearch/http/snapshots/RestGetSnapshotsIT.java @@ -96,6 +96,11 @@ private void doTestSortOrder(String repoName, Collection allSnapshotName GetSnapshotsRequest.SortBy.FAILED_SHARDS, order ); + assertSnapshotListSorted( + allSnapshotsSorted(allSnapshotNames, repoName, GetSnapshotsRequest.SortBy.REPOSITORY, order), + GetSnapshotsRequest.SortBy.REPOSITORY, + order + ); } public void testResponseSizeLimit() throws Exception { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/GetSnapshotsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/GetSnapshotsIT.java index 0c6d1f4d1d9fa..e558dfbd0bcac 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/GetSnapshotsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/GetSnapshotsIT.java @@ -38,21 +38,36 @@ protected Settings nodeSettings(int nodeOrdinal, Settings otherSettings) { } public void testSortBy() throws Exception { - final String repoName = "test-repo"; + final String repoNameA = "test-repo-a"; final Path repoPath = randomRepoPath(); - createRepository(repoName, "fs", repoPath); - maybeInitWithOldSnapshotVersion(repoName, repoPath); - final List snapshotNamesWithoutIndex = createNSnapshots(repoName, randomIntBetween(3, 20)); + createRepository(repoNameA, "fs", repoPath); + maybeInitWithOldSnapshotVersion(repoNameA, repoPath); + final String repoNameB = "test-repo-b"; + createRepository(repoNameB, "fs"); + + final List snapshotNamesWithoutIndexA = createNSnapshots(repoNameA, randomIntBetween(3, 20)); + final List snapshotNamesWithoutIndexB = createNSnapshots(repoNameB, randomIntBetween(3, 20)); createIndexWithContent("test-index"); - final List snapshotNamesWithIndex = createNSnapshots(repoName, randomIntBetween(3, 20)); + final List snapshotNamesWithIndexA = createNSnapshots(repoNameA, randomIntBetween(3, 20)); + final List snapshotNamesWithIndexB = createNSnapshots(repoNameB, randomIntBetween(3, 20)); + + final Collection allSnapshotNamesA = new HashSet<>(snapshotNamesWithIndexA); + final Collection allSnapshotNamesB = new HashSet<>(snapshotNamesWithIndexB); + 
allSnapshotNamesA.addAll(snapshotNamesWithoutIndexA); + allSnapshotNamesB.addAll(snapshotNamesWithoutIndexB); - final Collection allSnapshotNames = new HashSet<>(snapshotNamesWithIndex); - allSnapshotNames.addAll(snapshotNamesWithoutIndex); + doTestSortOrder(repoNameA, allSnapshotNamesA, SortOrder.ASC); + doTestSortOrder(repoNameA, allSnapshotNamesA, SortOrder.DESC); - doTestSortOrder(repoName, allSnapshotNames, SortOrder.ASC); - doTestSortOrder(repoName, allSnapshotNames, SortOrder.DESC); + doTestSortOrder(repoNameB, allSnapshotNamesB, SortOrder.ASC); + doTestSortOrder(repoNameB, allSnapshotNamesB, SortOrder.DESC); + + final Collection allSnapshots = new HashSet<>(allSnapshotNamesA); + allSnapshots.addAll(allSnapshotNamesB); + doTestSortOrder("*", allSnapshots, SortOrder.ASC); + doTestSortOrder("*", allSnapshots, SortOrder.DESC); } private void doTestSortOrder(String repoName, Collection allSnapshotNames, SortOrder order) { @@ -88,6 +103,11 @@ private void doTestSortOrder(String repoName, Collection allSnapshotName GetSnapshotsRequest.SortBy.FAILED_SHARDS, order ); + assertSnapshotListSorted( + allSnapshotsSorted(allSnapshotNames, repoName, GetSnapshotsRequest.SortBy.REPOSITORY, order), + GetSnapshotsRequest.SortBy.REPOSITORY, + order + ); } public void testResponseSizeLimit() throws Exception { diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/GetSnapshotsRequest.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/GetSnapshotsRequest.java index 158acb9092aed..0991b22cb8a36 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/GetSnapshotsRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/GetSnapshotsRequest.java @@ -46,7 +46,7 @@ public class GetSnapshotsRequest extends MasterNodeRequest public static final Version NUMERIC_PAGINATION_VERSION = Version.V_7_15_0; - private static final Version SORT_BY_SHARD_COUNTS_VERSION = Version.V_7_16_0; + private static final Version SORT_BY_SHARDS_OR_REPO_VERSION = Version.V_7_16_0; public static final int NO_LIMIT = -1; @@ -138,8 +138,11 @@ public void writeTo(StreamOutput out) throws IOException { out.writeBoolean(verbose); if (out.getVersion().onOrAfter(PAGINATED_GET_SNAPSHOTS_VERSION)) { out.writeOptionalWriteable(after); - if ((sort == SortBy.SHARDS || sort == SortBy.FAILED_SHARDS) && out.getVersion().before(SORT_BY_SHARD_COUNTS_VERSION)) { - throw new IllegalArgumentException("can't use sort by shard count with node version [" + out.getVersion() + "]"); + if ((sort == SortBy.SHARDS || sort == SortBy.FAILED_SHARDS || sort == SortBy.REPOSITORY) + && out.getVersion().before(SORT_BY_SHARDS_OR_REPO_VERSION)) { + throw new IllegalArgumentException( + "can't use sort by shard count or repository name with node version [" + out.getVersion() + "]" + ); } out.writeEnum(sort); out.writeVInt(size); @@ -327,7 +330,8 @@ public enum SortBy { DURATION("duration"), INDICES("index_count"), SHARDS("shard_count"), - FAILED_SHARDS("failed_shard_count"); + FAILED_SHARDS("failed_shard_count"), + REPOSITORY("repository"); private final String param; @@ -354,6 +358,8 @@ public static SortBy of(String value) { return SHARDS; case "failed_shard_count": return FAILED_SHARDS; + case "repository": + return REPOSITORY; default: throw new IllegalArgumentException("unknown sort order [" + value + "]"); } @@ -405,6 +411,9 @@ public static After from(@Nullable SnapshotInfo snapshotInfo, SortBy sortBy) { case FAILED_SHARDS: afterValue = 
String.valueOf(snapshotInfo.failedShards()); break; + case REPOSITORY: + afterValue = snapshotInfo.repository(); + break; default: throw new AssertionError("unknown sort column [" + sortBy + "]"); } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/TransportGetSnapshotsAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/TransportGetSnapshotsAction.java index 39b1ae3486823..7ac984014a770 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/TransportGetSnapshotsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/TransportGetSnapshotsAction.java @@ -492,6 +492,9 @@ private static SnapshotsInRepo buildSimpleSnapshotInfos( private static final Comparator BY_NAME = Comparator.comparing(sni -> sni.snapshotId().getName()); + private static final Comparator BY_REPOSITORY = Comparator.comparing(SnapshotInfo::repository) + .thenComparing(SnapshotInfo::snapshotId); + private static SnapshotsInRepo sortSnapshots( final List snapshotInfos, final GetSnapshotsRequest.SortBy sortBy, @@ -520,6 +523,9 @@ private static SnapshotsInRepo sortSnapshots( case FAILED_SHARDS: comparator = BY_FAILED_SHARDS_COUNT; break; + case REPOSITORY: + comparator = BY_REPOSITORY; + break; default: throw new AssertionError("unexpected sort column [" + sortBy + "]"); } @@ -570,6 +576,11 @@ private static SnapshotsInRepo sortSnapshots( order ); break; + case REPOSITORY: + isAfter = order == SortOrder.ASC + ? (info -> compareRepositoryName(snapshotName, repoName, info) < 0) + : (info -> compareRepositoryName(snapshotName, repoName, info) > 0); + break; default: throw new AssertionError("unexpected sort column [" + sortBy + "]"); } @@ -606,6 +617,14 @@ private static Predicate filterByLongOffset( }; } + private static int compareRepositoryName(String name, String repoName, SnapshotInfo info) { + final int res = repoName.compareTo(info.repository()); + if (res != 0) { + return res; + } + return name.compareTo(info.snapshotId().getName()); + } + private static int compareName(String name, String repoName, SnapshotInfo info) { final int res = name.compareTo(info.snapshotId().getName()); if (res != 0) { diff --git a/test/framework/src/main/java/org/elasticsearch/snapshots/AbstractSnapshotIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/snapshots/AbstractSnapshotIntegTestCase.java index 6d2d3ff103fed..51c714efe7ae5 100644 --- a/test/framework/src/main/java/org/elasticsearch/snapshots/AbstractSnapshotIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/snapshots/AbstractSnapshotIntegTestCase.java @@ -711,6 +711,9 @@ public static void assertSnapshotListSorted(List snapshotInfos, @N case FAILED_SHARDS: assertion = (s1, s2) -> assertThat(s2.failedShards(), greaterThanOrEqualTo(s1.failedShards())); break; + case REPOSITORY: + assertion = (s1, s2) -> assertThat(s2.repository(), greaterThanOrEqualTo(s1.repository())); + break; default: throw new AssertionError("unknown sort column [" + sort + "]"); } From 6a141236dffaeb04f69e67d9a3df1480da4e4ae9 Mon Sep 17 00:00:00 2001 From: Dan Hermann Date: Wed, 1 Sep 2021 06:18:38 -0500 Subject: [PATCH 058/128] Re-enable REST compatibility test after backport of #76752 --- modules/ingest-common/build.gradle | 6 ------ 1 file changed, 6 deletions(-) diff --git a/modules/ingest-common/build.gradle b/modules/ingest-common/build.gradle index 036cde58f9dee..8308ef46d5b02 100644 --- a/modules/ingest-common/build.gradle +++ 
b/modules/ingest-common/build.gradle @@ -47,9 +47,3 @@ tasks.named("thirdPartyAudit").configure { tasks.named("transformV7RestTests").configure({ task -> task.addAllowedWarningRegex("\\[types removal\\].*") }) - -tasks.named("yamlRestCompatTest").configure { - systemProperty 'tests.rest.blacklist', ([ - 'ingest/120_grok/Test Grok Patterns Retrieval' // un-mute this test after backporting - ]).join(',') -} From 100f222650150da6671c3cc8d2d1fae219307d99 Mon Sep 17 00:00:00 2001 From: Benjamin Trent Date: Wed, 1 Sep 2021 07:29:13 -0400 Subject: [PATCH 059/128] Adds support for the rate aggregation under a composite agg (#76992) rate aggregation should support being a sub-aggregation of a composite agg. The catch is that the composite aggregation source must be a date histogram. Other sources can be present but their must be exactly one date histogram source otherwise the rate aggregation does not know which interval to compare its unit rate to. closes https://github.com/elastic/elasticsearch/issues/76988 --- .../metrics/rate-aggregation.asciidoc | 142 +++++++++++- .../bucket/composite/CompositeAggregator.java | 35 ++- .../composite/DateHistogramValuesSource.java | 56 +++++ .../DateHistogramValuesSourceBuilder.java | 5 +- .../composite/RoundingValuesSource.java | 8 + .../rate/AbstractRateAggregator.java | 6 +- .../analytics/rate/RateAggregatorTests.java | 212 ++++++++++++++---- 7 files changed, 415 insertions(+), 49 deletions(-) create mode 100644 server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/DateHistogramValuesSource.java diff --git a/docs/reference/aggregations/metrics/rate-aggregation.asciidoc b/docs/reference/aggregations/metrics/rate-aggregation.asciidoc index ffc9d30c4bf91..cb1f903f6443c 100644 --- a/docs/reference/aggregations/metrics/rate-aggregation.asciidoc +++ b/docs/reference/aggregations/metrics/rate-aggregation.asciidoc @@ -6,10 +6,12 @@ Rate ++++ -A `rate` metrics aggregation can be used only inside a `date_histogram` and calculates a rate of documents or a field in each -`date_histogram` bucket. The field values can be generated extracted from specific numeric or +A `rate` metrics aggregation can be used only inside a `date_histogram` or `composite` aggregation. It calculates a rate of documents +or a field in each bucket. The field values can be generated extracted from specific numeric or <> in the documents. +NOTE: For `composite` aggregations, there must be exactly one `date_histogram` source for the `rate` aggregation to be supported. + ==== Syntax A `rate` aggregation looks like this in isolation: @@ -167,6 +169,142 @@ The response will contain the average daily sale prices for each month. 
-------------------------------------------------- // TESTRESPONSE[s/\.\.\./"took": $body.took,"timed_out": false,"_shards": $body._shards,"hits": $body.hits,/] +You can also take advantage of `composite` aggregations to calculate the average daily sale price for each item in +your inventory + +[source,console] +-------------------------------------------------- +GET sales/_search?filter_path=aggregations&size=0 +{ + "aggs": { + "buckets": { + "composite": { <1> + "sources": [ + { + "month": { + "date_histogram": { <2> + "field": "date", + "calendar_interval": "month" + } + } + }, + { + "type": { <3> + "terms": { + "field": "type" + } + } + } + ] + }, + "aggs": { + "avg_price": { + "rate": { + "field": "price", <4> + "unit": "day" <5> + } + } + } + } + } +} +-------------------------------------------------- +// TEST[setup:sales] +<1> Composite aggregation with a date histogram source + and a source for the item type. +<2> The date histogram source grouping monthly +<3> The terms source grouping for each sale item type +<4> Calculate sum of all sale prices, per month and item +<5> Convert to average daily sales per item + +The response will contain the average daily sale prices for each month per item. + +[source,console-result] +-------------------------------------------------- +{ + "aggregations" : { + "buckets" : { + "after_key" : { + "month" : 1425168000000, + "type" : "t-shirt" + }, + "buckets" : [ + { + "key" : { + "month" : 1420070400000, + "type" : "bag" + }, + "doc_count" : 1, + "avg_price" : { + "value" : 4.838709677419355 + } + }, + { + "key" : { + "month" : 1420070400000, + "type" : "hat" + }, + "doc_count" : 1, + "avg_price" : { + "value" : 6.451612903225806 + } + }, + { + "key" : { + "month" : 1420070400000, + "type" : "t-shirt" + }, + "doc_count" : 1, + "avg_price" : { + "value" : 6.451612903225806 + } + }, + { + "key" : { + "month" : 1422748800000, + "type" : "hat" + }, + "doc_count" : 1, + "avg_price" : { + "value" : 1.7857142857142858 + } + }, + { + "key" : { + "month" : 1422748800000, + "type" : "t-shirt" + }, + "doc_count" : 1, + "avg_price" : { + "value" : 0.35714285714285715 + } + }, + { + "key" : { + "month" : 1425168000000, + "type" : "hat" + }, + "doc_count" : 1, + "avg_price" : { + "value" : 6.451612903225806 + } + }, + { + "key" : { + "month" : 1425168000000, + "type" : "t-shirt" + }, + "doc_count" : 1, + "avg_price" : { + "value" : 5.645161290322581 + } + } + ] + } + } +} +-------------------------------------------------- + By adding the `mode` parameter with the value `value_count`, we can change the calculation from `sum` to the number of values of the field: [source,console] diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregator.java index ff3aefc847202..f550293a66b31 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregator.java @@ -33,9 +33,11 @@ import org.apache.lucene.util.Bits; import org.apache.lucene.util.RoaringDocIdSet; import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.common.Rounding; import org.elasticsearch.core.Releasables; import org.elasticsearch.index.IndexSortConfig; import org.elasticsearch.search.DocValueFormat; +import org.elasticsearch.search.aggregations.AggregationExecutionException; import 
org.elasticsearch.search.aggregations.Aggregator; import org.elasticsearch.search.aggregations.AggregatorFactories; import org.elasticsearch.search.aggregations.BucketCollector; @@ -46,6 +48,7 @@ import org.elasticsearch.search.aggregations.MultiBucketCollector; import org.elasticsearch.search.aggregations.MultiBucketConsumerService; import org.elasticsearch.search.aggregations.bucket.BucketsAggregator; +import org.elasticsearch.search.aggregations.bucket.histogram.SizedBucketAggregator; import org.elasticsearch.search.aggregations.support.AggregationContext; import org.elasticsearch.search.searchafter.SearchAfterBuilder; import org.elasticsearch.search.sort.SortAndFormats; @@ -61,7 +64,7 @@ import static org.elasticsearch.search.aggregations.MultiBucketConsumerService.MAX_BUCKET_SETTING; -final class CompositeAggregator extends BucketsAggregator { +public final class CompositeAggregator extends BucketsAggregator implements SizedBucketAggregator { private final int size; private final List sourceNames; private final int[] reverseMuls; @@ -71,6 +74,7 @@ final class CompositeAggregator extends BucketsAggregator { private final CompositeValuesSourceConfig[] sourceConfigs; private final SingleDimensionValuesSource[] sources; private final CompositeValuesCollectorQueue queue; + private final DateHistogramValuesSource[] innerSizedBucketAggregators; private final List entries = new ArrayList<>(); private LeafReaderContext currentLeaf; @@ -111,6 +115,7 @@ final class CompositeAggregator extends BucketsAggregator { ); } this.sourceConfigs = sourceConfigs; + List dateHistogramValuesSources = new ArrayList<>(); for (int i = 0; i < sourceConfigs.length; i++) { this.sources[i] = sourceConfigs[i].createValuesSource( context.bigArrays(), @@ -118,7 +123,11 @@ final class CompositeAggregator extends BucketsAggregator { size, this::addRequestCircuitBreakerBytes ); + if (this.sources[i] instanceof DateHistogramValuesSource) { + dateHistogramValuesSources.add((DateHistogramValuesSource) this.sources[i]); + } } + this.innerSizedBucketAggregators = dateHistogramValuesSources.toArray(new DateHistogramValuesSource[0]); this.queue = new CompositeValuesCollectorQueue(context.bigArrays(), sources, size); if (rawAfterKey != null) { try { @@ -547,6 +556,30 @@ public void collect(int doc, long zeroBucket) throws IOException { }; } + @Override + public double bucketSize(long bucket, Rounding.DateTimeUnit unit) { + if (innerSizedBucketAggregators.length != 1) { + throw new AggregationExecutionException( + "aggregation [" + + name() + + "] does not have exactly one date_histogram value source; exactly one is required when using with rate aggregation" + ); + } + return innerSizedBucketAggregators[0].bucketSize(bucket, unit); + } + + @Override + public double bucketSize(Rounding.DateTimeUnit unit) { + if (innerSizedBucketAggregators.length != 1) { + throw new AggregationExecutionException( + "aggregation [" + + name() + + "] does not have exactly one date_histogram value source; exactly one is required when using with rate aggregation" + ); + } + return innerSizedBucketAggregators[0].bucketSize(unit); + } + private static class Entry { final LeafReaderContext context; final DocIdSet docIdSet; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/DateHistogramValuesSource.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/DateHistogramValuesSource.java new file mode 100644 index 0000000000000..a93fd04936ea3 --- /dev/null +++ 
b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/DateHistogramValuesSource.java @@ -0,0 +1,56 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.search.aggregations.bucket.composite; + +import org.elasticsearch.common.Rounding; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.search.DocValueFormat; +import org.elasticsearch.search.aggregations.bucket.histogram.SizedBucketAggregator; + +/** + * A {@link SingleDimensionValuesSource} for date histogram values. + */ +public class DateHistogramValuesSource extends LongValuesSource implements SizedBucketAggregator { + private final RoundingValuesSource preparedRounding; + + DateHistogramValuesSource( + BigArrays bigArrays, + MappedFieldType fieldType, + RoundingValuesSource roundingValuesSource, + DocValueFormat format, + boolean missingBucket, + int size, + int reverseMul + ) { + super(bigArrays, fieldType, roundingValuesSource::longValues, roundingValuesSource::round, format, missingBucket, size, reverseMul); + this.preparedRounding = roundingValuesSource; + } + + @Override + public double bucketSize(long bucket, Rounding.DateTimeUnit unitSize) { + if (unitSize != null) { + Long value = toComparable((int) bucket); + assert value != null : "unexpected null value in composite agg bucket [" + (int) bucket + "]"; + return preparedRounding.roundingSize(value, unitSize); + } else { + return 1.0; + } + } + + @Override + public double bucketSize(Rounding.DateTimeUnit unitSize) { + if (unitSize != null) { + return preparedRounding.roundingSize(unitSize); + } else { + return 1.0; + } + } + +} diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/DateHistogramValuesSourceBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/DateHistogramValuesSourceBuilder.java index 843d6a8acce2e..eb74d3ece081a 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/DateHistogramValuesSourceBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/DateHistogramValuesSourceBuilder.java @@ -255,11 +255,10 @@ public static void register(ValuesSourceRegistry.Builder builder) { LongConsumer addRequestCircuitBreakerBytes, CompositeValuesSourceConfig compositeValuesSourceConfig) -> { final RoundingValuesSource roundingValuesSource = (RoundingValuesSource) compositeValuesSourceConfig.valuesSource(); - return new LongValuesSource( + return new DateHistogramValuesSource( bigArrays, compositeValuesSourceConfig.fieldType(), - roundingValuesSource::longValues, - roundingValuesSource::round, + roundingValuesSource, compositeValuesSourceConfig.format(), compositeValuesSourceConfig.missingBucket(), size, diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/RoundingValuesSource.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/RoundingValuesSource.java index 770830e93a1c1..df1fafe941931 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/RoundingValuesSource.java +++ 
b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/RoundingValuesSource.java @@ -44,6 +44,14 @@ public long round(long value) { return rounding.round(value); } + public double roundingSize(long milliSeconds, Rounding.DateTimeUnit unit) { + return rounding.roundingSize(milliSeconds, unit); + } + + public double roundingSize(Rounding.DateTimeUnit unit) { + return rounding.roundingSize(unit); + } + @Override public SortedNumericDocValues longValues(LeafReaderContext context) throws IOException { SortedNumericDocValues values = vs.longValues(context); diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/rate/AbstractRateAggregator.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/rate/AbstractRateAggregator.java index d07cfd83f04f6..5062f9d788a18 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/rate/AbstractRateAggregator.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/rate/AbstractRateAggregator.java @@ -66,7 +66,10 @@ private SizedBucketAggregator findSizedBucketAncestor() { } } if (sizedBucketAggregator == null) { - throw new IllegalArgumentException("The rate aggregation can only be used inside a date histogram"); + throw new IllegalArgumentException( + "The rate aggregation can only be used inside a date histogram aggregation or " + + "composite aggregation with one date histogram value source" + ); } return sizedBucketAggregator; } @@ -109,4 +112,5 @@ public InternalAggregation buildEmptyAggregation() { public void doClose() { Releasables.close(sums, compensations); } + } diff --git a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/rate/RateAggregatorTests.java b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/rate/RateAggregatorTests.java index 2c44ed3843444..d83118d1f1030 100644 --- a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/rate/RateAggregatorTests.java +++ b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/rate/RateAggregatorTests.java @@ -35,8 +35,17 @@ import org.elasticsearch.script.ScriptModule; import org.elasticsearch.script.ScriptService; import org.elasticsearch.script.ScriptType; +import org.elasticsearch.search.aggregations.AbstractAggregationBuilder; +import org.elasticsearch.search.aggregations.AggregationExecutionException; import org.elasticsearch.search.aggregations.AggregatorTestCase; import org.elasticsearch.search.aggregations.BucketOrder; +import org.elasticsearch.search.aggregations.InternalMultiBucketAggregation; +import org.elasticsearch.search.aggregations.bucket.composite.CompositeAggregationBuilder; +import org.elasticsearch.search.aggregations.bucket.composite.CompositeValuesSourceBuilder; +import org.elasticsearch.search.aggregations.bucket.composite.DateHistogramValuesSourceBuilder; +import org.elasticsearch.search.aggregations.bucket.composite.HistogramValuesSourceBuilder; +import org.elasticsearch.search.aggregations.bucket.composite.InternalComposite; +import org.elasticsearch.search.aggregations.bucket.composite.TermsValuesSourceBuilder; import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval; import org.elasticsearch.search.aggregations.bucket.histogram.InternalDateHistogram; @@ -152,19 +161,18 @@ public void testDocValuesMonthToMonthValueCount() throws IOException { 
RateAggregationBuilder rateAggregationBuilder = new RateAggregationBuilder("my_rate").rateUnit("month") .field("val") .rateMode("value_count"); + AbstractAggregationBuilder dateHistogramAggregationBuilder = randomValidMultiBucketAggBuilder( + rateAggregationBuilder, + new DateHistogramInterval("month") + ); - DateHistogramAggregationBuilder dateHistogramAggregationBuilder = new DateHistogramAggregationBuilder("my_date"); - dateHistogramAggregationBuilder.field(DATE_FIELD); - dateHistogramAggregationBuilder.calendarInterval(new DateHistogramInterval("month")); - - dateHistogramAggregationBuilder.subAggregation(rateAggregationBuilder); testCase(dateHistogramAggregationBuilder, new MatchAllDocsQuery(), iw -> { iw.addDocument(doc("2010-03-12T01:07:45", new SortedNumericDocValuesField("val", 1))); iw.addDocument(doc("2010-04-01T03:43:34", new SortedNumericDocValuesField("val", 3))); iw.addDocument( doc("2010-04-27T03:43:34", new SortedNumericDocValuesField("val", 4), new SortedNumericDocValuesField("val", 5)) ); - }, (Consumer) dh -> { + }, (Consumer>) dh -> { assertThat(dh.getBuckets(), hasSize(2)); assertThat(((InternalRate) dh.getBuckets().get(0).getAggregations().asList().get(0)).value(), closeTo(1.0, 0.000001)); assertThat(((InternalRate) dh.getBuckets().get(1).getAggregations().asList().get(0)).value(), closeTo(3.0, 0.000001)); @@ -270,7 +278,7 @@ public void testNoFieldMonthToDay() throws IOException { }); } - public void testNoWrapping() throws IOException { + public void testNoWrapping() { MappedFieldType numType = new NumberFieldMapper.NumberFieldType("val", NumberFieldMapper.NumberType.INTEGER); MappedFieldType dateType = dateFieldType(DATE_FIELD); RateAggregationBuilder rateAggregationBuilder = new RateAggregationBuilder("my_rate").rateUnit("day"); @@ -282,7 +290,40 @@ public void testNoWrapping() throws IOException { iw.addDocument(doc("2010-04-27T03:43:34", new NumericDocValuesField("val", 4))); }, h -> { fail("Shouldn't be here"); }, dateType, numType) ); - assertEquals("The rate aggregation can only be used inside a date histogram", ex.getMessage()); + assertEquals( + "The rate aggregation can only be used inside a date histogram aggregation or " + + "composite aggregation with one date histogram value source", + ex.getMessage() + ); + } + + public void testCompositeAggregationWithNoDateHistogramValueSources() { + MappedFieldType numType = new NumberFieldMapper.NumberFieldType("val", NumberFieldMapper.NumberType.INTEGER); + MappedFieldType dateType = dateFieldType(DATE_FIELD); + RateAggregationBuilder rateAggregationBuilder = new RateAggregationBuilder("my_rate").rateUnit("day"); + List> valuesSourceBuilders = randomBoolean() + ? 
Collections.singletonList(new HistogramValuesSourceBuilder("histo").field("val")) + : Arrays.asList( + new DateHistogramValuesSourceBuilder("my_date").field(DATE_FIELD).calendarInterval(new DateHistogramInterval("month")), + new DateHistogramValuesSourceBuilder("my_date2").field(DATE_FIELD).calendarInterval(new DateHistogramInterval("month")), + new HistogramValuesSourceBuilder("histo").field("val") + ); + + CompositeAggregationBuilder compositeAggregationBuilder = new CompositeAggregationBuilder("my_buckets", valuesSourceBuilders) + .subAggregation(rateAggregationBuilder); + AggregationExecutionException ex = expectThrows( + AggregationExecutionException.class, + () -> testCase(compositeAggregationBuilder, new MatchAllDocsQuery(), iw -> { + iw.addDocument(doc("2010-03-12T01:07:45", new NumericDocValuesField("val", 1))); + iw.addDocument(doc("2010-04-01T03:43:34", new NumericDocValuesField("val", 3))); + iw.addDocument(doc("2010-04-27T03:43:34", new NumericDocValuesField("val", 4))); + }, h -> fail("Shouldn't be here"), dateType, numType) + ); + assertEquals( + ex.getMessage(), + "aggregation [my_buckets] does not have exactly one date_histogram value source; " + + "exactly one is required when using with rate aggregation" + ); } public void testDoubleWrapping() throws IOException { @@ -378,6 +419,75 @@ public void testKeywordSandwich() throws IOException { }, dateType, numType, keywordType); } + public void testWithComposite() throws IOException { + MappedFieldType numType = new NumberFieldMapper.NumberFieldType("val", NumberFieldMapper.NumberType.INTEGER); + MappedFieldType dateType = dateFieldType(DATE_FIELD); + MappedFieldType keywordType = new KeywordFieldMapper.KeywordFieldType("term"); + RateAggregationBuilder rateAggregationBuilder = new RateAggregationBuilder("my_rate").rateUnit("month").field("val"); + if (randomBoolean()) { + rateAggregationBuilder.rateMode("sum"); + } + CompositeAggregationBuilder compositeAggregationBuilder = new CompositeAggregationBuilder( + "my_buckets", + Arrays.asList( + new DateHistogramValuesSourceBuilder("my_date").field(DATE_FIELD).calendarInterval(new DateHistogramInterval("month")), + new TermsValuesSourceBuilder("my_term").field("term") + ) + ).subAggregation(rateAggregationBuilder); + + testCase(compositeAggregationBuilder, new MatchAllDocsQuery(), iw -> { + iw.addDocument( + doc( + "2010-03-11T01:07:45", + new NumericDocValuesField("val", 1), + new IntPoint("val", 1), + new SortedSetDocValuesField("term", new BytesRef("a")) + ) + ); + iw.addDocument( + doc( + "2010-03-12T01:07:45", + new NumericDocValuesField("val", 2), + new IntPoint("val", 2), + new SortedSetDocValuesField("term", new BytesRef("a")) + ) + ); + iw.addDocument( + doc( + "2010-04-01T03:43:34", + new NumericDocValuesField("val", 3), + new IntPoint("val", 3), + new SortedSetDocValuesField("term", new BytesRef("a")) + ) + ); + iw.addDocument( + doc( + "2010-04-27T03:43:34", + new NumericDocValuesField("val", 4), + new IntPoint("val", 4), + new SortedSetDocValuesField("term", new BytesRef("b")) + ) + ); + }, (Consumer) composite -> { + assertThat(composite.getBuckets(), hasSize(3)); + // Monthly rate of term a in the first month + assertThat( + ((InternalRate) composite.getBuckets().get(0).getAggregations().asList().get(0)).getValue(), + closeTo(3.0, 0.0000001) + ); + // Monthly rate of term a in the second month + assertThat( + ((InternalRate) composite.getBuckets().get(1).getAggregations().asList().get(0)).getValue(), + closeTo(3.0, 0.0000001) + ); + // Monthly rate of term b in the 
second month + assertThat( + ((InternalRate) composite.getBuckets().get(2).getAggregations().asList().get(0)).getValue(), + closeTo(4.0, 0.0000001) + ); + }, dateType, numType, keywordType); + } + public void testUnsupportedKeywordSandwich() throws IOException { String rate; String histogram; @@ -563,17 +673,17 @@ public void testFilter() throws IOException { if (randomBoolean()) { rateAggregationBuilder.rateMode("sum"); } - - DateHistogramAggregationBuilder dateHistogramAggregationBuilder = new DateHistogramAggregationBuilder("my_date").field(DATE_FIELD) - .calendarInterval(new DateHistogramInterval("month")) - .subAggregation(rateAggregationBuilder); + AbstractAggregationBuilder dateHistogramAggregationBuilder = randomValidMultiBucketAggBuilder( + rateAggregationBuilder, + new DateHistogramInterval("month") + ); testCase(dateHistogramAggregationBuilder, new TermQuery(new Term("term", "a")), iw -> { iw.addDocument(doc("2010-03-11T01:07:45", new NumericDocValuesField("val", 1), new StringField("term", "a", Field.Store.NO))); iw.addDocument(doc("2010-03-12T01:07:45", new NumericDocValuesField("val", 2), new StringField("term", "a", Field.Store.NO))); iw.addDocument(doc("2010-04-01T03:43:34", new NumericDocValuesField("val", 3), new StringField("term", "a", Field.Store.NO))); iw.addDocument(doc("2010-04-27T03:43:34", new NumericDocValuesField("val", 4), new StringField("term", "b", Field.Store.NO))); - }, (Consumer) dh -> { + }, (Consumer>) dh -> { assertThat(dh.getBuckets(), hasSize(2)); assertThat(((InternalRate) dh.getBuckets().get(0).getAggregations().asList().get(0)).value(), closeTo(3.0, 0.000001)); assertThat(((InternalRate) dh.getBuckets().get(1).getAggregations().asList().get(0)).value(), closeTo(3.0, 0.000001)); @@ -590,16 +700,17 @@ public void testFormatter() throws IOException { rateAggregationBuilder.rateMode("sum"); } - DateHistogramAggregationBuilder dateHistogramAggregationBuilder = new DateHistogramAggregationBuilder("my_date").field(DATE_FIELD) - .calendarInterval(new DateHistogramInterval("month")) - .subAggregation(rateAggregationBuilder); + AbstractAggregationBuilder dateHistogramAggregationBuilder = randomValidMultiBucketAggBuilder( + rateAggregationBuilder, + new DateHistogramInterval("month") + ); testCase(dateHistogramAggregationBuilder, new MatchAllDocsQuery(), iw -> { iw.addDocument(doc("2010-03-11T01:07:45", new NumericDocValuesField("val", 1))); iw.addDocument(doc("2010-03-12T01:07:45", new NumericDocValuesField("val", 2))); iw.addDocument(doc("2010-04-01T03:43:34", new NumericDocValuesField("val", 3))); iw.addDocument(doc("2010-04-27T03:43:34", new NumericDocValuesField("val", 4))); - }, (Consumer) dh -> { + }, (Consumer>) dh -> { assertThat(dh.getBuckets(), hasSize(2)); assertThat(((InternalRate) dh.getBuckets().get(0).getAggregations().asList().get(0)).getValueAsString(), equalTo("03.0/M")); assertThat(((InternalRate) dh.getBuckets().get(1).getAggregations().asList().get(0)).getValueAsString(), equalTo("07.0/M")); @@ -614,14 +725,15 @@ public void testHistogramFieldMonthToMonth() throws IOException { rateAggregationBuilder.rateMode("sum"); } - DateHistogramAggregationBuilder dateHistogramAggregationBuilder = new DateHistogramAggregationBuilder("my_date").field(DATE_FIELD) - .calendarInterval(new DateHistogramInterval("month")) - .subAggregation(rateAggregationBuilder); + AbstractAggregationBuilder dateHistogramAggregationBuilder = randomValidMultiBucketAggBuilder( + rateAggregationBuilder, + new DateHistogramInterval("month") + ); 
testCase(dateHistogramAggregationBuilder, new MatchAllDocsQuery(), iw -> { iw.addDocument(doc("2010-03-01T00:00:00", histogramFieldDocValues("val", new double[] { 1, 2 }))); iw.addDocument(doc("2010-04-01T00:00:00", histogramFieldDocValues("val", new double[] { 3, 4 }))); - }, (Consumer) dh -> { + }, (Consumer>) dh -> { assertThat(dh.getBuckets(), hasSize(2)); assertThat(((InternalRate) dh.getBuckets().get(0).getAggregations().asList().get(0)).getValue(), closeTo(3.0, 0.000001)); assertThat(((InternalRate) dh.getBuckets().get(1).getAggregations().asList().get(0)).getValue(), closeTo(7.0, 0.000001)); @@ -635,15 +747,14 @@ public void testHistogramFieldMonthToYear() throws IOException { if (randomBoolean()) { rateAggregationBuilder.rateMode("sum"); } - - DateHistogramAggregationBuilder dateHistogramAggregationBuilder = new DateHistogramAggregationBuilder("my_date").field(DATE_FIELD) - .calendarInterval(new DateHistogramInterval("year")) - .subAggregation(rateAggregationBuilder); - + AbstractAggregationBuilder dateHistogramAggregationBuilder = randomValidMultiBucketAggBuilder( + rateAggregationBuilder, + new DateHistogramInterval("year") + ); testCase(dateHistogramAggregationBuilder, new MatchAllDocsQuery(), iw -> { iw.addDocument(doc("2010-03-01T00:00:00", histogramFieldDocValues("val", new double[] { 1, 2 }))); iw.addDocument(doc("2010-04-01T00:00:00", histogramFieldDocValues("val", new double[] { 3, 4 }))); - }, (Consumer) dh -> { + }, (Consumer>) dh -> { assertThat(dh.getBuckets(), hasSize(1)); assertThat(((InternalRate) dh.getBuckets().get(0).getAggregations().asList().get(0)).getValue(), closeTo(10.0 / 12, 0.000001)); }, dateType, histType); @@ -656,14 +767,15 @@ public void testHistogramFieldMonthToMonthValueCount() throws IOException { .rateMode("value_count") .field("val"); - DateHistogramAggregationBuilder dateHistogramAggregationBuilder = new DateHistogramAggregationBuilder("my_date").field(DATE_FIELD) - .calendarInterval(new DateHistogramInterval("month")) - .subAggregation(rateAggregationBuilder); + AbstractAggregationBuilder dateHistogramAggregationBuilder = randomValidMultiBucketAggBuilder( + rateAggregationBuilder, + new DateHistogramInterval("month") + ); testCase(dateHistogramAggregationBuilder, new MatchAllDocsQuery(), iw -> { iw.addDocument(doc("2010-03-01T00:00:00", histogramFieldDocValues("val", new double[] { 1, 2 }))); iw.addDocument(doc("2010-04-01T00:00:00", histogramFieldDocValues("val", new double[] { 3, 4, 5 }))); - }, (Consumer) dh -> { + }, (Consumer>) dh -> { assertThat(dh.getBuckets(), hasSize(2)); assertThat(((InternalRate) dh.getBuckets().get(0).getAggregations().asList().get(0)).getValue(), closeTo(2.0, 0.000001)); assertThat(((InternalRate) dh.getBuckets().get(1).getAggregations().asList().get(0)).getValue(), closeTo(3.0, 0.000001)); @@ -677,14 +789,15 @@ public void testHistogramFieldMonthToYearValueCount() throws IOException { .rateMode("value_count") .field("val"); - DateHistogramAggregationBuilder dateHistogramAggregationBuilder = new DateHistogramAggregationBuilder("my_date").field(DATE_FIELD) - .calendarInterval(new DateHistogramInterval("year")) - .subAggregation(rateAggregationBuilder); + AbstractAggregationBuilder dateHistogramAggregationBuilder = randomValidMultiBucketAggBuilder( + rateAggregationBuilder, + new DateHistogramInterval("year") + ); testCase(dateHistogramAggregationBuilder, new MatchAllDocsQuery(), iw -> { iw.addDocument(doc("2010-03-01T00:00:00", histogramFieldDocValues("val", new double[] { 1, 2 }))); 
iw.addDocument(doc("2010-04-01T00:00:00", histogramFieldDocValues("val", new double[] { 3, 4, 5 }))); - }, (Consumer) dh -> { + }, (Consumer>) dh -> { assertThat(dh.getBuckets(), hasSize(1)); assertThat(((InternalRate) dh.getBuckets().get(0).getAggregations().asList().get(0)).getValue(), closeTo(5.0 / 12, 0.000001)); }, dateType, histType); @@ -696,9 +809,10 @@ public void testFilterWithHistogramField() throws IOException { MappedFieldType keywordType = new KeywordFieldMapper.KeywordFieldType("term"); RateAggregationBuilder rateAggregationBuilder = new RateAggregationBuilder("my_rate").rateUnit("month").field("val"); - DateHistogramAggregationBuilder dateHistogramAggregationBuilder = new DateHistogramAggregationBuilder("my_date").field(DATE_FIELD) - .calendarInterval(new DateHistogramInterval("month")) - .subAggregation(rateAggregationBuilder); + AbstractAggregationBuilder dateHistogramAggregationBuilder = randomValidMultiBucketAggBuilder( + rateAggregationBuilder, + new DateHistogramInterval("month") + ); testCase(dateHistogramAggregationBuilder, new TermQuery(new Term("term", "a")), iw -> { iw.addDocument( @@ -714,7 +828,7 @@ public void testFilterWithHistogramField() throws IOException { iw.addDocument( doc("2010-04-01T00:00:00", histogramFieldDocValues("val", new double[] { 4 }), new StringField("term", "b", Field.Store.NO)) ); - }, (Consumer) dh -> { + }, (Consumer>) dh -> { assertThat(dh.getBuckets(), hasSize(2)); assertThat(((InternalRate) dh.getBuckets().get(0).getAggregations().asList().get(0)).value(), closeTo(3.0, 0.000001)); assertThat(((InternalRate) dh.getBuckets().get(1).getAggregations().asList().get(0)).value(), closeTo(3.0, 0.000001)); @@ -726,9 +840,10 @@ public void testModeWithoutField() { MappedFieldType numType = new NumberFieldMapper.NumberFieldType("val", NumberFieldMapper.NumberType.INTEGER); RateAggregationBuilder rateAggregationBuilder = new RateAggregationBuilder("my_rate").rateUnit("month").rateMode("sum"); - DateHistogramAggregationBuilder dateHistogramAggregationBuilder = new DateHistogramAggregationBuilder("my_date").field(DATE_FIELD) - .calendarInterval(new DateHistogramInterval("month")) - .subAggregation(rateAggregationBuilder); + AbstractAggregationBuilder dateHistogramAggregationBuilder = randomValidMultiBucketAggBuilder( + rateAggregationBuilder, + new DateHistogramInterval("month") + ); IllegalArgumentException ex = expectThrows( IllegalArgumentException.class, @@ -744,6 +859,19 @@ public void testModeWithoutField() { assertEquals("The mode parameter is only supported with field or script", ex.getMessage()); } + private static AbstractAggregationBuilder randomValidMultiBucketAggBuilder( + RateAggregationBuilder rateAggregationBuilder, + DateHistogramInterval interval + ) { + AbstractAggregationBuilder dateHistogramAggregationBuilder = randomBoolean() + ? new DateHistogramAggregationBuilder("my_date").field(DATE_FIELD).calendarInterval(interval) + : new CompositeAggregationBuilder( + "my_date", + List.of(new DateHistogramValuesSourceBuilder("my_date").field(DATE_FIELD).calendarInterval(interval)) + ); + return dateHistogramAggregationBuilder.subAggregation(rateAggregationBuilder); + } + private void testCase( Query query, String interval, From 2d701140967a6fc20a8ea75b43666a54c5c3d878 Mon Sep 17 00:00:00 2001 From: Benjamin Trent Date: Wed, 1 Sep 2021 07:51:37 -0400 Subject: [PATCH 060/128] [ML] track feature usage for data frame analytics, inference, and anomaly jobs (#76789) This adds feature tracking for machine learning features. 
Model Snapshot upgrader Anomaly jobs Data frame analytics jobs Can all take advantage of the license state tracking built for persistent tasks. The ModelLoadingService needed special handling to ensure that models cached and referenced by pipelines are tracked. License tracking is done per-node and allows for a simple view into when a feature was last used on a given node. --- .../license/GetFeatureUsageResponse.java | 30 ++- .../license/LicensedFeature.java | 24 +- .../TransportGetFeatureUsageAction.java | 6 +- .../license/XPackLicenseState.java | 6 +- .../core/ml/inference/TrainedModelConfig.java | 3 + .../license/GetFeatureUsageResponseTests.java | 10 +- .../license/XPackLicenseStateTests.java | 4 +- .../ml/inference/TrainedModelConfigTests.java | 2 - .../integration/ModelInferenceActionIT.java | 2 +- .../TestFeatureLicenseTrackingIT.java | 239 ++++++++++++++++++ .../xpack/ml/MachineLearning.java | 30 ++- ...ransportStartDataFrameAnalyticsAction.java | 7 +- .../ml/dataframe/DataFrameAnalyticsTask.java | 38 +-- .../loadingservice/ModelLoadingService.java | 31 ++- .../upgrader/SnapshotUpgradeTask.java | 35 ++- .../upgrader/SnapshotUpgradeTaskExecutor.java | 9 +- .../xpack/ml/job/task/JobTask.java | 13 +- ...ortStartDataFrameAnalyticsActionTests.java | 5 +- .../DataFrameAnalyticsTaskTests.java | 5 +- .../ModelLoadingServiceTests.java | 40 ++- 20 files changed, 454 insertions(+), 85 deletions(-) create mode 100644 x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/TestFeatureLicenseTrackingIT.java diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/GetFeatureUsageResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/GetFeatureUsageResponse.java index 6984e1117a89f..1b66ebd527b1a 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/GetFeatureUsageResponse.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/GetFeatureUsageResponse.java @@ -27,11 +27,11 @@ public class GetFeatureUsageResponse extends ActionResponse implements ToXContentObject { public static class FeatureUsageInfo implements Writeable { - final String family; - final String name; - final ZonedDateTime lastUsedTime; - final String context; - final String licenseLevel; + private final String family; + private final String name; + private final ZonedDateTime lastUsedTime; + private final String context; + private final String licenseLevel; public FeatureUsageInfo(@Nullable String family, String name, ZonedDateTime lastUsedTime, @Nullable String context, String licenseLevel) { @@ -70,6 +70,26 @@ public void writeTo(StreamOutput out) throws IOException { } out.writeString(licenseLevel); } + + public String getFamily() { + return family; + } + + public String getName() { + return name; + } + + public ZonedDateTime getLastUsedTime() { + return lastUsedTime; + } + + public String getContext() { + return context; + } + + public String getLicenseLevel() { + return licenseLevel; + } } private List features; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/LicensedFeature.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/LicensedFeature.java index 24b95d9d55983..6f3e18c825d09 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/LicensedFeature.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/LicensedFeature.java @@ -76,10 +76,10 @@ public void stopTracking(XPackLicenseState state, String contextName) { } } - final String family; - final String name; - 
final License.OperationMode minimumOperationMode; - final boolean needsActive; + private final String family; + private final String name; + private final License.OperationMode minimumOperationMode; + private final boolean needsActive; protected LicensedFeature(String family, String name, License.OperationMode minimumOperationMode, boolean needsActive) { this.family = family; @@ -88,6 +88,22 @@ protected LicensedFeature(String family, String name, License.OperationMode mini this.needsActive = needsActive; } + public String getFamily() { + return family; + } + + public String getName() { + return name; + } + + public License.OperationMode getMinimumOperationMode() { + return minimumOperationMode; + } + + public boolean isNeedsActive() { + return needsActive; + } + /** Create a momentary feature for hte given license level */ public static Momentary momentary(String family, String name, License.OperationMode licenseLevel) { return new Momentary(family, name, licenseLevel, true); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/TransportGetFeatureUsageAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/TransportGetFeatureUsageAction.java index e0efc7e94358c..20c67447fa3db 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/TransportGetFeatureUsageAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/TransportGetFeatureUsageAction.java @@ -45,11 +45,11 @@ protected void doExecute(Task task, GetFeatureUsageRequest request, ActionListen ZonedDateTime lastUsedTime = Instant.ofEpochMilli(lastUsed).atZone(ZoneOffset.UTC); usageInfos.add( new GetFeatureUsageResponse.FeatureUsageInfo( - usage.feature().family, - usage.feature().name, + usage.feature().getFamily(), + usage.feature().getName(), lastUsedTime, usage.contextName(), - usage.feature().minimumOperationMode.description() + usage.feature().getMinimumOperationMode().description() ) ); }); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/XPackLicenseState.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/XPackLicenseState.java index aae683e11262f..e1e052919841e 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/XPackLicenseState.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/XPackLicenseState.java @@ -511,7 +511,7 @@ public boolean isAllowed(Feature feature) { // Package protected: Only allowed to be called by LicensedFeature boolean isAllowed(LicensedFeature feature) { - if (isAllowedByLicense(feature.minimumOperationMode, feature.needsActive)) { + if (isAllowedByLicense(feature.getMinimumOperationMode(), feature.isNeedsActive())) { return true; } return false; @@ -521,7 +521,7 @@ private void checkForExpiry(LicensedFeature feature) { final long licenseExpiryDate = getLicenseExpiryDate(); // TODO: this should use epochMillisProvider to avoid a system call + testability final long diff = licenseExpiryDate - System.currentTimeMillis(); - if (feature.minimumOperationMode.compareTo(OperationMode.BASIC) > 0 && + if (feature.getMinimumOperationMode().compareTo(OperationMode.BASIC) > 0 && LICENSE_EXPIRATION_WARNING_PERIOD.getMillis() > diff) { final long days = TimeUnit.MILLISECONDS.toDays(diff); final String expiryMessage = (days == 0 && diff > 0)? "expires today": @@ -640,7 +640,7 @@ private FeatureUsage(LicensedFeature feature, String context) { @Override public String toString() { - return context == null ? feature.name : feature.name + ":" + context; + return context == null ? 
feature.getName() : feature.getName() + ":" + context; } @Override diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/TrainedModelConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/TrainedModelConfig.java index e1238e52b009f..e09b16957770f 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/TrainedModelConfig.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/TrainedModelConfig.java @@ -192,6 +192,8 @@ public static TrainedModelConfig.Builder fromXContent(XContentParser parser, boo } this.estimatedOperations = estimatedOperations; this.licenseLevel = License.OperationMode.parse(ExceptionsHelper.requireNonNull(licenseLevel, LICENSE_LEVEL)); + assert this.licenseLevel.equals(License.OperationMode.PLATINUM) || this.licenseLevel.equals(License.OperationMode.BASIC) : + "[" + LICENSE_LEVEL.getPreferredName() + "] only [platinum] or [basic] is supported"; this.defaultFieldMap = defaultFieldMap == null ? null : Collections.unmodifiableMap(defaultFieldMap); this.inferenceConfig = inferenceConfig; this.location = location; @@ -330,6 +332,7 @@ public long getEstimatedOperations() { return estimatedOperations; } + //TODO if we ever support anything other than "basic" and platinum, we need to adjust our feature tracking logic public License.OperationMode getLicenseLevel() { return licenseLevel; } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/license/GetFeatureUsageResponseTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/license/GetFeatureUsageResponseTests.java index 424994cd6bc60..f9d7d309711d2 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/license/GetFeatureUsageResponseTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/license/GetFeatureUsageResponseTests.java @@ -37,12 +37,12 @@ public void assertStreamInputOutput(Version version, String family, String conte GetFeatureUsageResponse finalResponse = new GetFeatureUsageResponse(input); assertThat(finalResponse.getFeatures(), hasSize(1)); FeatureUsageInfo fui2 = finalResponse.getFeatures().get(0); - assertThat(fui2.family, equalTo(family)); - assertThat(fui2.name, equalTo("feature")); + assertThat(fui2.getFamily(), equalTo(family)); + assertThat(fui2.getName(), equalTo("feature")); // time is truncated to nearest second - assertThat(fui2.lastUsedTime, equalTo(zdt.withZoneSameInstant(ZoneOffset.UTC).withNano(0))); - assertThat(fui2.context, equalTo(context)); - assertThat(fui2.licenseLevel, equalTo("gold")); + assertThat(fui2.getLastUsedTime(), equalTo(zdt.withZoneSameInstant(ZoneOffset.UTC).withNano(0))); + assertThat(fui2.getContext(), equalTo(context)); + assertThat(fui2.getLicenseLevel(), equalTo("gold")); } public void testPre715StreamFormat() throws IOException { diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/license/XPackLicenseStateTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/license/XPackLicenseStateTests.java index bd54860324feb..80c641b3b3be2 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/license/XPackLicenseStateTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/license/XPackLicenseStateTests.java @@ -459,7 +459,7 @@ public void testLastUsedMomentaryFeature() { assertThat("feature.check tracks usage", lastUsed, aMapWithSize(1)); XPackLicenseState.FeatureUsage usage = Iterables.get(lastUsed.keySet(), 0); - assertThat(usage.feature().name, equalTo("goldFeature")); + 
assertThat(usage.feature().getName(), equalTo("goldFeature")); assertThat(usage.contextName(), nullValue()); assertThat(lastUsed.get(usage), equalTo(100L)); @@ -487,7 +487,7 @@ public void testLastUsedPersistentFeature() { assertThat(lastUsed, aMapWithSize(1)); XPackLicenseState.FeatureUsage usage = Iterables.get(lastUsed.keySet(), 0); - assertThat(usage.feature().name, equalTo("goldFeature")); + assertThat(usage.feature().getName(), equalTo("goldFeature")); assertThat(usage.contextName(), equalTo("somecontext")); assertThat(lastUsed.get(usage), equalTo(200L)); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/TrainedModelConfigTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/TrainedModelConfigTests.java index a320b4557dfd8..53ab83efb81d1 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/TrainedModelConfigTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/TrainedModelConfigTests.java @@ -69,8 +69,6 @@ public static TrainedModelConfig.Builder createTestInstance(String modelId) { .setEstimatedHeapMemory(randomNonNegativeLong()) .setEstimatedOperations(randomNonNegativeLong()) .setLicenseLevel(randomFrom(License.OperationMode.PLATINUM.description(), - License.OperationMode.ENTERPRISE.description(), - License.OperationMode.GOLD.description(), License.OperationMode.BASIC.description())) .setInferenceConfig(randomFrom(ClassificationConfigTests.randomClassificationConfig(), RegressionConfigTests.randomRegressionConfig())) diff --git a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/ModelInferenceActionIT.java b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/ModelInferenceActionIT.java index eb9bef0268779..f85b87961e9af 100644 --- a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/ModelInferenceActionIT.java +++ b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/ModelInferenceActionIT.java @@ -355,7 +355,7 @@ public void testInferMissingFields() throws Exception { } } - private static TrainedModelConfig.Builder buildTrainedModelConfigBuilder(String modelId) { + static TrainedModelConfig.Builder buildTrainedModelConfigBuilder(String modelId) { return TrainedModelConfig.builder() .setCreatedBy("ml_test") .setParsedDefinition(TrainedModelDefinitionTests.createRandomBuilder()) diff --git a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/TestFeatureLicenseTrackingIT.java b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/TestFeatureLicenseTrackingIT.java new file mode 100644 index 0000000000000..9af81a3a64529 --- /dev/null +++ b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/TestFeatureLicenseTrackingIT.java @@ -0,0 +1,239 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.ml.integration; + +import org.apache.logging.log4j.message.ParameterizedMessage; +import org.elasticsearch.action.ingest.DeletePipelineAction; +import org.elasticsearch.action.ingest.DeletePipelineRequest; +import org.elasticsearch.action.ingest.PutPipelineAction; +import org.elasticsearch.action.ingest.PutPipelineRequest; +import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.license.GetFeatureUsageRequest; +import org.elasticsearch.license.GetFeatureUsageResponse; +import org.elasticsearch.license.TransportGetFeatureUsageAction; +import org.elasticsearch.xpack.core.ml.action.CloseJobAction; +import org.elasticsearch.xpack.core.ml.action.GetJobsStatsAction; +import org.elasticsearch.xpack.core.ml.action.OpenJobAction; +import org.elasticsearch.xpack.core.ml.action.PutJobAction; +import org.elasticsearch.xpack.core.ml.action.PutTrainedModelAction; +import org.elasticsearch.xpack.core.ml.inference.TrainedModelConfig; +import org.elasticsearch.xpack.core.ml.inference.TrainedModelDefinition; +import org.elasticsearch.xpack.core.ml.inference.TrainedModelInput; +import org.elasticsearch.xpack.core.ml.inference.preprocessing.OneHotEncoding; +import org.elasticsearch.xpack.core.ml.inference.trainedmodel.ClassificationConfig; +import org.elasticsearch.xpack.core.ml.job.config.Job; +import org.elasticsearch.xpack.core.ml.job.config.JobState; +import org.elasticsearch.xpack.ml.MachineLearning; +import org.elasticsearch.xpack.ml.MlSingleNodeTestCase; +import org.junit.After; + +import java.time.ZonedDateTime; +import java.util.Arrays; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; + +import static org.elasticsearch.xpack.ml.MachineLearning.ML_FEATURE_FAMILY; +import static org.elasticsearch.xpack.ml.inference.loadingservice.LocalModelTests.buildClassification; +import static org.elasticsearch.xpack.ml.integration.ModelInferenceActionIT.buildTrainedModelConfigBuilder; +import static org.elasticsearch.xpack.ml.support.BaseMlIntegTestCase.createScheduledJob; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.lessThan; +import static org.hamcrest.Matchers.not; +import static org.hamcrest.Matchers.nullValue; + +public class TestFeatureLicenseTrackingIT extends MlSingleNodeTestCase { + + private final Set createdPipelines = new HashSet<>(); + @After + public void cleanup() { + for (String pipeline : createdPipelines) { + try { + client().execute(DeletePipelineAction.INSTANCE, new DeletePipelineRequest(pipeline)).actionGet(); + } catch (Exception ex) { + logger.warn(() -> new ParameterizedMessage("error cleaning up pipeline [{}]", pipeline), ex); + } + } + } + + public void testFeatureTrackingAnomalyJob() throws Exception { + putAndStartJob("job-feature-usage"); + GetFeatureUsageResponse.FeatureUsageInfo mlFeatureUsage = getFeatureUsageInfo() + .stream() + .filter(f -> f.getFamily().equals(ML_FEATURE_FAMILY)) + .filter(f -> f.getName().equals(MachineLearning.ML_ANOMALY_JOBS_FEATURE.getName())) + .findAny() + .orElse(null); + assertThat(mlFeatureUsage, is(not(nullValue()))); + assertThat(mlFeatureUsage.getContext(), containsString("job-feature-usage")); + // While the job is opened, the lastUsage moves forward to "now". 
Verify it does that + ZonedDateTime lastUsage = mlFeatureUsage.getLastUsedTime(); + assertBusy(() -> { + ZonedDateTime recentUsage = getFeatureUsageInfo() + .stream() + .filter(f -> f.getFamily().equals(ML_FEATURE_FAMILY)) + .filter(f -> f.getName().equals(MachineLearning.ML_ANOMALY_JOBS_FEATURE.getName())) + .map(GetFeatureUsageResponse.FeatureUsageInfo::getLastUsedTime) + .findAny() + .orElse(null); + assertThat(recentUsage, is(not(nullValue()))); + assertThat(lastUsage.toInstant(), lessThan(recentUsage.toInstant())); + }); + + client().execute(CloseJobAction.INSTANCE, new CloseJobAction.Request("job-feature-usage")).actionGet(); + + mlFeatureUsage = getFeatureUsageInfo() + .stream() + .filter(f -> f.getFamily().equals(ML_FEATURE_FAMILY)) + .filter(f -> f.getName().equals(MachineLearning.ML_ANOMALY_JOBS_FEATURE.getName())) + .findAny() + .orElse(null); + assertThat(mlFeatureUsage, is(not(nullValue()))); + assertThat(mlFeatureUsage.getContext(), containsString("job-feature-usage")); + assertThat(mlFeatureUsage.getLastUsedTime(), is(not(nullValue()))); + + ZonedDateTime lastUsageAfterClose = mlFeatureUsage.getLastUsedTime(); + + assertBusy(() -> { + ZonedDateTime recentUsage =getFeatureUsageInfo() + .stream() + .filter(f -> f.getFamily().equals(ML_FEATURE_FAMILY)) + .filter(f -> f.getName().equals(MachineLearning.ML_ANOMALY_JOBS_FEATURE.getName())) + .map(GetFeatureUsageResponse.FeatureUsageInfo::getLastUsedTime) + .findAny() + .orElse(null); + assertThat(recentUsage, is(not(nullValue()))); + assertThat(lastUsageAfterClose.toInstant(), equalTo(recentUsage.toInstant())); + }); + } + + public void testFeatureTrackingInferenceModelPipeline() throws Exception { + String modelId = "test-load-models-classification-license-tracking"; + Map oneHotEncoding = new HashMap<>(); + oneHotEncoding.put("cat", "animal_cat"); + oneHotEncoding.put("dog", "animal_dog"); + TrainedModelConfig config = buildTrainedModelConfigBuilder(modelId) + .setInput(new TrainedModelInput(Arrays.asList("field.foo", "field.bar", "other.categorical"))) + .setInferenceConfig(new ClassificationConfig(3)) + .setParsedDefinition(new TrainedModelDefinition.Builder() + .setPreProcessors(Arrays.asList(new OneHotEncoding("other.categorical", oneHotEncoding, false))) + .setTrainedModel(buildClassification(true))) + .build(); + client().execute(PutTrainedModelAction.INSTANCE, new PutTrainedModelAction.Request(config)).actionGet(); + + String pipelineId = "pipeline-inference-model-tracked"; + putTrainedModelIngestPipeline(pipelineId, modelId); + createdPipelines.add(pipelineId); + + // wait for the feature to start being used + assertBusy(() -> { + GetFeatureUsageResponse.FeatureUsageInfo mlFeatureUsage = getFeatureUsageInfo() + .stream() + .filter(f -> f.getFamily().equals(ML_FEATURE_FAMILY)) + .filter(f -> f.getName().equals(MachineLearning.ML_MODEL_INFERENCE_FEATURE.getName())) + .findAny() + .orElse(null); + assertThat(mlFeatureUsage, is(not(nullValue()))); + assertThat(mlFeatureUsage.getContext(), containsString(modelId)); + }); + + GetFeatureUsageResponse.FeatureUsageInfo mlFeatureUsage = getFeatureUsageInfo() + .stream() + .filter(f -> f.getFamily().equals(ML_FEATURE_FAMILY)) + .filter(f -> f.getName().equals(MachineLearning.ML_MODEL_INFERENCE_FEATURE.getName())) + .findAny() + .orElse(null); + assertThat(mlFeatureUsage, is(not(nullValue()))); + // While the model is referenced, the lastUsage moves forward to "now". 
Verify it does that + ZonedDateTime lastUsage = mlFeatureUsage.getLastUsedTime(); + assertBusy(() -> { + ZonedDateTime recentUsage = getFeatureUsageInfo() + .stream() + .filter(f -> f.getFamily().equals(ML_FEATURE_FAMILY)) + .filter(f -> f.getName().equals(MachineLearning.ML_MODEL_INFERENCE_FEATURE.getName())) + .map(GetFeatureUsageResponse.FeatureUsageInfo::getLastUsedTime) + .findAny() + .orElse(null); + assertThat(recentUsage, is(not(nullValue()))); + assertThat(lastUsage.toInstant(), lessThan(recentUsage.toInstant())); + }); + + client().execute(DeletePipelineAction.INSTANCE, new DeletePipelineRequest(pipelineId)).actionGet(); + createdPipelines.remove(pipelineId); + + // Make sure that feature usage keeps the last usage once the model is removed + assertBusy(() -> { + ZonedDateTime recentUsage = getFeatureUsageInfo() + .stream() + .filter(f -> f.getFamily().equals(ML_FEATURE_FAMILY)) + .filter(f -> f.getName().equals(MachineLearning.ML_MODEL_INFERENCE_FEATURE.getName())) + .map(GetFeatureUsageResponse.FeatureUsageInfo::getLastUsedTime) + .findAny() + .orElse(null); + assertThat(recentUsage, is(not(nullValue()))); + ZonedDateTime secondRecentUsage = getFeatureUsageInfo() + .stream() + .filter(f -> f.getFamily().equals(ML_FEATURE_FAMILY)) + .filter(f -> f.getName().equals(MachineLearning.ML_MODEL_INFERENCE_FEATURE.getName())) + .map(GetFeatureUsageResponse.FeatureUsageInfo::getLastUsedTime) + .findAny() + .orElse(null); + assertThat(secondRecentUsage, is(not(nullValue()))); + assertThat(secondRecentUsage.toInstant(), equalTo(recentUsage.toInstant())); + }); + } + + private List getFeatureUsageInfo() { + return client() + .execute(TransportGetFeatureUsageAction.TYPE, new GetFeatureUsageRequest()) + .actionGet() + .getFeatures(); + } + + private void putAndStartJob(String jobId) throws Exception { + Job.Builder job = createScheduledJob(jobId); + client().execute(PutJobAction.INSTANCE, new PutJobAction.Request(job)).actionGet(); + client().execute(OpenJobAction.INSTANCE, new OpenJobAction.Request(jobId)).actionGet(); + assertBusy(() -> assertEquals(getJobStats(job.getId()).get(0).getState(), JobState.OPENED)); + } + + private List getJobStats(String jobId) { + GetJobsStatsAction.Request request = new GetJobsStatsAction.Request(jobId); + GetJobsStatsAction.Response response = client().execute(GetJobsStatsAction.INSTANCE, request).actionGet(); + return response.getResponse().results(); + } + + private void putTrainedModelIngestPipeline(String pipelineId, String modelId) throws Exception { + client().execute( + PutPipelineAction.INSTANCE, + new PutPipelineRequest( + pipelineId, + new BytesArray( + "{\n" + + " \"processors\": [\n" + + " {\n" + + " \"inference\": {\n" + + " \"inference_config\": {\"classification\":{}},\n" + + " \"model_id\": \"" + modelId + "\",\n" + + " \"field_map\": {}\n" + + " }\n" + + " }\n" + + " ]\n" + + " }" + ), + XContentType.JSON + ) + ).actionGet(); + } + +} diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java index cc1751ea9476c..864eefdb567cd 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java @@ -445,9 +445,23 @@ public class MachineLearning extends Plugin implements SystemIndexPlugin, // This is for performance testing. It's not exposed to the end user. 
// Recompile if you want to compare performance with C++ tokenization. public static final boolean CATEGORIZATION_TOKENIZATION_IN_JAVA = true; - - public static final LicensedFeature.Persistent ML_JOBS_FEATURE = - LicensedFeature.persistent("machine-learning", "anomaly-detection-job", License.OperationMode.PLATINUM); + public static final String ML_FEATURE_FAMILY = "machine-learning"; + + public static final LicensedFeature.Persistent ML_ANOMALY_JOBS_FEATURE = LicensedFeature.persistent( + ML_FEATURE_FAMILY, + "anomaly-detection-job", + License.OperationMode.PLATINUM + ); + public static final LicensedFeature.Persistent ML_ANALYTICS_JOBS_FEATURE = LicensedFeature.persistent( + ML_FEATURE_FAMILY, + "data-frame-analytics-job", + License.OperationMode.PLATINUM + ); + public static final LicensedFeature.Persistent ML_MODEL_INFERENCE_FEATURE = LicensedFeature.persistent( + ML_FEATURE_FAMILY, + "model-inference", + License.OperationMode.PLATINUM + ); @Override public Map getProcessors(Processor.Parameters parameters) { @@ -840,7 +854,9 @@ public Collection createComponents(Client client, ClusterService cluster trainedModelStatsService, settings, clusterService.getNodeName(), - inferenceModelBreaker.get()); + inferenceModelBreaker.get(), + getLicenseState() + ); this.modelLoadingService.set(modelLoadingService); this.deploymentManager.set(new DeploymentManager(client, xContentRegistry, threadPool, pyTorchProcessFactory)); @@ -959,13 +975,15 @@ public List> getPersistentTasksExecutor(ClusterServic dataFrameAnalyticsManager.get(), dataFrameAnalyticsAuditor.get(), memoryTracker.get(), - expressionResolver), + expressionResolver, + getLicenseState()), new SnapshotUpgradeTaskExecutor(settings, clusterService, autodetectProcessManager.get(), memoryTracker.get(), expressionResolver, - client) + client, + getLicenseState()) ); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStartDataFrameAnalyticsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStartDataFrameAnalyticsAction.java index 515c56acfddda..387f2774c0d85 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStartDataFrameAnalyticsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStartDataFrameAnalyticsAction.java @@ -586,11 +586,13 @@ public static class TaskExecutor extends AbstractJobPersistentTasksExecutor clusterState = event.state()); } @@ -609,7 +612,7 @@ protected AllocatedPersistentTask createTask( PersistentTasksCustomMetadata.PersistentTask persistentTask, Map headers) { return new DataFrameAnalyticsTask( - id, type, action, parentTaskId, headers, client, manager, auditor, persistentTask.getParams()); + id, type, action, parentTaskId, headers, client, manager, auditor, persistentTask.getParams(), licenseState); } @Override diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/DataFrameAnalyticsTask.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/DataFrameAnalyticsTask.java index 2b6516007b082..8c85ea5ea2eb0 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/DataFrameAnalyticsTask.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/DataFrameAnalyticsTask.java @@ -25,11 +25,10 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.index.query.IdsQueryBuilder; -import 
org.elasticsearch.persistent.AllocatedPersistentTask; -import org.elasticsearch.persistent.PersistentTasksService; +import org.elasticsearch.license.LicensedAllocatedPersistentTask; +import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.tasks.TaskId; -import org.elasticsearch.tasks.TaskManager; import org.elasticsearch.xpack.core.ml.MlTasks; import org.elasticsearch.xpack.core.ml.action.StartDataFrameAnalyticsAction; import org.elasticsearch.xpack.core.ml.action.StopDataFrameAnalyticsAction; @@ -40,6 +39,7 @@ import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; import org.elasticsearch.xpack.core.ml.utils.PhaseProgress; import org.elasticsearch.xpack.core.watcher.watch.Payload; +import org.elasticsearch.xpack.ml.MachineLearning; import org.elasticsearch.xpack.ml.dataframe.stats.ProgressTracker; import org.elasticsearch.xpack.ml.dataframe.stats.StatsHolder; import org.elasticsearch.xpack.ml.dataframe.steps.DataFrameAnalyticsStep; @@ -53,7 +53,7 @@ import static org.elasticsearch.xpack.core.ClientHelper.ML_ORIGIN; import static org.elasticsearch.xpack.core.ClientHelper.executeAsyncWithOrigin; -public class DataFrameAnalyticsTask extends AllocatedPersistentTask implements StartDataFrameAnalyticsAction.TaskMatcher { +public class DataFrameAnalyticsTask extends LicensedAllocatedPersistentTask implements StartDataFrameAnalyticsAction.TaskMatcher { private static final Logger LOGGER = LogManager.getLogger(DataFrameAnalyticsTask.class); @@ -68,8 +68,18 @@ public class DataFrameAnalyticsTask extends AllocatedPersistentTask implements S public DataFrameAnalyticsTask(long id, String type, String action, TaskId parentTask, Map headers, Client client, DataFrameAnalyticsManager analyticsManager, DataFrameAnalyticsAuditor auditor, - StartDataFrameAnalyticsAction.TaskParams taskParams) { - super(id, type, action, MlTasks.DATA_FRAME_ANALYTICS_TASK_ID_PREFIX + taskParams.getId(), parentTask, headers); + StartDataFrameAnalyticsAction.TaskParams taskParams, XPackLicenseState licenseState) { + super( + id, + type, + action, + MlTasks.DATA_FRAME_ANALYTICS_TASK_ID_PREFIX + taskParams.getId(), + parentTask, + headers, + MachineLearning.ML_ANALYTICS_JOBS_FEATURE, + MlTasks.DATA_FRAME_ANALYTICS_TASK_ID_PREFIX + taskParams.getId(), + licenseState + ); this.client = new ParentTaskAssigningClient(Objects.requireNonNull(client), parentTask); this.analyticsManager = Objects.requireNonNull(analyticsManager); this.auditor = Objects.requireNonNull(auditor); @@ -97,14 +107,6 @@ public StatsHolder getStatsHolder() { return statsHolder; } - @Override - protected void init(PersistentTasksService persistentTasksService, - TaskManager taskManager, - String persistentTaskId, - long allocationId) { - super.init(persistentTasksService, taskManager, persistentTaskId, allocationId); - } - @Override protected void onCancelled() { stop(getReasonCancelled(), StopDataFrameAnalyticsAction.DEFAULT_TIMEOUT); @@ -118,7 +120,7 @@ public boolean shouldCancelChildrenOnCancellation() { } @Override - public void markAsCompleted() { + public void doMarkAsCompleted() { // It is possible that the stop API has been called in the meantime and that // may also cause this method to be called. We check whether we have already // been marked completed to avoid doing it twice. 
We need to capture that @@ -131,12 +133,12 @@ public void markAsCompleted() { isMarkAsCompletedCalled = true; } - persistProgress(client, taskParams.getId(), () -> super.markAsCompleted()); + persistProgress(client, taskParams.getId(), super::doMarkAsCompleted); } @Override - public void markAsFailed(Exception e) { - persistProgress(client, taskParams.getId(), () -> super.markAsFailed(e)); + public void doMarkAsFailed(Exception e) { + persistProgress(client, taskParams.getId(), () -> super.doMarkAsFailed(e)); } public void stop(String reason, TimeValue timeout) { diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/loadingservice/ModelLoadingService.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/loadingservice/ModelLoadingService.java index 47f789b9375ce..38332c4c1333b 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/loadingservice/ModelLoadingService.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/loadingservice/ModelLoadingService.java @@ -28,6 +28,8 @@ import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.core.TimeValue; import org.elasticsearch.ingest.IngestMetadata; +import org.elasticsearch.license.License; +import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.core.ml.action.GetTrainedModelsAction; import org.elasticsearch.xpack.core.ml.inference.TrainedModelConfig; @@ -57,6 +59,8 @@ import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; +import static org.elasticsearch.xpack.ml.MachineLearning.ML_MODEL_INFERENCE_FEATURE; + /** * This is a thread safe model loading service with LRU cache. * Cache entries have a TTL before they are evicted. @@ -129,6 +133,7 @@ private ModelAndConsumer(LocalModel model, Consumer consumer) { private final ByteSizeValue maxCacheSize; private final String localNode; private final CircuitBreaker trainedModelCircuitBreaker; + private final XPackLicenseState licenseState; public ModelLoadingService(TrainedModelProvider trainedModelProvider, InferenceAuditor auditor, @@ -137,7 +142,8 @@ public ModelLoadingService(TrainedModelProvider trainedModelProvider, TrainedModelStatsService modelStatsService, Settings settings, String localNode, - CircuitBreaker trainedModelCircuitBreaker) { + CircuitBreaker trainedModelCircuitBreaker, + XPackLicenseState licenseState) { this.provider = trainedModelProvider; this.threadPool = threadPool; this.maxCacheSize = INFERENCE_MODEL_CACHE_SIZE.get(settings); @@ -154,6 +160,7 @@ public ModelLoadingService(TrainedModelProvider trainedModelProvider, clusterService.addListener(this); this.localNode = localNode; this.trainedModelCircuitBreaker = ExceptionsHelper.requireNonNull(trainedModelCircuitBreaker, "trainedModelCircuitBreaker"); + this.licenseState = licenseState; } // for testing @@ -355,7 +362,6 @@ private void loadWithoutCaching(String modelId, Consumer consumer, ActionListene InferenceConfig inferenceConfig = trainedModelConfig.getInferenceConfig() == null ? 
inferenceConfigFromTargetType(inferenceDefinition.getTargetType()) : trainedModelConfig.getInferenceConfig(); - try { updateCircuitBreakerEstimate(modelId, inferenceDefinition, trainedModelConfig); } catch (CircuitBreakingException ex) { @@ -422,7 +428,7 @@ private void handleLoadSuccess(String modelId, synchronized (loadingListeners) { populateNewModelAlias(modelId); // If the model is referenced, that means it is currently in a pipeline somewhere - // Also, if the consume is a search consumer, we should always cache it + // Also, if the consumer is a search consumer, we should always cache it if (referencedModels.contains(modelId) || Sets.haveNonEmptyIntersection(modelIdToModelAliases.getOrDefault(modelId, new HashSet<>()), referencedModels) || consumer.equals(Consumer.SEARCH)) { @@ -431,6 +437,14 @@ private void handleLoadSuccess(String modelId, // If it isn't, we flip an `isLoaded` flag, and increment the model counter to make sure if it is evicted // between now and when the listeners access it, the circuit breaker reflects actual usage. localModelCache.computeIfAbsent(modelId, modelAndConsumerLoader); + // We should start tracking on successful load. It will stop being tracked once it evacuates the cache and is no + // longer a referenced model + // NOTE: It is not possible to change the referenced models without locking on `loadingListeners` + // So, if the model is evacuated from cache immediately after checking that it was present, + // the feature usage will still be tracked. + if (License.OperationMode.BASIC.equals(trainedModelConfig.getLicenseLevel()) == false) { + ML_MODEL_INFERENCE_FEATURE.startTracking(licenseState, modelId); + } } catch (ExecutionException ee) { logger.warn(() -> new ParameterizedMessage("[{}] threw when attempting add to cache", modelId), ee); } @@ -507,6 +521,10 @@ private void cacheEvictionListener(RemovalNotification modelId, modelIdToModelAliases.getOrDefault(modelId, new HashSet<>()) )); + // If it's not referenced in a pipeline, stop tracking it on this node + if (referencedModels.contains(modelId) == false) { + ML_MODEL_INFERENCE_FEATURE.stopTracking(licenseState, modelId); + } // If the model is no longer referenced, flush the stats to persist as soon as possible notification.getValue().model.persistStats(referencedModels.contains(modelId) == false); @@ -577,6 +595,13 @@ public void clusterChanged(ClusterChangedEvent event) { logger.trace("[{} ({})] invalidated from cache", modelId, modelAliasOrId); localModelCache.invalidate(modelId); } + // The model is not cached and the model no longer referenced we should ensure that we are not tracking the + // license usage. + // It is possible that we stop referencing it BEFORE its cached, or it was previously invalidated + // Either way, we know we won't put it back in cache as we are synchronized on `loadingListeners` + if (modelAndConsumer == null) { + ML_MODEL_INFERENCE_FEATURE.stopTracking(licenseState, modelId); + } } } // Remove all that are still referenced, i.e. 
the intersection of allReferencedModelKeys and referencedModels diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/snapshot/upgrader/SnapshotUpgradeTask.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/snapshot/upgrader/SnapshotUpgradeTask.java index f4b555d6ab90b..816803bc4ee03 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/snapshot/upgrader/SnapshotUpgradeTask.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/snapshot/upgrader/SnapshotUpgradeTask.java @@ -7,25 +7,40 @@ package org.elasticsearch.xpack.ml.job.snapshot.upgrader; -import org.elasticsearch.persistent.AllocatedPersistentTask; +import org.elasticsearch.license.LicensedAllocatedPersistentTask; +import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.tasks.TaskId; import org.elasticsearch.xpack.core.ml.MlTasks; +import org.elasticsearch.xpack.ml.MachineLearning; import java.util.Map; -public class SnapshotUpgradeTask extends AllocatedPersistentTask { +public class SnapshotUpgradeTask extends LicensedAllocatedPersistentTask { private final String jobId; private final String snapshotId; - public SnapshotUpgradeTask(String jobId, - String snapshotId, - long id, - String type, - String action, - TaskId parentTask, - Map headers) { - super(id, type, action, MlTasks.snapshotUpgradeTaskId(jobId, snapshotId), parentTask, headers); + public SnapshotUpgradeTask( + String jobId, + String snapshotId, + long id, + String type, + String action, + TaskId parentTask, + Map headers, + XPackLicenseState licenseState + ) { + super( + id, + type, + action, + MlTasks.snapshotUpgradeTaskId(jobId, snapshotId), + parentTask, + headers, + MachineLearning.ML_ANOMALY_JOBS_FEATURE, + MlTasks.snapshotUpgradeTaskId(jobId, snapshotId), + licenseState + ); this.jobId = jobId; this.snapshotId = snapshotId; } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/snapshot/upgrader/SnapshotUpgradeTaskExecutor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/snapshot/upgrader/SnapshotUpgradeTaskExecutor.java index 38199adf3ddb8..c5b931c829f45 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/snapshot/upgrader/SnapshotUpgradeTaskExecutor.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/snapshot/upgrader/SnapshotUpgradeTaskExecutor.java @@ -19,6 +19,7 @@ import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.persistent.AllocatedPersistentTask; import org.elasticsearch.persistent.PersistentTaskState; import org.elasticsearch.persistent.PersistentTasksCustomMetadata; @@ -55,6 +56,7 @@ public class SnapshotUpgradeTaskExecutor extends AbstractJobPersistentTasksExecu private final AutodetectProcessManager autodetectProcessManager; private final AnomalyDetectionAuditor auditor; private final JobResultsProvider jobResultsProvider; + private final XPackLicenseState licenseState; private volatile ClusterState clusterState; private final Client client; @@ -63,7 +65,8 @@ public SnapshotUpgradeTaskExecutor(Settings settings, AutodetectProcessManager autodetectProcessManager, MlMemoryTracker memoryTracker, IndexNameExpressionResolver expressionResolver, - Client client) { + Client client, + XPackLicenseState licenseState) { super(MlTasks.JOB_SNAPSHOT_UPGRADE_TASK_NAME, 
MachineLearning.UTILITY_THREAD_POOL_NAME, settings, @@ -74,6 +77,7 @@ public SnapshotUpgradeTaskExecutor(Settings settings, this.auditor = new AnomalyDetectionAuditor(client, clusterService); this.jobResultsProvider = new JobResultsProvider(client, settings, expressionResolver); this.client = client; + this.licenseState = licenseState; clusterService.addListener(event -> clusterState = event.state()); } @@ -233,7 +237,8 @@ protected AllocatedPersistentTask createTask(long id, String type, String action type, action, parentTaskId, - headers); + headers, + licenseState); } @Override diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/task/JobTask.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/task/JobTask.java index 2320af7932748..0cf287eebd4f3 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/task/JobTask.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/task/JobTask.java @@ -35,9 +35,16 @@ enum ClosingOrVacating { private final AtomicReference closingOrVacating = new AtomicReference<>(ClosingOrVacating.NEITHER); private volatile AutodetectProcessManager autodetectProcessManager; - protected JobTask(String jobId, long id, String type, String action, TaskId parentTask, Map headers, - XPackLicenseState licenseState) { - super(id, type, action, "job-" + jobId, parentTask, headers, MachineLearning.ML_JOBS_FEATURE, "job-" + jobId, licenseState); + protected JobTask( + String jobId, + long id, + String type, + String action, + TaskId parentTask, + Map headers, + XPackLicenseState licenseState + ) { + super(id, type, action, "job-" + jobId, parentTask, headers, MachineLearning.ML_ANOMALY_JOBS_FEATURE, "job-" + jobId, licenseState); this.jobId = jobId; } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportStartDataFrameAnalyticsActionTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportStartDataFrameAnalyticsActionTests.java index fc2f4d66d602c..14253cffff2a9 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportStartDataFrameAnalyticsActionTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportStartDataFrameAnalyticsActionTests.java @@ -20,6 +20,7 @@ import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.indices.TestIndexNameExpressionResolver; +import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.persistent.PersistentTasksCustomMetadata.Assignment; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.core.ml.MlMetadata; @@ -165,7 +166,9 @@ private static TaskExecutor createTaskExecutor() { mock(DataFrameAnalyticsManager.class), mock(DataFrameAnalyticsAuditor.class), mock(MlMemoryTracker.class), - TestIndexNameExpressionResolver.newInstance()); + TestIndexNameExpressionResolver.newInstance(), + mock(XPackLicenseState.class) + ); } private static DiscoveryNode createNode(int i, boolean isMlNode, Version nodeVersion) { diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/DataFrameAnalyticsTaskTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/DataFrameAnalyticsTaskTests.java index 2b675b60449b9..c179372134c00 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/DataFrameAnalyticsTaskTests.java +++ 
b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/DataFrameAnalyticsTaskTests.java @@ -22,6 +22,7 @@ import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.json.JsonXContent; +import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.persistent.PersistentTasksService; import org.elasticsearch.persistent.UpdatePersistentTaskStatusAction; import org.elasticsearch.search.SearchHit; @@ -181,7 +182,7 @@ private void testPersistProgress(SearchHits searchHits, String expectedIndexOrAl DataFrameAnalyticsTask task = new DataFrameAnalyticsTask( - 123, "type", "action", null, Map.of(), client, analyticsManager, auditor, taskParams); + 123, "type", "action", null, Map.of(), client, analyticsManager, auditor, taskParams, mock(XPackLicenseState.class)); task.init(persistentTasksService, taskManager, "task-id", 42); task.setStatsHolder(new StatsHolder(progress, null, null, new DataCounts("test_job"))); @@ -258,7 +259,7 @@ private void testSetFailed(boolean nodeShuttingDown) throws IOException { DataFrameAnalyticsTask task = new DataFrameAnalyticsTask( - 123, "type", "action", null, Map.of(), client, analyticsManager, auditor, taskParams); + 123, "type", "action", null, Map.of(), client, analyticsManager, auditor, taskParams, mock(XPackLicenseState.class)); task.init(persistentTasksService, taskManager, "task-id", 42); task.setStatsHolder(new StatsHolder(progress, null, null, new DataCounts("test_job"))); task.setStep(new StubReindexingStep(task.getStatsHolder().getProgressTracker())); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/loadingservice/ModelLoadingServiceTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/loadingservice/ModelLoadingServiceTests.java index 9ede4b215c7ac..e066d60e83829 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/loadingservice/ModelLoadingServiceTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/loadingservice/ModelLoadingServiceTests.java @@ -32,6 +32,7 @@ import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.ingest.IngestMetadata; import org.elasticsearch.ingest.PipelineConfiguration; +import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ScalingExecutorBuilder; import org.elasticsearch.threadpool.TestThreadPool; @@ -131,7 +132,8 @@ public void testGetCachedModels() throws Exception { trainedModelStatsService, Settings.EMPTY, "test-node", - circuitBreaker); + circuitBreaker, + mock(XPackLicenseState.class)); modelLoadingService.clusterChanged(ingestChangedEvent(model1, model2, model3)); @@ -182,7 +184,8 @@ public void testMaxCachedLimitReached() throws Exception { trainedModelStatsService, Settings.builder().put(ModelLoadingService.INFERENCE_MODEL_CACHE_SIZE.getKey(), ByteSizeValue.ofBytes(20L)).build(), "test-node", - circuitBreaker); + circuitBreaker, + mock(XPackLicenseState.class)); // We want to be notified when the models are loaded which happens in a background thread ModelLoadedTracker loadedTracker = new ModelLoadedTracker(Arrays.asList(modelIds)); @@ -297,7 +300,8 @@ public void testWhenCacheEnabledButNotIngestNode() throws Exception { trainedModelStatsService, Settings.EMPTY, "test-node", - circuitBreaker); + circuitBreaker, + mock(XPackLicenseState.class)); 
modelLoadingService.clusterChanged(ingestChangedEvent(false, model1)); @@ -323,7 +327,8 @@ public void testGetCachedMissingModel() throws Exception { trainedModelStatsService, Settings.EMPTY, "test-node", - circuitBreaker); + circuitBreaker, + mock(XPackLicenseState.class)); modelLoadingService.clusterChanged(ingestChangedEvent(model)); PlainActionFuture future = new PlainActionFuture<>(); @@ -352,7 +357,8 @@ public void testGetMissingModel() { trainedModelStatsService, Settings.EMPTY, "test-node", - circuitBreaker); + circuitBreaker, + mock(XPackLicenseState.class)); PlainActionFuture future = new PlainActionFuture<>(); modelLoadingService.getModelForPipeline(model, future); @@ -376,7 +382,8 @@ public void testGetModelEagerly() throws Exception { trainedModelStatsService, Settings.EMPTY, "test-node", - circuitBreaker); + circuitBreaker, + mock(XPackLicenseState.class)); for(int i = 0; i < 3; i++) { PlainActionFuture future = new PlainActionFuture<>(); @@ -400,7 +407,8 @@ public void testGetModelForSearch() throws Exception { trainedModelStatsService, Settings.EMPTY, "test-node", - circuitBreaker); + circuitBreaker, + mock(XPackLicenseState.class)); for(int i = 0; i < 3; i++) { PlainActionFuture future = new PlainActionFuture<>(); @@ -429,7 +437,8 @@ public void testCircuitBreakerBreak() throws Exception { trainedModelStatsService, Settings.EMPTY, "test-node", - circuitBreaker); + circuitBreaker, + mock(XPackLicenseState.class)); modelLoadingService.addModelLoadedListener(model3, ActionListener.wrap( r -> fail("Should not have succeeded to load model as breaker should be reached"), @@ -469,7 +478,8 @@ public void testReferenceCounting() throws Exception { trainedModelStatsService, Settings.EMPTY, "test-node", - circuitBreaker); + circuitBreaker, + mock(XPackLicenseState.class)); modelLoadingService.clusterChanged(ingestChangedEvent(modelId)); @@ -503,7 +513,8 @@ public void testReferenceCountingForPipeline() throws Exception { trainedModelStatsService, Settings.EMPTY, "test-node", - circuitBreaker); + circuitBreaker, + mock(XPackLicenseState.class)); modelLoadingService.clusterChanged(ingestChangedEvent(modelId)); @@ -533,7 +544,8 @@ public void testReferenceCounting_ModelIsNotCached() throws ExecutionException, trainedModelStatsService, Settings.EMPTY, "test-node", - circuitBreaker); + circuitBreaker, + mock(XPackLicenseState.class)); PlainActionFuture future = new PlainActionFuture<>(); modelLoadingService.getModelForPipeline(modelId, future); @@ -554,7 +566,8 @@ public void testGetCachedModelViaModelAliases() throws Exception { trainedModelStatsService, Settings.EMPTY, "test-node", - circuitBreaker); + circuitBreaker, + mock(XPackLicenseState.class)); modelLoadingService.clusterChanged(aliasChangeEvent( true, @@ -610,7 +623,8 @@ public void testAliasesGetUpdatedEvenWhenNotIngestNode() throws IOException { trainedModelStatsService, Settings.EMPTY, "test-node", - circuitBreaker); + circuitBreaker, + mock(XPackLicenseState.class)); modelLoadingService.clusterChanged(aliasChangeEvent( false, From 7afa5e564621dce24f0130b8eb2711ef5ac296a1 Mon Sep 17 00:00:00 2001 From: Dimitris Athanasiou Date: Wed, 1 Sep 2021 15:39:25 +0300 Subject: [PATCH 061/128] [ML] Fix failure on datafeed preview with date_nanos time field (#77109) Preview datafeed currently fails when the time field is of type `date_nanos`. The failure contains the error message: ``` date [...] is after 2262-04-11T23:47:16.854775807 and cannot be stored in nanosecond resolution ``` This commit fixes this failure. 
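For context, here is a minimal sketch of the limit involved; it uses only the standard java.time API and is not part of the patch itself:

```java
import java.time.Instant;

public class DateNanosLimit {
    public static void main(String[] args) {
        // A date_nanos field stores at most Long.MAX_VALUE nanoseconds since the epoch.
        Instant maxNanos = Instant.EPOCH.plusNanos(Long.MAX_VALUE);  // 2262-04-11T23:47:16.854775807Z
        // Long.MAX_VALUE read as epoch *milliseconds* is far beyond that instant,
        // which is why an unbounded upper range bound cannot be parsed for a date_nanos time field.
        Instant maxMillis = Instant.ofEpochMilli(Long.MAX_VALUE);    // +292278994-08-17T07:12:55.807Z
        System.out.println(maxNanos + " vs " + maxMillis);
    }
}
```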
The cause of the issue was that preview generates a search with a range query on the time field whose upper bound is `Long.MAX_VALUE` in order to include all available data in the preview. However, that value is parsed with `DateUtils.toLong` when the time field is `date_nanos` and it hits the limitation that values can't be larger than `DateUtils.MAX_NANOSECOND_INSTANT`. The fix checks whether the time field is `date_nanos` and uses `DateUtils.MAX_NANOSECOND_INSTANT` as the upper bound instead of `Long.MAX_VALUE`. --- .../TransportPreviewDatafeedAction.java | 40 ++++++- .../test/ml/preview_datafeed.yml | 101 ++++++++++++++++++ 2 files changed, 136 insertions(+), 5 deletions(-) diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPreviewDatafeedAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPreviewDatafeedAction.java index 23f6f510bb30f..baa896d23de32 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPreviewDatafeedAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPreviewDatafeedAction.java @@ -7,13 +7,18 @@ package org.elasticsearch.xpack.ml.action; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.fieldcaps.FieldCapabilities; +import org.elasticsearch.action.fieldcaps.FieldCapabilitiesAction; +import org.elasticsearch.action.fieldcaps.FieldCapabilitiesRequest; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.client.Client; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.time.DateUtils; import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.index.mapper.DateFieldMapper; import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; @@ -35,9 +40,12 @@ import java.io.InputStreamReader; import java.nio.charset.StandardCharsets; import java.util.Date; +import java.util.Map; import java.util.Optional; import java.util.stream.Collectors; +import static org.elasticsearch.xpack.core.ClientHelper.ML_ORIGIN; +import static org.elasticsearch.xpack.core.ClientHelper.executeWithHeadersAsync; import static org.elasticsearch.xpack.core.ClientHelper.filterSecurityHeaders; import static org.elasticsearch.xpack.ml.utils.SecondaryAuthorizationUtils.useSecondaryAuthIfAvailable; @@ -93,22 +101,27 @@ private void previewDatafeed( Job job, ActionListener listener ) { - DatafeedConfig.Builder previewDatafeed = buildPreviewDatafeed(datafeedConfig); + DatafeedConfig.Builder previewDatafeedBuilder = buildPreviewDatafeed(datafeedConfig); useSecondaryAuthIfAvailable(securityContext, () -> { - previewDatafeed.setHeaders(filterSecurityHeaders(threadPool.getThreadContext().getHeaders())); + previewDatafeedBuilder.setHeaders(filterSecurityHeaders(threadPool.getThreadContext().getHeaders())); // NB: this is using the client from the transport layer, NOT the internal client. // This is important because it means the datafeed search will fail if the user // requesting the preview doesn't have permission to search the relevant indices. 
+ DatafeedConfig previewDatafeedConfig = previewDatafeedBuilder.build(); DataExtractorFactory.create( client, - previewDatafeed.build(), + previewDatafeedConfig, job, xContentRegistry, // Fake DatafeedTimingStatsReporter that does not have access to results index new DatafeedTimingStatsReporter(new DatafeedTimingStats(datafeedConfig.getJobId()), (ts, refreshPolicy) -> {}), listener.delegateFailure((l, dataExtractorFactory) -> { - DataExtractor dataExtractor = dataExtractorFactory.newExtractor(0, Long.MAX_VALUE); - threadPool.generic().execute(() -> previewDatafeed(dataExtractor, l)); + isDateNanos(previewDatafeedConfig.getHeaders(), job.getDataDescription().getTimeField(), + listener.delegateFailure((l2, isDateNanos) -> { + DataExtractor dataExtractor = dataExtractorFactory.newExtractor(0, + isDateNanos ? DateUtils.MAX_NANOSECOND_INSTANT.toEpochMilli() : Long.MAX_VALUE); + threadPool.generic().execute(() -> previewDatafeed(dataExtractor, l)); + })); })); }); } @@ -130,6 +143,23 @@ static DatafeedConfig.Builder buildPreviewDatafeed(DatafeedConfig datafeed) { return previewDatafeed; } + private void isDateNanos(Map headers, String timeField, ActionListener listener) { + executeWithHeadersAsync( + headers, + ML_ORIGIN, + client, + FieldCapabilitiesAction.INSTANCE, + new FieldCapabilitiesRequest().fields(timeField), + ActionListener.wrap( + fieldCapsResponse -> { + Map timeFieldCaps = fieldCapsResponse.getField(timeField); + listener.onResponse(timeFieldCaps.keySet().contains(DateFieldMapper.DATE_NANOS_CONTENT_TYPE)); + }, + listener::onFailure + ) + ); + } + /** Visible for testing */ static void previewDatafeed(DataExtractor dataExtractor, ActionListener listener) { try { diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/preview_datafeed.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/preview_datafeed.yml index 3466c350baf60..bd8b567c2a514 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/preview_datafeed.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/preview_datafeed.yml @@ -752,3 +752,104 @@ setup: - match: { 2.airline: foo } - match: { 2.responsetime_x_2: 84.0 } - match: { 2.doc_count: 1 } + +--- +"Test preview datafeed where time field is data_nanos": + + - do: + indices.create: + index: index-airline-data-date-nanos + body: + mappings: + properties: + time: + type: date_nanos + airline: + type: keyword + responsetime: + type: float + event_rate: + type: integer + + - do: + index: + index: index-airline-data-date-nanos + id: 1 + body: > + { + "time": "2017-02-18T00:00:00Z", + "airline": "foo", + "responsetime": 1.0, + "event_rate": 5 + } + + - do: + index: + index: index-airline-data-date-nanos + id: 2 + body: > + { + "time": "2017-02-18T00:30:00Z", + "airline": "foo", + "responsetime": 1.0, + "event_rate": 6 + } + + - do: + index: + index: index-airline-data-date-nanos + id: 3 + body: > + { + "time": "2017-02-18T01:00:00Z", + "airline": "bar", + "responsetime": 42.0, + "event_rate": 8 + } + + - do: + index: + index: index-airline-data-date-nanos + id: 4 + body: > + { + "time": "2017-02-18T01:01:00Z", + "airline": "foo", + "responsetime": 42.0, + "event_rate": 7 + } + + - do: + indices.refresh: + index: index-airline-data-date-nanos + + - do: + ml.preview_datafeed: + body: > + { + "datafeed_config": { + "indexes":"index-airline-data-date-nanos" + }, + "job_config": { + "analysis_config": { + "bucket_span": "1h", + "detectors": 
[{"function":"sum","field_name":"responsetime","by_field_name":"airline"}] + }, + "data_description": { + "time_field":"time" + } + } + } + - length: { $body: 4 } + - match: { 0.time: 1487376000000 } + - match: { 0.airline: foo } + - match: { 0.responsetime: 1.0 } + - match: { 1.time: 1487377800000 } + - match: { 1.airline: foo } + - match: { 1.responsetime: 1.0 } + - match: { 2.time: 1487379600000 } + - match: { 2.airline: bar } + - match: { 2.responsetime: 42.0 } + - match: { 3.time: 1487379660000 } + - match: { 3.airline: foo } + - match: { 3.responsetime: 42.0 } From 0e1efa6533c7271cd1ac2d23d72251aadcecaaf9 Mon Sep 17 00:00:00 2001 From: Benjamin Trent Date: Wed, 1 Sep 2021 08:45:13 -0400 Subject: [PATCH 062/128] [ML] generalize pytorch sentiment analysis to text classification (#77084) * [ML] generalize pytorch sentiment analysis to text classification * Update x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextClassificationConfig.java --- .../infer-trained-model-deployment.asciidoc | 6 +- .../MlInferenceNamedXContentProvider.java | 16 +-- .../results/SentimentAnalysisResults.java | 107 ------------------ .../results/TextClassificationResults.java | 76 +++++++++++++ .../ml/inference/results/TopClassEntry.java | 4 + .../results/WarningInferenceResults.java | 5 + ...fig.java => TextClassificationConfig.java} | 44 ++++--- ...va => TextClassificationResultsTests.java} | 23 ++-- ...ava => TextClassificationConfigTests.java} | 21 ++-- .../xpack/ml/inference/nlp/TaskType.java | 6 +- ....java => TextClassificationProcessor.java} | 66 ++++++++--- ... => TextClassificationProcessorTests.java} | 55 +++++---- 12 files changed, 240 insertions(+), 189 deletions(-) delete mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/results/SentimentAnalysisResults.java create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/results/TextClassificationResults.java rename x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/{SentimentAnalysisConfig.java => TextClassificationConfig.java} (67%) rename x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/results/{SentimentAnalysisResultsTests.java => TextClassificationResultsTests.java} (51%) rename x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/{SentimentAnalysisConfigTests.java => TextClassificationConfigTests.java} (58%) rename x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/{SentimentAnalysisProcessor.java => TextClassificationProcessor.java} (52%) rename x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/{SentimentAnalysisProcessorTests.java => TextClassificationProcessorTests.java} (57%) diff --git a/docs/reference/ml/df-analytics/apis/infer-trained-model-deployment.asciidoc b/docs/reference/ml/df-analytics/apis/infer-trained-model-deployment.asciidoc index 9e1ce728d553d..a08e963bd340d 100644 --- a/docs/reference/ml/df-analytics/apis/infer-trained-model-deployment.asciidoc +++ b/docs/reference/ml/df-analytics/apis/infer-trained-model-deployment.asciidoc @@ -59,8 +59,8 @@ The input text for evaluation. [[infer-trained-model-deployment-example]] == {api-examples-title} -The response depends on the task the model is trained for. If it is a -sentiment analysis task, the response is the score. For example: +The response depends on the task the model is trained for. 
If it is a +text classification task, the response is the score. For example: [source,console] -------------------------------------------------- @@ -77,7 +77,7 @@ The API returns scores in this case, for example: ---- { "positive" : 0.9998062667902223, - "negative" : 1.9373320977752957E-4 + "negative" : 1.9373320977752957E-4 } ---- // NOTCONSOLE diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/MlInferenceNamedXContentProvider.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/MlInferenceNamedXContentProvider.java index 09ef13a028f2f..42b8b983396a0 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/MlInferenceNamedXContentProvider.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/MlInferenceNamedXContentProvider.java @@ -25,7 +25,7 @@ import org.elasticsearch.xpack.core.ml.inference.results.NerResults; import org.elasticsearch.xpack.core.ml.inference.results.PyTorchPassThroughResults; import org.elasticsearch.xpack.core.ml.inference.results.RegressionInferenceResults; -import org.elasticsearch.xpack.core.ml.inference.results.SentimentAnalysisResults; +import org.elasticsearch.xpack.core.ml.inference.results.TextClassificationResults; import org.elasticsearch.xpack.core.ml.inference.results.WarningInferenceResults; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.DistilBertTokenization; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.BertPassThroughConfig; @@ -44,7 +44,7 @@ import org.elasticsearch.xpack.core.ml.inference.trainedmodel.RegressionConfig; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.RegressionConfigUpdate; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.ResultsFieldUpdate; -import org.elasticsearch.xpack.core.ml.inference.trainedmodel.SentimentAnalysisConfig; +import org.elasticsearch.xpack.core.ml.inference.trainedmodel.TextClassificationConfig; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.StrictlyParsedInferenceConfig; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.StrictlyParsedTrainedModel; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.StrictlyParsedTrainedModelLocation; @@ -172,9 +172,9 @@ public List getNamedXContentParsers() { namedXContent.add(new NamedXContentRegistry.Entry(StrictlyParsedInferenceConfig.class, new ParseField(FillMaskConfig.NAME), FillMaskConfig::fromXContentStrict)); namedXContent.add(new NamedXContentRegistry.Entry(LenientlyParsedInferenceConfig.class, - new ParseField(SentimentAnalysisConfig.NAME), SentimentAnalysisConfig::fromXContentLenient)); - namedXContent.add(new NamedXContentRegistry.Entry(StrictlyParsedInferenceConfig.class, new ParseField(SentimentAnalysisConfig.NAME), - SentimentAnalysisConfig::fromXContentStrict)); + new ParseField(TextClassificationConfig.NAME), TextClassificationConfig::fromXContentLenient)); + namedXContent.add(new NamedXContentRegistry.Entry(StrictlyParsedInferenceConfig.class, + new ParseField(TextClassificationConfig.NAME), TextClassificationConfig::fromXContentStrict)); namedXContent.add(new NamedXContentRegistry.Entry(LenientlyParsedInferenceConfig.class, new ParseField(BertPassThroughConfig.NAME), BertPassThroughConfig::fromXContentLenient)); namedXContent.add(new NamedXContentRegistry.Entry(StrictlyParsedInferenceConfig.class, new ParseField(BertPassThroughConfig.NAME), @@ -269,8 +269,8 @@ public List getNamedWriteables() { PyTorchPassThroughResults.NAME, 
PyTorchPassThroughResults::new)); namedWriteables.add(new NamedWriteableRegistry.Entry(InferenceResults.class, - SentimentAnalysisResults.NAME, - SentimentAnalysisResults::new)); + TextClassificationResults.NAME, + TextClassificationResults::new)); // Inference Configs namedWriteables.add(new NamedWriteableRegistry.Entry(InferenceConfig.class, @@ -282,7 +282,7 @@ public List getNamedWriteables() { namedWriteables.add(new NamedWriteableRegistry.Entry(InferenceConfig.class, FillMaskConfig.NAME, FillMaskConfig::new)); namedWriteables.add(new NamedWriteableRegistry.Entry(InferenceConfig.class, - SentimentAnalysisConfig.NAME, SentimentAnalysisConfig::new)); + TextClassificationConfig.NAME, TextClassificationConfig::new)); namedWriteables.add(new NamedWriteableRegistry.Entry(InferenceConfig.class, BertPassThroughConfig.NAME, BertPassThroughConfig::new)); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/results/SentimentAnalysisResults.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/results/SentimentAnalysisResults.java deleted file mode 100644 index 7b98949ec5a13..0000000000000 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/results/SentimentAnalysisResults.java +++ /dev/null @@ -1,107 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.xpack.core.ml.inference.results; - -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.LinkedHashMap; -import java.util.Map; -import java.util.Objects; - -public class SentimentAnalysisResults implements InferenceResults { - - public static final String NAME = "sentiment_analysis_result"; - - private final String class1Label; - private final String class2Label; - private final double class1Score; - private final double class2Score; - - public SentimentAnalysisResults(String class1Label, double class1Score, - String class2Label, double class2Score) { - this.class1Label = class1Label; - this.class1Score = class1Score; - this.class2Label = class2Label; - this.class2Score = class2Score; - } - - public SentimentAnalysisResults(StreamInput in) throws IOException { - class1Label = in.readString(); - class1Score = in.readDouble(); - class2Label = in.readString(); - class2Score = in.readDouble(); - } - - public String getClass1Label() { - return class1Label; - } - - public double getClass1Score() { - return class1Score; - } - - public String getClass2Label() { - return class2Label; - } - - public double getClass2Score() { - return class2Score; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.field(class1Label, class1Score); - builder.field(class2Label, class2Score); - return builder; - } - - @Override - public String getWriteableName() { - return NAME; - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - out.writeString(class1Label); - out.writeDouble(class1Score); - out.writeString(class2Label); - out.writeDouble(class2Score); - } - - @Override - public Map asMap() { - Map map = new LinkedHashMap<>(); - map.put(class1Label, class1Score); - map.put(class2Label, 
class2Score); - return map; - } - - @Override - public Object predictedValue() { - return class1Score; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - SentimentAnalysisResults that = (SentimentAnalysisResults) o; - return Double.compare(that.class1Score, class1Score) == 0 && - Double.compare(that.class2Score, class2Score) == 0 && - Objects.equals(this.class1Label, that.class1Label) && - Objects.equals(this.class2Label, that.class2Label); - } - - @Override - public int hashCode() { - return Objects.hash(class1Label, class1Score, class2Label, class2Score); - } -} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/results/TextClassificationResults.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/results/TextClassificationResults.java new file mode 100644 index 0000000000000..c9bb405e298ba --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/results/TextClassificationResults.java @@ -0,0 +1,76 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.core.ml.inference.results; + +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.XContentBuilder; + +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; + +public class TextClassificationResults implements InferenceResults { + + public static final String NAME = "text_classification_result"; + + private final List entryList; + + public TextClassificationResults(List entryList) { + this.entryList = entryList; + } + + public TextClassificationResults(StreamInput in) throws IOException { + entryList = in.readList(TopClassEntry::new); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.mapContents(asMap()); + return builder; + } + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeList(entryList); + } + + @Override + public Map asMap() { + Map map = new LinkedHashMap<>(); + for (TopClassEntry entry : entryList) { + map.put(entry.getClassification().toString(), entry.getScore()); + } + return map; + } + + @Override + public Object predictedValue() { + return entryList.get(0).getScore(); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + TextClassificationResults that = (TextClassificationResults) o; + return Objects.equals(that.entryList, entryList); + } + + @Override + public int hashCode() { + return Objects.hash(entryList); + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/results/TopClassEntry.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/results/TopClassEntry.java index 8314af6dd9e10..42286e7665f88 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/results/TopClassEntry.java +++ 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/results/TopClassEntry.java @@ -66,6 +66,10 @@ public static TopClassEntry fromXContent(XContentParser parser) throws IOExcepti private final double probability; private final double score; + public TopClassEntry(Object classification, double score) { + this(classification, score, score); + } + public TopClassEntry(Object classification, double probability, double score) { this.classification = ExceptionsHelper.requireNonNull(classification, CLASS_NAME); this.probability = probability; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/results/WarningInferenceResults.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/results/WarningInferenceResults.java index 1651425bce7ce..59c250e595096 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/results/WarningInferenceResults.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/results/WarningInferenceResults.java @@ -6,6 +6,7 @@ */ package org.elasticsearch.xpack.core.ml.inference.results; +import org.elasticsearch.common.logging.LoggerMessageFormat; import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -23,6 +24,10 @@ public class WarningInferenceResults implements InferenceResults { private final String warning; + public WarningInferenceResults(String warning, Object... args) { + this(LoggerMessageFormat.format(warning, args)); + } + public WarningInferenceResults(String warning) { this.warning = warning; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/SentimentAnalysisConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextClassificationConfig.java similarity index 67% rename from x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/SentimentAnalysisConfig.java rename to x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextClassificationConfig.java index e6de8f295c852..556230bfadd92 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/SentimentAnalysisConfig.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextClassificationConfig.java @@ -11,6 +11,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.ConstructingObjectParser; +import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.core.Nullable; @@ -21,50 +22,58 @@ import java.util.Collections; import java.util.List; import java.util.Objects; +import java.util.Optional; -public class SentimentAnalysisConfig implements NlpConfig { +public class TextClassificationConfig implements NlpConfig { - public static final String NAME = "sentiment_analysis"; + public static final String NAME = "text_classification"; + public static final ParseField NUM_TOP_CLASSES = new ParseField("num_top_classes"); - public static SentimentAnalysisConfig fromXContentStrict(XContentParser parser) { + public static TextClassificationConfig fromXContentStrict(XContentParser parser) { return 
STRICT_PARSER.apply(parser, null); } - public static SentimentAnalysisConfig fromXContentLenient(XContentParser parser) { + public static TextClassificationConfig fromXContentLenient(XContentParser parser) { return LENIENT_PARSER.apply(parser, null); } - private static final ConstructingObjectParser STRICT_PARSER = createParser(false); - private static final ConstructingObjectParser LENIENT_PARSER = createParser(true); + private static final ConstructingObjectParser STRICT_PARSER = createParser(false); + private static final ConstructingObjectParser LENIENT_PARSER = createParser(true); @SuppressWarnings({ "unchecked"}) - private static ConstructingObjectParser createParser(boolean ignoreUnknownFields) { - ConstructingObjectParser parser = new ConstructingObjectParser<>(NAME, ignoreUnknownFields, - a -> new SentimentAnalysisConfig((VocabularyConfig) a[0], (Tokenization) a[1], (List) a[2])); + private static ConstructingObjectParser createParser(boolean ignoreUnknownFields) { + ConstructingObjectParser parser = new ConstructingObjectParser<>(NAME, ignoreUnknownFields, + a -> new TextClassificationConfig((VocabularyConfig) a[0], (Tokenization) a[1], (List) a[2], (Integer) a[3])); parser.declareObject(ConstructingObjectParser.constructorArg(), VocabularyConfig.createParser(ignoreUnknownFields), VOCABULARY); parser.declareNamedObject( ConstructingObjectParser.optionalConstructorArg(), (p, c, n) -> p.namedObject(Tokenization.class, n, ignoreUnknownFields), TOKENIZATION ); parser.declareStringArray(ConstructingObjectParser.optionalConstructorArg(), CLASSIFICATION_LABELS); + parser.declareInt(ConstructingObjectParser.optionalConstructorArg(), NUM_TOP_CLASSES); return parser; } private final VocabularyConfig vocabularyConfig; private final Tokenization tokenization; private final List classificationLabels; + private final int numTopClasses; - public SentimentAnalysisConfig(VocabularyConfig vocabularyConfig, @Nullable Tokenization tokenization, - @Nullable List classificationLabels) { + public TextClassificationConfig(VocabularyConfig vocabularyConfig, + @Nullable Tokenization tokenization, + @Nullable List classificationLabels, + @Nullable Integer numTopClasses) { this.vocabularyConfig = ExceptionsHelper.requireNonNull(vocabularyConfig, VOCABULARY); this.tokenization = tokenization == null ? Tokenization.createDefault() : tokenization; this.classificationLabels = classificationLabels == null ? 
Collections.emptyList() : classificationLabels; + this.numTopClasses = Optional.ofNullable(numTopClasses).orElse(-1); } - public SentimentAnalysisConfig(StreamInput in) throws IOException { + public TextClassificationConfig(StreamInput in) throws IOException { vocabularyConfig = new VocabularyConfig(in); tokenization = in.readNamedWriteable(Tokenization.class); classificationLabels = in.readStringList(); + numTopClasses = in.readInt(); } @Override @@ -72,6 +81,7 @@ public void writeTo(StreamOutput out) throws IOException { vocabularyConfig.writeTo(out); out.writeNamedWriteable(tokenization); out.writeStringCollection(classificationLabels); + out.writeInt(numTopClasses); } @Override @@ -82,6 +92,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws if (classificationLabels.isEmpty() == false) { builder.field(CLASSIFICATION_LABELS.getPreferredName(), classificationLabels); } + builder.field(NUM_TOP_CLASSES.getPreferredName(), numTopClasses); builder.endObject(); return builder; } @@ -111,15 +122,16 @@ public boolean equals(Object o) { if (o == this) return true; if (o == null || getClass() != o.getClass()) return false; - SentimentAnalysisConfig that = (SentimentAnalysisConfig) o; + TextClassificationConfig that = (TextClassificationConfig) o; return Objects.equals(vocabularyConfig, that.vocabularyConfig) && Objects.equals(tokenization, that.tokenization) + && Objects.equals(numTopClasses, that.numTopClasses) && Objects.equals(classificationLabels, that.classificationLabels); } @Override public int hashCode() { - return Objects.hash(vocabularyConfig, tokenization, classificationLabels); + return Objects.hash(vocabularyConfig, tokenization, classificationLabels, numTopClasses); } @Override @@ -136,6 +148,10 @@ public List getClassificationLabels() { return classificationLabels; } + public int getNumTopClasses() { + return numTopClasses; + } + @Override public boolean isAllocateOnly() { return true; diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/results/SentimentAnalysisResultsTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/results/TextClassificationResultsTests.java similarity index 51% rename from x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/results/SentimentAnalysisResultsTests.java rename to x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/results/TextClassificationResultsTests.java index dfdff35a03220..8941070932088 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/results/SentimentAnalysisResultsTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/results/TextClassificationResultsTests.java @@ -10,25 +10,34 @@ import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.test.AbstractWireSerializingTestCase; +import java.util.List; import java.util.Map; +import java.util.stream.Collectors; +import java.util.stream.Stream; import static org.hamcrest.Matchers.closeTo; import static org.hamcrest.Matchers.hasSize; -public class SentimentAnalysisResultsTests extends AbstractWireSerializingTestCase { +public class TextClassificationResultsTests extends AbstractWireSerializingTestCase { @Override - protected Writeable.Reader instanceReader() { - return SentimentAnalysisResults::new; + protected Writeable.Reader instanceReader() { + return TextClassificationResults::new; } @Override - protected SentimentAnalysisResults 
createTestInstance() { - return new SentimentAnalysisResults(randomAlphaOfLength(6), randomDouble(), - randomAlphaOfLength(6), randomDouble()); + protected TextClassificationResults createTestInstance() { + return new TextClassificationResults( + Stream.generate(TopClassEntryTests::createRandomTopClassEntry).limit(randomIntBetween(2, 5)).collect(Collectors.toList()) + ); } public void testAsMap() { - SentimentAnalysisResults testInstance = new SentimentAnalysisResults("foo", 1.0, "bar", 0.0); + TextClassificationResults testInstance = new TextClassificationResults( + List.of( + new TopClassEntry("foo", 1.0), + new TopClassEntry("bar", 0.0) + ) + ); Map asMap = testInstance.asMap(); assertThat(asMap.keySet(), hasSize(2)); assertThat(1.0, closeTo((Double)asMap.get("foo"), 0.0001)); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/SentimentAnalysisConfigTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextClassificationConfigTests.java similarity index 58% rename from x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/SentimentAnalysisConfigTests.java rename to x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextClassificationConfigTests.java index 06f62caa052cb..6eb89df38478e 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/SentimentAnalysisConfigTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextClassificationConfigTests.java @@ -15,7 +15,7 @@ import java.io.IOException; -public class SentimentAnalysisConfigTests extends InferenceConfigItemTestCase { +public class TextClassificationConfigTests extends InferenceConfigItemTestCase { private boolean lenient; @@ -25,32 +25,33 @@ public void chooseStrictOrLenient() { } @Override - protected SentimentAnalysisConfig doParseInstance(XContentParser parser) throws IOException { - return lenient ? SentimentAnalysisConfig.fromXContentLenient(parser) : SentimentAnalysisConfig.fromXContentStrict(parser); + protected TextClassificationConfig doParseInstance(XContentParser parser) throws IOException { + return lenient ? TextClassificationConfig.fromXContentLenient(parser) : TextClassificationConfig.fromXContentStrict(parser); } @Override - protected Writeable.Reader instanceReader() { - return SentimentAnalysisConfig::new; + protected Writeable.Reader instanceReader() { + return TextClassificationConfig::new; } @Override - protected SentimentAnalysisConfig createTestInstance() { + protected TextClassificationConfig createTestInstance() { return createRandom(); } @Override - protected SentimentAnalysisConfig mutateInstanceForVersion(SentimentAnalysisConfig instance, Version version) { + protected TextClassificationConfig mutateInstanceForVersion(TextClassificationConfig instance, Version version) { return instance; } - public static SentimentAnalysisConfig createRandom() { - return new SentimentAnalysisConfig( + public static TextClassificationConfig createRandom() { + return new TextClassificationConfig( VocabularyConfigTests.createRandom(), randomBoolean() ? null : randomFrom(BertTokenizationTests.createRandom(), DistilBertTokenizationTests.createRandom()), - randomBoolean() ? null : randomList(5, () -> randomAlphaOfLength(10)) + randomBoolean() ? null : randomList(5, () -> randomAlphaOfLength(10)), + randomBoolean() ? 
null : randomIntBetween(-1, 10) ); } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/TaskType.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/TaskType.java index 5338f4552b5f6..c0f9415af0638 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/TaskType.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/TaskType.java @@ -11,7 +11,7 @@ import org.elasticsearch.xpack.core.ml.inference.trainedmodel.FillMaskConfig; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.NerConfig; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.NlpConfig; -import org.elasticsearch.xpack.core.ml.inference.trainedmodel.SentimentAnalysisConfig; +import org.elasticsearch.xpack.core.ml.inference.trainedmodel.TextClassificationConfig; import org.elasticsearch.xpack.ml.inference.nlp.tokenizers.NlpTokenizer; import java.util.Locale; @@ -24,10 +24,10 @@ public NlpTask.Processor createProcessor(NlpTokenizer tokenizer, NlpConfig confi return new NerProcessor(tokenizer, (NerConfig) config); } }, - SENTIMENT_ANALYSIS { + TEXT_CLASSIFICATION { @Override public NlpTask.Processor createProcessor(NlpTokenizer tokenizer, NlpConfig config) { - return new SentimentAnalysisProcessor(tokenizer, (SentimentAnalysisConfig) config); + return new TextClassificationProcessor(tokenizer, (TextClassificationConfig) config); } }, FILL_MASK { diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/SentimentAnalysisProcessor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/TextClassificationProcessor.java similarity index 52% rename from x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/SentimentAnalysisProcessor.java rename to x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/TextClassificationProcessor.java index 64238607eb736..91cfc0bc7d7a8 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/SentimentAnalysisProcessor.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/TextClassificationProcessor.java @@ -7,45 +7,67 @@ package org.elasticsearch.xpack.ml.inference.nlp; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.ValidationException; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.xpack.core.ml.inference.results.InferenceResults; -import org.elasticsearch.xpack.core.ml.inference.results.SentimentAnalysisResults; +import org.elasticsearch.xpack.core.ml.inference.results.TextClassificationResults; +import org.elasticsearch.xpack.core.ml.inference.results.TopClassEntry; import org.elasticsearch.xpack.core.ml.inference.results.WarningInferenceResults; -import org.elasticsearch.xpack.core.ml.inference.trainedmodel.SentimentAnalysisConfig; +import org.elasticsearch.xpack.core.ml.inference.trainedmodel.TextClassificationConfig; import org.elasticsearch.xpack.ml.inference.deployment.PyTorchResult; import org.elasticsearch.xpack.ml.inference.nlp.tokenizers.NlpTokenizer; import org.elasticsearch.xpack.ml.inference.nlp.tokenizers.TokenizationResult; import java.io.IOException; import java.util.Arrays; +import java.util.Comparator; import java.util.List; import java.util.Locale; +import java.util.stream.Collectors; +import java.util.stream.IntStream; -public class 
SentimentAnalysisProcessor implements NlpTask.Processor { +public class TextClassificationProcessor implements NlpTask.Processor { private final NlpTokenizer tokenizer; - private final List classLabels; + private final String[] classLabels; + private final int numTopClasses; - SentimentAnalysisProcessor(NlpTokenizer tokenizer, SentimentAnalysisConfig config) { + TextClassificationProcessor(NlpTokenizer tokenizer, TextClassificationConfig config) { this.tokenizer = tokenizer; List classLabels = config.getClassificationLabels(); if (classLabels == null || classLabels.isEmpty()) { - this.classLabels = List.of("negative", "positive"); + this.classLabels = new String[] {"negative", "positive"}; } else { - this.classLabels = classLabels; + this.classLabels = classLabels.toArray(String[]::new); } - + // negative values are a special case of asking for ALL classes. Since we require the output size to equal the classLabel size + // This is a nice way of setting the value + this.numTopClasses = config.getNumTopClasses() < 0 ? this.classLabels.length : config.getNumTopClasses(); validate(); } private void validate() { - if (classLabels.size() != 2) { + if (classLabels.length < 2) { throw new ValidationException().addValidationError( - String.format(Locale.ROOT, "Sentiment analysis requires exactly 2 [%s]. Invalid labels %s", - SentimentAnalysisConfig.CLASSIFICATION_LABELS, classLabels) + String.format( + Locale.ROOT, + "Text classification requires at least 2 [%s]. Invalid labels [%s]", + TextClassificationConfig.CLASSIFICATION_LABELS, + Strings.arrayToCommaDelimitedString(classLabels) + ) + ); + } + if (numTopClasses == 0) { + throw new ValidationException().addValidationError( + String.format( + Locale.ROOT, + "Text classification requires at least 1 [%s]; provided [%d]", + TextClassificationConfig.NUM_TOP_CLASSES, + numTopClasses + ) ); } } @@ -72,18 +94,26 @@ public NlpTask.ResultProcessor getResultProcessor() { InferenceResults processResult(TokenizationResult tokenization, PyTorchResult pyTorchResult) { if (pyTorchResult.getInferenceResult().length < 1) { - return new WarningInferenceResults("Sentiment analysis result has no data"); + return new WarningInferenceResults("Text classification result has no data"); } - if (pyTorchResult.getInferenceResult()[0].length < 2) { - return new WarningInferenceResults("Expected 2 values in sentiment analysis result"); + if (pyTorchResult.getInferenceResult()[0].length != classLabels.length) { + return new WarningInferenceResults( + "Expected exactly [{}] values in text classification result; got [{}]", + classLabels.length, + pyTorchResult.getInferenceResult()[0].length + ); } double[] normalizedScores = NlpHelpers.convertToProbabilitiesBySoftMax(pyTorchResult.getInferenceResult()[0]); - // the second score is usually the positive score so put that first - // so it comes first in the results doc - return new SentimentAnalysisResults(classLabels.get(1), normalizedScores[1], - classLabels.get(0), normalizedScores[0]); + return new TextClassificationResults( + IntStream.range(0, normalizedScores.length) + .mapToObj(i -> new TopClassEntry(classLabels[i], normalizedScores[i])) + // Put the highest scoring class first + .sorted(Comparator.comparing(TopClassEntry::getProbability).reversed()) + .limit(numTopClasses) + .collect(Collectors.toList()) + ); } static BytesReference jsonRequest(int[] tokens, String requestId) throws IOException { diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/SentimentAnalysisProcessorTests.java 
b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/TextClassificationProcessorTests.java similarity index 57% rename from x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/SentimentAnalysisProcessorTests.java rename to x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/TextClassificationProcessorTests.java index 4036e13cbe850..a43efc8499c72 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/SentimentAnalysisProcessorTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/TextClassificationProcessorTests.java @@ -14,7 +14,7 @@ import org.elasticsearch.xpack.core.ml.inference.results.InferenceResults; import org.elasticsearch.xpack.core.ml.inference.results.WarningInferenceResults; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.BertTokenization; -import org.elasticsearch.xpack.core.ml.inference.trainedmodel.SentimentAnalysisConfig; +import org.elasticsearch.xpack.core.ml.inference.trainedmodel.TextClassificationConfig; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.VocabularyConfig; import org.elasticsearch.xpack.ml.inference.deployment.PyTorchResult; import org.elasticsearch.xpack.ml.inference.nlp.tokenizers.BertTokenizer; @@ -29,24 +29,25 @@ import static org.hamcrest.Matchers.instanceOf; import static org.mockito.Mockito.mock; -public class SentimentAnalysisProcessorTests extends ESTestCase { +public class TextClassificationProcessorTests extends ESTestCase { public void testInvalidResult() { - SentimentAnalysisConfig config = new SentimentAnalysisConfig(new VocabularyConfig("test-index", "vocab"), null, null); - SentimentAnalysisProcessor processor = new SentimentAnalysisProcessor(mock(BertTokenizer.class), config); + TextClassificationConfig config = new TextClassificationConfig(new VocabularyConfig("test-index", "vocab"), null, null, null); + TextClassificationProcessor processor = new TextClassificationProcessor(mock(BertTokenizer.class), config); { - PyTorchResult torchResult = new PyTorchResult("foo", new double[][]{}, 0L, null); + PyTorchResult torchResult = new PyTorchResult("foo", new double[][] {}, 0L, null); InferenceResults inferenceResults = processor.processResult(null, torchResult); assertThat(inferenceResults, instanceOf(WarningInferenceResults.class)); - assertEquals("Sentiment analysis result has no data", - ((WarningInferenceResults) inferenceResults).getWarning()); + assertEquals("Text classification result has no data", ((WarningInferenceResults) inferenceResults).getWarning()); } { - PyTorchResult torchResult = new PyTorchResult("foo", new double[][]{{1.0}}, 0L, null); + PyTorchResult torchResult = new PyTorchResult("foo", new double[][] { { 1.0 } }, 0L, null); InferenceResults inferenceResults = processor.processResult(null, torchResult); assertThat(inferenceResults, instanceOf(WarningInferenceResults.class)); - assertEquals("Expected 2 values in sentiment analysis result", - ((WarningInferenceResults)inferenceResults).getWarning()); + assertEquals( + "Expected exactly [2] values in text classification result; got [1]", + ((WarningInferenceResults) inferenceResults).getWarning() + ); } } @@ -56,8 +57,8 @@ public void testBuildRequest() throws IOException { new BertTokenization(null, null, 512) ).build(); - SentimentAnalysisConfig config = new SentimentAnalysisConfig(new VocabularyConfig("test-index", "vocab"), null, null); - SentimentAnalysisProcessor processor = new SentimentAnalysisProcessor(tokenizer, 
config); + TextClassificationConfig config = new TextClassificationConfig(new VocabularyConfig("test-index", "vocab"), null, null, null); + TextClassificationProcessor processor = new TextClassificationProcessor(tokenizer, config); NlpTask.Request request = processor.buildRequest("Elasticsearch fun", "request1"); @@ -70,14 +71,30 @@ public void testBuildRequest() throws IOException { } public void testValidate() { + ValidationException validationException = expectThrows( + ValidationException.class, + () -> new TextClassificationProcessor( + mock(BertTokenizer.class), + new TextClassificationConfig(new VocabularyConfig("test-index", "vocab"), null, List.of("too few"), null) + ) + ); - SentimentAnalysisConfig config = new SentimentAnalysisConfig(new VocabularyConfig("test-index", "vocab"), null, - List.of("too", "many", "class", "labels")); + assertThat( + validationException.getMessage(), + containsString("Text classification requires at least 2 [classification_labels]. Invalid labels [too few]") + ); - ValidationException validationException = expectThrows(ValidationException.class, - () -> new SentimentAnalysisProcessor(mock(BertTokenizer.class), config)); + validationException = expectThrows( + ValidationException.class, + () -> new TextClassificationProcessor( + mock(BertTokenizer.class), + new TextClassificationConfig(new VocabularyConfig("test-index", "vocab"), null, List.of("class", "labels"), 0) + ) + ); - assertThat(validationException.getMessage(), - containsString("Sentiment analysis requires exactly 2 [classification_labels]. Invalid labels [too, many, class, labels]")); + assertThat( + validationException.getMessage(), + containsString("Text classification requires at least 1 [num_top_classes]; provided [0]") + ); } - } +} From 429beba51796947ff513a010df648d5f559d3a1d Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Wed, 1 Sep 2021 09:06:24 -0400 Subject: [PATCH 063/128] Centralize doc values checking (#77089) This adds two utility methods for to validate the parameters to the `docValueFormat` method and replaces a pile of copy and pasted code with calls to them. They just emit a standard error message if the any unsupported parameters are provided. 
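As a rough sketch of the pattern this commit introduces (hedged: the two helpers, their signatures and their error wording come from the MappedFieldType hunk below; the surrounding class and the name()/typeName() framing are illustrative only and not part of the patch):

// Sketch, not patch content: the shape of the two shared checks added to MappedFieldType.
import java.time.ZoneId;

abstract class FieldTypeSketch {
    abstract String name();      // field name, e.g. "price" (illustrative)
    abstract String typeName();  // mapper type name, e.g. "boolean" (illustrative)

    // Centralized validation: subclasses call these instead of copy-pasting the checks.
    protected void checkNoFormat(String format) {
        if (format != null) {
            throw new IllegalArgumentException(
                "Field [" + name() + "] of type [" + typeName() + "] does not support custom formats");
        }
    }

    protected void checkNoTimeZone(ZoneId timeZone) {
        if (timeZone != null) {
            throw new IllegalArgumentException(
                "Field [" + name() + "] of type [" + typeName() + "] does not support custom time zones");
        }
    }
}

A field type that supports neither parameter then reduces its docValueFormat override to the two calls plus its return value, as the BooleanFieldMapper and IpFieldMapper hunks below show.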
--- .../index/mapper/ScaledFloatFieldMapper.java | 8 ++----- .../index/mapper/BooleanFieldMapper.java | 9 ++------ .../index/mapper/BooleanScriptFieldType.java | 8 ++----- .../index/mapper/DoubleScriptFieldType.java | 4 +--- .../index/mapper/IpFieldMapper.java | 9 ++------ .../index/mapper/IpScriptFieldType.java | 13 +++-------- .../index/mapper/LongScriptFieldType.java | 4 +--- .../index/mapper/MappedFieldType.java | 23 +++++++++++++++---- .../index/mapper/NumberFieldMapper.java | 8 ++----- .../index/mapper/IpScriptFieldTypeTests.java | 4 ++-- .../unsignedlong/UnsignedLongFieldMapper.java | 6 +---- .../VersionStringFieldMapper.java | 10 ++------ 12 files changed, 39 insertions(+), 67 deletions(-) diff --git a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/ScaledFloatFieldMapper.java b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/ScaledFloatFieldMapper.java index 92531b4cc5c46..213f5e86b4a4d 100644 --- a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/ScaledFloatFieldMapper.java +++ b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/ScaledFloatFieldMapper.java @@ -233,15 +233,11 @@ public Object valueForDisplay(Object value) { @Override public DocValueFormat docValueFormat(String format, ZoneId timeZone) { - if (timeZone != null) { - throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() - + "] does not support custom time zones"); - } + checkNoTimeZone(timeZone); if (format == null) { return DocValueFormat.RAW; - } else { - return new DocValueFormat.Decimal(format); } + return new DocValueFormat.Decimal(format); } /** diff --git a/server/src/main/java/org/elasticsearch/index/mapper/BooleanFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/BooleanFieldMapper.java index 062265ad9da7e..8fcd20cd58aa2 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/BooleanFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/BooleanFieldMapper.java @@ -213,13 +213,8 @@ public IndexFieldData.Builder fielddataBuilder(String fullyQualifiedIndexName, S @Override public DocValueFormat docValueFormat(@Nullable String format, ZoneId timeZone) { - if (format != null) { - throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] does not support custom formats"); - } - if (timeZone != null) { - throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() - + "] does not support custom time zones"); - } + checkNoFormat(format); + checkNoTimeZone(timeZone); return DocValueFormat.BOOLEAN; } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/BooleanScriptFieldType.java b/server/src/main/java/org/elasticsearch/index/mapper/BooleanScriptFieldType.java index 8d26a2b382070..e349bbb9fd20d 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/BooleanScriptFieldType.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/BooleanScriptFieldType.java @@ -95,12 +95,8 @@ public Object valueForDisplay(Object value) { @Override public DocValueFormat docValueFormat(String format, ZoneId timeZone) { - if (format != null) { - throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] does not support custom formats"); - } - if (timeZone != null) { - throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] does not support custom time zones"); - } + checkNoFormat(format); + checkNoTimeZone(timeZone); return DocValueFormat.BOOLEAN; } 
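For field types that do accept a format string but not a time zone (ScaledFloatFieldMapper above; NumberFieldMapper and the numeric script field types below), only the time-zone check is shared. A docValueFormat override then reads roughly as follows — shown in isolation as a sketch, assuming the surrounding mapper class; the calls and return values mirror the NumberFieldMapper hunk in this patch:

@Override
public DocValueFormat docValueFormat(String format, ZoneId timeZone) {
    checkNoTimeZone(timeZone);                  // standard error message if a time zone is supplied
    if (format == null) {
        return DocValueFormat.RAW;              // default rendering
    }
    return new DocValueFormat.Decimal(format);  // numeric fields still honour a decimal pattern
}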
diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DoubleScriptFieldType.java b/server/src/main/java/org/elasticsearch/index/mapper/DoubleScriptFieldType.java index af9ca71d1deb2..66a90e0fae321 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/DoubleScriptFieldType.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/DoubleScriptFieldType.java @@ -86,9 +86,7 @@ public Object valueForDisplay(Object value) { @Override public DocValueFormat docValueFormat(String format, ZoneId timeZone) { - if (timeZone != null) { - throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] does not support custom time zones"); - } + checkNoTimeZone(timeZone); if (format == null) { return DocValueFormat.RAW; } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/IpFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/IpFieldMapper.java index a464ed9da9cd3..2631834a495fb 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/IpFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/IpFieldMapper.java @@ -378,13 +378,8 @@ public Object valueForDisplay(Object value) { @Override public DocValueFormat docValueFormat(@Nullable String format, ZoneId timeZone) { - if (format != null) { - throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] does not support custom formats"); - } - if (timeZone != null) { - throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() - + "] does not support custom time zones"); - } + checkNoFormat(format); + checkNoTimeZone(timeZone); return DocValueFormat.IP; } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/IpScriptFieldType.java b/server/src/main/java/org/elasticsearch/index/mapper/IpScriptFieldType.java index 1f81b690a8572..dc70b14a86518 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/IpScriptFieldType.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/IpScriptFieldType.java @@ -21,8 +21,8 @@ import org.elasticsearch.core.Tuple; import org.elasticsearch.index.fielddata.IpScriptFieldData; import org.elasticsearch.index.query.SearchExecutionContext; -import org.elasticsearch.script.IpFieldScript; import org.elasticsearch.script.CompositeFieldScript; +import org.elasticsearch.script.IpFieldScript; import org.elasticsearch.script.Script; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.lookup.SearchLookup; @@ -36,7 +36,6 @@ import java.util.ArrayList; import java.util.Collection; import java.util.List; -import java.util.Locale; import java.util.Map; import java.util.function.Function; import java.util.function.Supplier; @@ -88,14 +87,8 @@ public Object valueForDisplay(Object value) { @Override public DocValueFormat docValueFormat(String format, ZoneId timeZone) { - if (format != null) { - String message = "Runtime field [%s] of type [%s] does not support custom formats"; - throw new IllegalArgumentException(String.format(Locale.ROOT, message, name(), typeName())); - } - if (timeZone != null) { - String message = "Runtime field [%s] of type [%s] does not support custom time zones"; - throw new IllegalArgumentException(String.format(Locale.ROOT, message, name(), typeName())); - } + checkNoFormat(format); + checkNoTimeZone(timeZone); return DocValueFormat.IP; } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/LongScriptFieldType.java b/server/src/main/java/org/elasticsearch/index/mapper/LongScriptFieldType.java index 
ecd50a3477dc8..8c14a660a32c3 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/LongScriptFieldType.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/LongScriptFieldType.java @@ -83,9 +83,7 @@ public Object valueForDisplay(Object value) { @Override public DocValueFormat docValueFormat(String format, ZoneId timeZone) { - if (timeZone != null) { - throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] does not support custom time zones"); - } + checkNoTimeZone(timeZone); if (format == null) { return DocValueFormat.RAW; } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java b/server/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java index 0d5edfa025980..e7f2349984121 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java @@ -373,17 +373,32 @@ public boolean eagerGlobalOrdinals() { return false; } - /** Return a {@link DocValueFormat} that can be used to display and parse - * values as returned by the fielddata API. - * The default implementation returns a {@link DocValueFormat#RAW}. */ + /** + * Pick a {@link DocValueFormat} that can be used to display and parse + * values of fields of this type. + */ public DocValueFormat docValueFormat(@Nullable String format, ZoneId timeZone) { + checkNoFormat(format); + checkNoTimeZone(timeZone); + return DocValueFormat.RAW; + } + + /** + * Validate the provided {@code format} is null. + */ + protected void checkNoFormat(@Nullable String format) { if (format != null) { throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] does not support custom formats"); } + } + + /** + * Validate the provided {@code timeZone} is null. 
+ */ + protected void checkNoTimeZone(@Nullable ZoneId timeZone) { if (timeZone != null) { throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] does not support custom time zones"); } - return DocValueFormat.RAW; } /** diff --git a/server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java index e0967504bbf80..ced335a4f68c1 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java @@ -1060,15 +1060,11 @@ protected Object parseSourceValue(Object value) { @Override public DocValueFormat docValueFormat(String format, ZoneId timeZone) { - if (timeZone != null) { - throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() - + "] does not support custom time zones"); - } + checkNoTimeZone(timeZone); if (format == null) { return DocValueFormat.RAW; - } else { - return new DocValueFormat.Decimal(format); } + return new DocValueFormat.Decimal(format); } public Number parsePoint(byte[] value) { diff --git a/server/src/test/java/org/elasticsearch/index/mapper/IpScriptFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/IpScriptFieldTypeTests.java index a5eddb59fabae..9cba03e6e3468 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/IpScriptFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/IpScriptFieldTypeTests.java @@ -53,9 +53,9 @@ public class IpScriptFieldTypeTests extends AbstractScriptFieldTypeTestCase { public void testFormat() throws IOException { assertThat(simpleMappedFieldType().docValueFormat(null, null), sameInstance(DocValueFormat.IP)); Exception e = expectThrows(IllegalArgumentException.class, () -> simpleMappedFieldType().docValueFormat("ASDFA", null)); - assertThat(e.getMessage(), equalTo("Runtime field [test] of type [ip] does not support custom formats")); + assertThat(e.getMessage(), equalTo("Field [test] of type [ip] does not support custom formats")); e = expectThrows(IllegalArgumentException.class, () -> simpleMappedFieldType().docValueFormat(null, ZoneId.of("America/New_York"))); - assertThat(e.getMessage(), equalTo("Runtime field [test] of type [ip] does not support custom time zones")); + assertThat(e.getMessage(), equalTo("Field [test] of type [ip] does not support custom time zones")); } @Override diff --git a/x-pack/plugin/mapper-unsigned-long/src/main/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongFieldMapper.java b/x-pack/plugin/mapper-unsigned-long/src/main/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongFieldMapper.java index 22acedcbd49a1..a9d17eb5c251b 100644 --- a/x-pack/plugin/mapper-unsigned-long/src/main/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongFieldMapper.java +++ b/x-pack/plugin/mapper-unsigned-long/src/main/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongFieldMapper.java @@ -270,11 +270,7 @@ public Object valueForDisplay(Object value) { @Override public DocValueFormat docValueFormat(String format, ZoneId timeZone) { - if (timeZone != null) { - throw new IllegalArgumentException( - "Field [" + name() + "] of type [" + typeName() + "] does not support custom time zones" - ); - } + checkNoTimeZone(timeZone); return DocValueFormat.UNSIGNED_LONG_SHIFTED; } diff --git a/x-pack/plugin/mapper-version/src/main/java/org/elasticsearch/xpack/versionfield/VersionStringFieldMapper.java 
b/x-pack/plugin/mapper-version/src/main/java/org/elasticsearch/xpack/versionfield/VersionStringFieldMapper.java index dbdfc26e75aad..71234d31532f9 100644 --- a/x-pack/plugin/mapper-version/src/main/java/org/elasticsearch/xpack/versionfield/VersionStringFieldMapper.java +++ b/x-pack/plugin/mapper-version/src/main/java/org/elasticsearch/xpack/versionfield/VersionStringFieldMapper.java @@ -292,14 +292,8 @@ public Object valueForDisplay(Object value) { @Override public DocValueFormat docValueFormat(@Nullable String format, ZoneId timeZone) { - if (format != null) { - throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] does not support custom formats"); - } - if (timeZone != null) { - throw new IllegalArgumentException( - "Field [" + name() + "] of type [" + typeName() + "] does not support custom time zones" - ); - } + checkNoFormat(format); + checkNoTimeZone(timeZone); return VERSION_DOCVALUE; } From 3c589efef7984e5c0d0cbb9f65bc291e4c95e025 Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Wed, 1 Sep 2021 09:30:53 -0400 Subject: [PATCH 064/128] Use a real out of memory error in die with dignity (#77039) Today when testing dying with dignity, we simply throw an OutOfMemoryError. We know this should not get caught by any intermediate code and end up in the uncaught exception handler. This allows us to test that this exception handler is able to successfully kill the VM. However, it is on the table to no longer use the uncaught exception handler, but instead the built-in support for ExitOnOutOfMemoryError. A fake OutOfMemoryError would not be processed by this handler, so to prepare the way, we switch to using a real OutOfMemoryError. --- .../qa/die_with_dignity/DieWithDignityIT.java | 2 +- .../RestDieWithDignityAction.java | 22 ++++++++++++++++++- 2 files changed, 22 insertions(+), 2 deletions(-) diff --git a/qa/die-with-dignity/src/javaRestTest/java/org/elasticsearch/qa/die_with_dignity/DieWithDignityIT.java b/qa/die-with-dignity/src/javaRestTest/java/org/elasticsearch/qa/die_with_dignity/DieWithDignityIT.java index 58a4ca0c6a538..dc6deac379117 100644 --- a/qa/die-with-dignity/src/javaRestTest/java/org/elasticsearch/qa/die_with_dignity/DieWithDignityIT.java +++ b/qa/die-with-dignity/src/javaRestTest/java/org/elasticsearch/qa/die_with_dignity/DieWithDignityIT.java @@ -62,7 +62,7 @@ public void testDieWithDignity() throws Exception { ".*ElasticsearchUncaughtExceptionHandler.*", ".*javaRestTest-0.*", ".*fatal error in thread \\[Thread-\\d+\\], exiting.*", - ".*java.lang.OutOfMemoryError: die with dignity.*" + ".*java.lang.OutOfMemoryError: Requested array size exceeds VM limit.*" )) { fatalErrorInThreadExiting = true; } diff --git a/qa/die-with-dignity/src/main/java/org/elasticsearch/RestDieWithDignityAction.java b/qa/die-with-dignity/src/main/java/org/elasticsearch/RestDieWithDignityAction.java index 82288f0a73364..0835d3df5a21d 100644 --- a/qa/die-with-dignity/src/main/java/org/elasticsearch/RestDieWithDignityAction.java +++ b/qa/die-with-dignity/src/main/java/org/elasticsearch/RestDieWithDignityAction.java @@ -9,8 +9,12 @@ package org.elasticsearch; import org.elasticsearch.client.node.NodeClient; +import org.elasticsearch.common.Randomness; +import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.BytesRestResponse; import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestStatus; import java.util.List; @@ -32,7 +36,23 @@ public String getName() { @Override 
protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) { - throw new OutOfMemoryError("die with dignity"); + return channel -> { + /* + * This is to force the size of the array to be non-deterministic so that a sufficiently smart compiler can not optimize away + * getting the length of the array to a constant. + */ + final int length = Randomness.get().nextBoolean() ? Integer.MAX_VALUE - 1 : Integer.MAX_VALUE; + final long[] array = new long[length]; + // this is to force the array to be consumed so that it can not be optimized away by a sufficiently smart compiler + try (XContentBuilder builder = channel.newBuilder()) { + builder.startObject(); + { + builder.field("length", array.length); + } + builder.endObject(); + channel.sendResponse(new BytesRestResponse(RestStatus.OK, builder)); + } + }; } } From 33c4129a60fb27250c0cd8e88b32150cf97715d0 Mon Sep 17 00:00:00 2001 From: Rory Hunter Date: Wed, 1 Sep 2021 14:32:24 +0100 Subject: [PATCH 065/128] Revert "Reapply "Handle cgroups v2 in `OsProbe` (#76883)" (#77106)" This reverts commit 54d4737b23f0bb468ace66b249f272d7fea1df59. --- .../packaging/test/DockerTests.java | 1 + .../org/elasticsearch/monitor/os/OsProbe.java | 263 ++++-------------- .../elasticsearch/bootstrap/security.policy | 7 - .../monitor/os/OsProbeTests.java | 125 ++++----- 4 files changed, 103 insertions(+), 293 deletions(-) diff --git a/qa/os/src/test/java/org/elasticsearch/packaging/test/DockerTests.java b/qa/os/src/test/java/org/elasticsearch/packaging/test/DockerTests.java index 45689f0fed691..d5fd91a427bac 100644 --- a/qa/os/src/test/java/org/elasticsearch/packaging/test/DockerTests.java +++ b/qa/os/src/test/java/org/elasticsearch/packaging/test/DockerTests.java @@ -849,6 +849,7 @@ public void test131InitProcessHasCorrectPID() { /** * Check that Elasticsearch reports per-node cgroup information. */ + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/76812") public void test140CgroupOsStatsAreAvailable() throws Exception { waitForElasticsearch(installation, USERNAME, PASSWORD); diff --git a/server/src/main/java/org/elasticsearch/monitor/os/OsProbe.java b/server/src/main/java/org/elasticsearch/monitor/os/OsProbe.java index 87a068f8ba7ff..a20b7c7e27396 100644 --- a/server/src/main/java/org/elasticsearch/monitor/os/OsProbe.java +++ b/server/src/main/java/org/elasticsearch/monitor/os/OsProbe.java @@ -30,31 +30,25 @@ import java.util.regex.Matcher; import java.util.regex.Pattern; import java.util.stream.Collectors; -import java.util.stream.Stream; /** * The {@link OsProbe} class retrieves information about the physical and swap size of the machine * memory, as well as the system load average and cpu load. * - *
In some exceptional cases, it's possible the underlying native methods used by
+ * In some exceptional cases, it's possible the underlying native methods used by
 * {@link #getFreePhysicalMemorySize()}, {@link #getTotalPhysicalMemorySize()},
 * {@link #getFreeSwapSpaceSize()}, and {@link #getTotalSwapSpaceSize()} can return a
 * negative value. Because of this, we prevent those methods from returning negative values,
 * returning 0 instead.
 *
- * The OS can report a negative number in a number of cases:
- *
- *   • Non-supported OSes (HP-UX, or AIX)
- *   • A failure of macOS to initialize host statistics
- *   • An OS that does not support the {@code _SC_PHYS_PAGES} or {@code _SC_PAGE_SIZE} flags for the {@code sysconf()} linux kernel call
- *   • An overflow of the product of {@code _SC_PHYS_PAGES} and {@code _SC_PAGE_SIZE}
- *   • An error case retrieving these values from a linux kernel
- *   • A non-standard libc implementation not implementing the required values
- *
- *
For a more exhaustive explanation, see https://github.com/elastic/elasticsearch/pull/42725 + * The OS can report a negative number in a number of cases: + * - Non-supported OSes (HP-UX, or AIX) + * - A failure of macOS to initialize host statistics + * - An OS that does not support the {@code _SC_PHYS_PAGES} or {@code _SC_PAGE_SIZE} flags for the {@code sysconf()} linux kernel call + * - An overflow of the product of {@code _SC_PHYS_PAGES} and {@code _SC_PAGE_SIZE} + * - An error case retrieving these values from a linux kernel + * - A non-standard libc implementation not implementing the required values + * For a more exhaustive explanation, see https://github.com/elastic/elasticsearch/pull/42725 */ public class OsProbe { @@ -184,7 +178,7 @@ final double[] getSystemLoadAverage() { final String procLoadAvg = readProcLoadavg(); assert procLoadAvg.matches("(\\d+\\.\\d+\\s+){3}\\d+/\\d+\\s+\\d+"); final String[] fields = procLoadAvg.split("\\s+"); - return new double[] { Double.parseDouble(fields[0]), Double.parseDouble(fields[1]), Double.parseDouble(fields[2]) }; + return new double[]{Double.parseDouble(fields[0]), Double.parseDouble(fields[1]), Double.parseDouble(fields[2])}; } catch (final IOException e) { if (logger.isDebugEnabled()) { logger.debug("error reading /proc/loadavg", e); @@ -198,7 +192,7 @@ final double[] getSystemLoadAverage() { } try { final double oneMinuteLoadAverage = (double) getSystemLoadAverage.invoke(osMxBean); - return new double[] { oneMinuteLoadAverage >= 0 ? oneMinuteLoadAverage : -1, -1, -1 }; + return new double[]{oneMinuteLoadAverage >= 0 ? oneMinuteLoadAverage : -1, -1, -1}; } catch (IllegalAccessException | InvocationTargetException e) { if (logger.isDebugEnabled()) { logger.debug("error reading one minute load average from operating system", e); @@ -324,23 +318,6 @@ String readSysFsCgroupCpuAcctCpuAcctUsage(final String controlGroup) throws IOEx return readSingleLine(PathUtils.get("/sys/fs/cgroup/cpuacct", controlGroup, "cpuacct.usage")); } - private long[] getCgroupV2CpuLimit(String controlGroup) throws IOException { - String entry = readCgroupV2CpuLimit(controlGroup); - String[] parts = entry.split("\\s+"); - assert parts.length == 2 : "Expected 2 fields in [cpu.max]"; - - long[] values = new long[2]; - - values[0] = "max".equals(parts[0]) ? -1L : Long.parseLong(parts[0]); - values[1] = Long.parseLong(parts[1]); - return values; - } - - @SuppressForbidden(reason = "access /sys/fs/cgroup/cpu.max") - String readCgroupV2CpuLimit(String controlGroup) throws IOException { - return readSingleLine(PathUtils.get("/sys/fs/cgroup/", controlGroup, "cpu.max")); - } - /** * The total period of time in microseconds for how frequently the Elasticsearch control group's access to CPU resources will be * reallocated. @@ -477,35 +454,6 @@ String readSysFsCgroupMemoryLimitInBytes(final String controlGroup) throws IOExc return readSingleLine(PathUtils.get("/sys/fs/cgroup/memory", controlGroup, "memory.limit_in_bytes")); } - /** - * The maximum amount of user memory (including file cache). - * If there is no limit then some Linux versions return the maximum value that can be stored in an - * unsigned 64 bit number, and this will overflow a long, hence the result type is String. - * (The alternative would have been BigInteger but then it would not be possible to index - * the OS stats document into Elasticsearch without losing information, as BigInteger is - * not a supported Elasticsearch type.) 
- * - * @param controlGroup the control group for the Elasticsearch process for the {@code memory} subsystem - * @return the maximum amount of user memory (including file cache) - * @throws IOException if an I/O exception occurs reading {@code memory.limit_in_bytes} for the control group - */ - private String getCgroupV2MemoryLimitInBytes(final String controlGroup) throws IOException { - return readSysFsCgroupV2MemoryLimitInBytes(controlGroup); - } - - /** - * Returns the line from {@code memory.max} for the control group to which the Elasticsearch process belongs for the - * {@code memory} subsystem. This line represents the maximum amount of user memory (including file cache). - * - * @param controlGroup the control group to which the Elasticsearch process belongs for the {@code memory} subsystem - * @return the line from {@code memory.max} - * @throws IOException if an I/O exception occurs reading {@code memory.max} for the control group - */ - @SuppressForbidden(reason = "access /sys/fs/cgroup/memory.max") - String readSysFsCgroupV2MemoryLimitInBytes(final String controlGroup) throws IOException { - return readSingleLine(PathUtils.get("/sys/fs/cgroup/", controlGroup, "memory.max")); - } - /** * The total current memory usage by processes in the cgroup (in bytes). * If there is no limit then some Linux versions return the maximum value that can be stored in an @@ -535,35 +483,6 @@ String readSysFsCgroupMemoryUsageInBytes(final String controlGroup) throws IOExc return readSingleLine(PathUtils.get("/sys/fs/cgroup/memory", controlGroup, "memory.usage_in_bytes")); } - /** - * The total current memory usage by processes in the cgroup (in bytes). - * If there is no limit then some Linux versions return the maximum value that can be stored in an - * unsigned 64 bit number, and this will overflow a long, hence the result type is String. - * (The alternative would have been BigInteger but then it would not be possible to index - * the OS stats document into Elasticsearch without losing information, as BigInteger is - * not a supported Elasticsearch type.) - * - * @param controlGroup the control group for the Elasticsearch process for the {@code memory} subsystem - * @return the total current memory usage by processes in the cgroup (in bytes) - * @throws IOException if an I/O exception occurs reading {@code memory.current} for the control group - */ - private String getCgroupV2MemoryUsageInBytes(final String controlGroup) throws IOException { - return readSysFsCgroupV2MemoryUsageInBytes(controlGroup); - } - - /** - * Returns the line from {@code memory.current} for the control group to which the Elasticsearch process belongs for the - * {@code memory} subsystem. This line represents the total current memory usage by processes in the cgroup (in bytes). - * - * @param controlGroup the control group to which the Elasticsearch process belongs for the {@code memory} subsystem - * @return the line from {@code memory.current} - * @throws IOException if an I/O exception occurs reading {@code memory.current} for the control group - */ - @SuppressForbidden(reason = "access /sys/fs/cgroup/memory.current") - String readSysFsCgroupV2MemoryUsageInBytes(final String controlGroup) throws IOException { - return readSingleLine(PathUtils.get("/sys/fs/cgroup/", controlGroup, "memory.current")); - } - /** * Checks if cgroup stats are available by checking for the existence of {@code /proc/self/cgroup}, {@code /sys/fs/cgroup/cpu}, * {@code /sys/fs/cgroup/cpuacct} and {@code /sys/fs/cgroup/memory}. 
@@ -571,51 +490,20 @@ String readSysFsCgroupV2MemoryUsageInBytes(final String controlGroup) throws IOE * @return {@code true} if the stats are available, otherwise {@code false} */ @SuppressForbidden(reason = "access /proc/self/cgroup, /sys/fs/cgroup/cpu, /sys/fs/cgroup/cpuacct and /sys/fs/cgroup/memory") - boolean areCgroupStatsAvailable() throws IOException { + boolean areCgroupStatsAvailable() { if (Files.exists(PathUtils.get("/proc/self/cgroup")) == false) { return false; } - - List lines = readProcSelfCgroup(); - - // cgroup v2 - if (lines.size() == 1 && lines.get(0).startsWith("0::")) { - return Stream.of("/sys/fs/cgroup/cpu.stat", "/sys/fs/cgroup/memory.stat").allMatch(path -> Files.exists(PathUtils.get(path))); + if (Files.exists(PathUtils.get("/sys/fs/cgroup/cpu")) == false) { + return false; } - - return Stream.of("/sys/fs/cgroup/cpu", "/sys/fs/cgroup/cpuacct", "/sys/fs/cgroup/memory") - .allMatch(path -> Files.exists(PathUtils.get(path))); - } - - /** - * The CPU statistics for all tasks in the Elasticsearch control group. - * - * @param controlGroup the control group to which the Elasticsearch process belongs for the {@code memory} subsystem - * @return the CPU statistics - * @throws IOException if an I/O exception occurs reading {@code cpu.stat} for the control group - */ - private Map getCgroupV2CpuStats(String controlGroup) throws IOException { - final List lines = readCgroupV2CpuStats(controlGroup); - final Map stats = new HashMap<>(); - - for (String line : lines) { - String[] parts = line.split("\\s+"); - assert parts.length == 2 : "Corrupt cpu.stat line: [" + line + "]"; - stats.put(parts[0], Long.parseLong(parts[1])); + if (Files.exists(PathUtils.get("/sys/fs/cgroup/cpuacct")) == false) { + return false; } - - final List expectedKeys = List.of("nr_periods", "nr_throttled", "system_usec", "throttled_usec", "usage_usec", "user_usec"); - expectedKeys.forEach(key -> { - assert stats.containsKey(key) : key; - assert stats.get(key) != -1 : stats.get(key); - }); - - return stats; - } - - @SuppressForbidden(reason = "access /sys/fs/cgroup/cpu.stat") - List readCgroupV2CpuStats(final String controlGroup) throws IOException { - return Files.readAllLines(PathUtils.get("/sys/fs/cgroup", controlGroup, "cpu.stat")); + if (Files.exists(PathUtils.get("/sys/fs/cgroup/memory")) == false) { + return false; + } + return true; } /** @@ -627,79 +515,45 @@ private OsStats.Cgroup getCgroup() { try { if (areCgroupStatsAvailable() == false) { return null; - } - - final Map controllerMap = getControlGroups(); - assert controllerMap.isEmpty() == false; - - final String cpuAcctControlGroup; - final long cgroupCpuAcctUsageNanos; - final long cgroupCpuAcctCpuCfsPeriodMicros; - final long cgroupCpuAcctCpuCfsQuotaMicros; - final String cpuControlGroup; - final OsStats.Cgroup.CpuStat cpuStat; - final String memoryControlGroup; - final String cgroupMemoryLimitInBytes; - final String cgroupMemoryUsageInBytes; - - if (controllerMap.size() == 1 && controllerMap.containsKey("")) { - // There's a single hierarchy for all controllers - cpuControlGroup = cpuAcctControlGroup = memoryControlGroup = controllerMap.get(""); - - // `cpuacct` was merged with `cpu` in v2 - final Map cpuStatsMap = getCgroupV2CpuStats(cpuControlGroup); - - cgroupCpuAcctUsageNanos = cpuStatsMap.get("usage_usec"); - - long[] cpuLimits = getCgroupV2CpuLimit(cpuControlGroup); - cgroupCpuAcctCpuCfsQuotaMicros = cpuLimits[0]; - cgroupCpuAcctCpuCfsPeriodMicros = cpuLimits[1]; - - cpuStat = new OsStats.Cgroup.CpuStat( - 
cpuStatsMap.get("nr_periods"), - cpuStatsMap.get("nr_throttled"), - cpuStatsMap.get("throttled_usec") - ); - - cgroupMemoryLimitInBytes = getCgroupV2MemoryLimitInBytes(memoryControlGroup); - cgroupMemoryUsageInBytes = getCgroupV2MemoryUsageInBytes(memoryControlGroup); } else { - cpuAcctControlGroup = controllerMap.get("cpuacct"); + final Map controllerMap = getControlGroups(); + assert controllerMap.isEmpty() == false; + + final String cpuAcctControlGroup = controllerMap.get("cpuacct"); if (cpuAcctControlGroup == null) { logger.debug("no [cpuacct] data found in cgroup stats"); return null; } - cgroupCpuAcctUsageNanos = getCgroupCpuAcctUsageNanos(cpuAcctControlGroup); + final long cgroupCpuAcctUsageNanos = getCgroupCpuAcctUsageNanos(cpuAcctControlGroup); - cpuControlGroup = controllerMap.get("cpu"); + final String cpuControlGroup = controllerMap.get("cpu"); if (cpuControlGroup == null) { logger.debug("no [cpu] data found in cgroup stats"); return null; } - cgroupCpuAcctCpuCfsPeriodMicros = getCgroupCpuAcctCpuCfsPeriodMicros(cpuControlGroup); - cgroupCpuAcctCpuCfsQuotaMicros = getCgroupCpuAcctCpuCfsQuotaMicros(cpuControlGroup); - cpuStat = getCgroupCpuAcctCpuStat(cpuControlGroup); + final long cgroupCpuAcctCpuCfsPeriodMicros = getCgroupCpuAcctCpuCfsPeriodMicros(cpuControlGroup); + final long cgroupCpuAcctCpuCfsQuotaMicros = getCgroupCpuAcctCpuCfsQuotaMicros(cpuControlGroup); + final OsStats.Cgroup.CpuStat cpuStat = getCgroupCpuAcctCpuStat(cpuControlGroup); - memoryControlGroup = controllerMap.get("memory"); + final String memoryControlGroup = controllerMap.get("memory"); if (memoryControlGroup == null) { logger.debug("no [memory] data found in cgroup stats"); return null; } - cgroupMemoryLimitInBytes = getCgroupMemoryLimitInBytes(memoryControlGroup); - cgroupMemoryUsageInBytes = getCgroupMemoryUsageInBytes(memoryControlGroup); + final String cgroupMemoryLimitInBytes = getCgroupMemoryLimitInBytes(memoryControlGroup); + final String cgroupMemoryUsageInBytes = getCgroupMemoryUsageInBytes(memoryControlGroup); + + return new OsStats.Cgroup( + cpuAcctControlGroup, + cgroupCpuAcctUsageNanos, + cpuControlGroup, + cgroupCpuAcctCpuCfsPeriodMicros, + cgroupCpuAcctCpuCfsQuotaMicros, + cpuStat, + memoryControlGroup, + cgroupMemoryLimitInBytes, + cgroupMemoryUsageInBytes); } - - return new OsStats.Cgroup( - cpuAcctControlGroup, - cgroupCpuAcctUsageNanos, - cpuControlGroup, - cgroupCpuAcctCpuCfsPeriodMicros, - cgroupCpuAcctCpuCfsQuotaMicros, - cpuStat, - memoryControlGroup, - cgroupMemoryLimitInBytes, - cgroupMemoryUsageInBytes - ); } catch (final IOException e) { logger.debug("error reading control group stats", e); return null; @@ -722,14 +576,13 @@ public static OsProbe getInstance() { OsInfo osInfo(long refreshInterval, int allocatedProcessors) throws IOException { return new OsInfo( - refreshInterval, - Runtime.getRuntime().availableProcessors(), - allocatedProcessors, - Constants.OS_NAME, - getPrettyName(), - Constants.OS_ARCH, - Constants.OS_VERSION - ); + refreshInterval, + Runtime.getRuntime().availableProcessors(), + allocatedProcessors, + Constants.OS_NAME, + getPrettyName(), + Constants.OS_ARCH, + Constants.OS_VERSION); } private String getPrettyName() throws IOException { @@ -741,13 +594,11 @@ private String getPrettyName() throws IOException { * wrapped in single- or double-quotes. 
*/ final List etcOsReleaseLines = readOsRelease(); - final List prettyNameLines = etcOsReleaseLines.stream() - .filter(line -> line.startsWith("PRETTY_NAME")) - .collect(Collectors.toList()); + final List prettyNameLines = + etcOsReleaseLines.stream().filter(line -> line.startsWith("PRETTY_NAME")).collect(Collectors.toList()); assert prettyNameLines.size() <= 1 : prettyNameLines; - final Optional maybePrettyNameLine = prettyNameLines.size() == 1 - ? Optional.of(prettyNameLines.get(0)) - : Optional.empty(); + final Optional maybePrettyNameLine = + prettyNameLines.size() == 1 ? Optional.of(prettyNameLines.get(0)) : Optional.empty(); if (maybePrettyNameLine.isPresent()) { // we trim since some OS contain trailing space, for example, Oracle Linux Server 6.9 has a trailing space after the quote final String trimmedPrettyNameLine = maybePrettyNameLine.get().trim(); @@ -844,15 +695,11 @@ boolean isDebian8() throws IOException { return Constants.LINUX && getPrettyName().equals("Debian GNU/Linux 8 (jessie)"); } - OsStats.Cgroup getCgroup(boolean isLinux) { - return isLinux ? getCgroup() : null; - } - public OsStats osStats() { final OsStats.Cpu cpu = new OsStats.Cpu(getSystemCpuPercent(), getSystemLoadAverage()); final OsStats.Mem mem = new OsStats.Mem(getTotalPhysicalMemorySize(), getFreePhysicalMemorySize()); final OsStats.Swap swap = new OsStats.Swap(getTotalSwapSpaceSize(), getFreeSwapSpaceSize()); - final OsStats.Cgroup cgroup = getCgroup(Constants.LINUX); + final OsStats.Cgroup cgroup = Constants.LINUX ? getCgroup() : null; return new OsStats(System.currentTimeMillis(), cpu, mem, swap, cgroup); } diff --git a/server/src/main/resources/org/elasticsearch/bootstrap/security.policy b/server/src/main/resources/org/elasticsearch/bootstrap/security.policy index 56d9f45c61a16..8ffc0d0eea47d 100644 --- a/server/src/main/resources/org/elasticsearch/bootstrap/security.policy +++ b/server/src/main/resources/org/elasticsearch/bootstrap/security.policy @@ -153,13 +153,6 @@ grant { permission java.io.FilePermission "/sys/fs/cgroup/memory", "read"; permission java.io.FilePermission "/sys/fs/cgroup/memory/-", "read"; - // control group v2 stats on linux - permission java.io.FilePermission "/sys/fs/cgroup/cpu.max", "read"; - permission java.io.FilePermission "/sys/fs/cgroup/cpu.stat", "read"; - permission java.io.FilePermission "/sys/fs/cgroup/memory.current", "read"; - permission java.io.FilePermission "/sys/fs/cgroup/memory.max", "read"; - permission java.io.FilePermission "/sys/fs/cgroup/memory.stat", "read"; - // system memory on Linux systems affected by JDK bug (#66629) permission java.io.FilePermission "/proc/meminfo", "read"; }; diff --git a/server/src/test/java/org/elasticsearch/monitor/os/OsProbeTests.java b/server/src/test/java/org/elasticsearch/monitor/os/OsProbeTests.java index ac802cf738500..8be3723d72cc3 100644 --- a/server/src/test/java/org/elasticsearch/monitor/os/OsProbeTests.java +++ b/server/src/test/java/org/elasticsearch/monitor/os/OsProbeTests.java @@ -43,7 +43,7 @@ public void testOsInfo() throws IOException { final OsProbe osProbe = new OsProbe() { @Override - List readOsRelease() { + List readOsRelease() throws IOException { assert Constants.LINUX : Constants.OS_NAME; if (prettyName != null) { final String quote = randomFrom("\"", "'", ""); @@ -78,10 +78,8 @@ public void testOsStats() { OsStats stats = osProbe.osStats(); assertNotNull(stats); assertThat(stats.getTimestamp(), greaterThan(0L)); - assertThat( - stats.getCpu().getPercent(), - anyOf(equalTo((short) -1), 
is(both(greaterThanOrEqualTo((short) 0)).and(lessThanOrEqualTo((short) 100)))) - ); + assertThat(stats.getCpu().getPercent(), anyOf(equalTo((short) -1), + is(both(greaterThanOrEqualTo((short) 0)).and(lessThanOrEqualTo((short) 100))))); double[] loadAverage = stats.getCpu().getLoadAverage(); if (loadAverage != null) { assertThat(loadAverage.length, equalTo(3)); @@ -175,14 +173,16 @@ String readProcLoadavg() { } public void testCgroupProbe() { - final int availableCgroupsVersion = randomFrom(0, 1, 2); + assumeTrue("test runs on Linux only", Constants.LINUX); + + final boolean areCgroupStatsAvailable = randomBoolean(); final String hierarchy = randomAlphaOfLength(16); - final OsProbe probe = buildStubOsProbe(availableCgroupsVersion, hierarchy); + final OsProbe probe = buildStubOsProbe(areCgroupStatsAvailable, hierarchy); final OsStats.Cgroup cgroup = probe.osStats().getCgroup(); - if (availableCgroupsVersion > 0) { + if (areCgroupStatsAvailable) { assertNotNull(cgroup); assertThat(cgroup.getCpuAcctControlGroup(), equalTo("/" + hierarchy)); assertThat(cgroup.getCpuAcctUsageNanos(), equalTo(364869866063112L)); @@ -200,14 +200,17 @@ public void testCgroupProbe() { } public void testCgroupProbeWithMissingCpuAcct() { + assumeTrue("test runs on Linux only", Constants.LINUX); + final String hierarchy = randomAlphaOfLength(16); // This cgroup data is missing a line about cpuacct - List procSelfCgroupLines = getProcSelfGroupLines(1, hierarchy).stream() + List procSelfCgroupLines = getProcSelfGroupLines(hierarchy) + .stream() .map(line -> line.replaceFirst(",cpuacct", "")) .collect(Collectors.toList()); - final OsProbe probe = buildStubOsProbe(1, hierarchy, procSelfCgroupLines); + final OsProbe probe = buildStubOsProbe(true, hierarchy, procSelfCgroupLines); final OsStats.Cgroup cgroup = probe.osStats().getCgroup(); @@ -215,14 +218,18 @@ public void testCgroupProbeWithMissingCpuAcct() { } public void testCgroupProbeWithMissingCpu() { + assumeTrue("test runs on Linux only", Constants.LINUX); + final String hierarchy = randomAlphaOfLength(16); // This cgroup data is missing a line about cpu - List procSelfCgroupLines = getProcSelfGroupLines(1, hierarchy).stream() + List procSelfCgroupLines = getProcSelfGroupLines(hierarchy) + .stream() .map(line -> line.replaceFirst(":cpu,", ":")) .collect(Collectors.toList()); - final OsProbe probe = buildStubOsProbe(1, hierarchy, procSelfCgroupLines); + + final OsProbe probe = buildStubOsProbe(true, hierarchy, procSelfCgroupLines); final OsStats.Cgroup cgroup = probe.osStats().getCgroup(); @@ -230,14 +237,17 @@ public void testCgroupProbeWithMissingCpu() { } public void testCgroupProbeWithMissingMemory() { + assumeTrue("test runs on Linux only", Constants.LINUX); + final String hierarchy = randomAlphaOfLength(16); // This cgroup data is missing a line about memory - List procSelfCgroupLines = getProcSelfGroupLines(1, hierarchy).stream() + List procSelfCgroupLines = getProcSelfGroupLines(hierarchy) + .stream() .filter(line -> line.contains(":memory:") == false) .collect(Collectors.toList()); - final OsProbe probe = buildStubOsProbe(1, hierarchy, procSelfCgroupLines); + final OsProbe probe = buildStubOsProbe(true, hierarchy, procSelfCgroupLines); final OsStats.Cgroup cgroup = probe.osStats().getCgroup(); @@ -245,8 +255,6 @@ public void testCgroupProbeWithMissingMemory() { } public void testGetTotalMemFromProcMeminfo() throws Exception { - int cgroupsVersion = randomFrom(1, 2); - // missing MemTotal line var meminfoLines = Arrays.asList( "MemFree: 8467692 kB", @@ -257,7 
+265,7 @@ public void testGetTotalMemFromProcMeminfo() throws Exception { "Active: 43637908 kB", "Inactive: 8130280 kB" ); - OsProbe probe = buildStubOsProbe(cgroupsVersion, "", List.of(), meminfoLines); + OsProbe probe = buildStubOsProbe(true, "", List.of(), meminfoLines); assertThat(probe.getTotalMemFromProcMeminfo(), equalTo(0L)); // MemTotal line with invalid value @@ -271,7 +279,7 @@ public void testGetTotalMemFromProcMeminfo() throws Exception { "Active: 43637908 kB", "Inactive: 8130280 kB" ); - probe = buildStubOsProbe(cgroupsVersion, "", List.of(), meminfoLines); + probe = buildStubOsProbe(true, "", List.of(), meminfoLines); assertThat(probe.getTotalMemFromProcMeminfo(), equalTo(0L)); // MemTotal line with invalid unit @@ -285,7 +293,7 @@ public void testGetTotalMemFromProcMeminfo() throws Exception { "Active: 43637908 kB", "Inactive: 8130280 kB" ); - probe = buildStubOsProbe(cgroupsVersion, "", List.of(), meminfoLines); + probe = buildStubOsProbe(true, "", List.of(), meminfoLines); assertThat(probe.getTotalMemFromProcMeminfo(), equalTo(0L)); // MemTotal line with random valid value @@ -300,7 +308,7 @@ public void testGetTotalMemFromProcMeminfo() throws Exception { "Active: 43637908 kB", "Inactive: 8130280 kB" ); - probe = buildStubOsProbe(cgroupsVersion, "", List.of(), meminfoLines); + probe = buildStubOsProbe(true, "", List.of(), meminfoLines); assertThat(probe.getTotalMemFromProcMeminfo(), equalTo(memTotalInKb * 1024L)); } @@ -311,13 +319,7 @@ public void testGetTotalMemoryOnDebian8() throws Exception { assertThat(osProbe.getTotalPhysicalMemorySize(), greaterThan(0L)); } - private static List getProcSelfGroupLines(int cgroupsVersion, String hierarchy) { - // It doesn't really matter if cgroupsVersion == 0 here - - if (cgroupsVersion == 2) { - return List.of("0::/" + hierarchy); - } - + private static List getProcSelfGroupLines(String hierarchy) { return Arrays.asList( "10:freezer:/", "9:net_cls,net_prio:/", @@ -329,40 +331,32 @@ private static List getProcSelfGroupLines(int cgroupsVersion, String hie "3:perf_event:/", "2:cpu,cpuacct,cpuset:/" + hierarchy, "1:name=systemd:/user.slice/user-1000.slice/session-2359.scope", - "0::/cgroup2" - ); + "0::/cgroup2"); } - private static OsProbe buildStubOsProbe(final int availableCgroupsVersion, final String hierarchy) { - List procSelfCgroupLines = getProcSelfGroupLines(availableCgroupsVersion, hierarchy); + private static OsProbe buildStubOsProbe(final boolean areCgroupStatsAvailable, final String hierarchy) { + List procSelfCgroupLines = getProcSelfGroupLines(hierarchy); - return buildStubOsProbe(availableCgroupsVersion, hierarchy, procSelfCgroupLines); + return buildStubOsProbe(areCgroupStatsAvailable, hierarchy, procSelfCgroupLines); } /** * Builds a test instance of OsProbe. Methods that ordinarily read from the filesystem are overridden to return values based upon * the arguments to this method. * - * @param availableCgroupsVersion what version of cgroups are available, 1 or 2, or 0 for no cgroups. Normally OsProbe establishes this - * for itself. + * @param areCgroupStatsAvailable whether or not cgroup data is available. Normally OsProbe establishes this for itself. * @param hierarchy a mock value used to generate a cgroup hierarchy. 
* @param procSelfCgroupLines the lines that will be used as the content of /proc/self/cgroup * @param procMeminfoLines lines that will be used as the content of /proc/meminfo * @return a test instance */ private static OsProbe buildStubOsProbe( - final int availableCgroupsVersion, + final boolean areCgroupStatsAvailable, final String hierarchy, List procSelfCgroupLines, List procMeminfoLines ) { return new OsProbe() { - @Override - OsStats.Cgroup getCgroup(boolean isLinux) { - // Pretend we're always on Linux so that we can run the cgroup tests - return super.getCgroup(true); - } - @Override List readProcSelfCgroup() { return procSelfCgroupLines; @@ -388,7 +382,10 @@ String readSysFsCgroupCpuAcctCpuAcctCfsQuota(String controlGroup) { @Override List readSysFsCgroupCpuAcctCpuStat(String controlGroup) { - return Arrays.asList("nr_periods 17992", "nr_throttled 1311", "throttled_time 139298645489"); + return Arrays.asList( + "nr_periods 17992", + "nr_throttled 1311", + "throttled_time 139298645489"); } @Override @@ -406,50 +403,22 @@ String readSysFsCgroupMemoryUsageInBytes(String controlGroup) { @Override boolean areCgroupStatsAvailable() { - return availableCgroupsVersion > 0; + return areCgroupStatsAvailable; } @Override - List readProcMeminfo() { + List readProcMeminfo() throws IOException { return procMeminfoLines; } - - @Override - String readSysFsCgroupV2MemoryLimitInBytes(String controlGroup) { - assertThat(controlGroup, equalTo("/" + hierarchy)); - // This is the highest value that can be stored in an unsigned 64 bit number, hence too big for long - return "18446744073709551615"; - } - - @Override - String readSysFsCgroupV2MemoryUsageInBytes(String controlGroup) { - assertThat(controlGroup, equalTo("/" + hierarchy)); - return "4796416"; - } - - @Override - List readCgroupV2CpuStats(String controlGroup) { - assertThat(controlGroup, equalTo("/" + hierarchy)); - return List.of( - "usage_usec 364869866063112", - "user_usec 34636", - "system_usec 9896", - "nr_periods 17992", - "nr_throttled 1311", - "throttled_usec 139298645489" - ); - } - - @Override - String readCgroupV2CpuLimit(String controlGroup) { - assertThat(controlGroup, equalTo("/" + hierarchy)); - return "50000 100000"; - } }; } - private static OsProbe buildStubOsProbe(final int availableCgroupsVersion, final String hierarchy, List procSelfCgroupLines) { - return buildStubOsProbe(availableCgroupsVersion, hierarchy, procSelfCgroupLines, List.of()); + private static OsProbe buildStubOsProbe( + final boolean areCgroupStatsAvailable, + final String hierarchy, + List procSelfCgroupLines + ) { + return buildStubOsProbe(areCgroupStatsAvailable, hierarchy, procSelfCgroupLines, List.of()); } } From be6d1ba2af3375af177db1a32045ac59b73fad88 Mon Sep 17 00:00:00 2001 From: Rory Hunter Date: Wed, 1 Sep 2021 15:45:41 +0100 Subject: [PATCH 066/128] Rework how we render groovy templates for docs (#77125) On Windows, rendered Groovy templates contain carriage returns, which breaks the unit tests and results in them sneaking into the output. Fix this by rendering into a string and removing the carriage returns. 
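A rough sketch of the render-to-string approach described above (illustrative only; it mirrors the new TemplateUtils helper introduced by this patch but is not the exact production code, and the class name below is made up for the example):

import groovy.text.SimpleTemplateEngine;

import java.io.IOException;
import java.io.StringWriter;
import java.util.Map;

final class TemplateRenderSketch {

    // Render a Groovy template with the given bindings into a String, then strip
    // carriage returns so the generated docs come out identical on Windows and Linux.
    static String render(String template, Map<String, Object> bindings) throws IOException {
        final StringWriter writer = new StringWriter();
        try {
            new SimpleTemplateEngine().createTemplate(template).make(bindings).writeTo(writer);
        } catch (ClassNotFoundException e) {
            throw new RuntimeException(e);
        }
        return writer.toString().replace("\r", "");
    }
}

Callers can then write the returned String to the output file themselves, which is the shape the generator classes in the diff below are moved to.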
--- .../release/BreakingChangesGenerator.java | 20 ++------- .../release/ReleaseHighlightsGenerator.java | 17 +++----- .../release/ReleaseNotesGenerator.java | 18 ++------ .../release/ReleaseNotesIndexGenerator.java | 14 ++---- .../internal/release/TemplateUtils.java | 43 +++++++++++++++++++ .../release/ValidateChangelogEntryTask.java | 12 +++--- .../release/BreakingChangesGeneratorTest.java | 5 +-- .../release/GenerateReleaseNotesTaskTest.java | 6 ++- .../ReleaseHighlightsGeneratorTest.java | 5 +-- .../release/ReleaseNotesGeneratorTest.java | 5 +-- .../ReleaseNotesIndexGeneratorTest.java | 5 +-- 11 files changed, 74 insertions(+), 76 deletions(-) create mode 100644 build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/release/TemplateUtils.java diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/release/BreakingChangesGenerator.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/release/BreakingChangesGenerator.java index 0b0008a44a538..fc33c288cf944 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/release/BreakingChangesGenerator.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/release/BreakingChangesGenerator.java @@ -8,8 +8,6 @@ package org.elasticsearch.gradle.internal.release; -import groovy.text.SimpleTemplateEngine; - import com.google.common.annotations.VisibleForTesting; import org.elasticsearch.gradle.VersionProperties; @@ -17,7 +15,6 @@ import java.io.File; import java.io.FileWriter; import java.io.IOException; -import java.io.Writer; import java.nio.file.Files; import java.util.HashMap; import java.util.List; @@ -36,18 +33,14 @@ public class BreakingChangesGenerator { static void update(File templateFile, File outputFile, List entries) throws IOException { try (FileWriter output = new FileWriter(outputFile)) { - generateFile( - QualifiedVersion.of(VersionProperties.getElasticsearch()), - Files.readString(templateFile.toPath()), - output, - entries + output.write( + generateFile(QualifiedVersion.of(VersionProperties.getElasticsearch()), Files.readString(templateFile.toPath()), entries) ); } } @VisibleForTesting - static void generateFile(QualifiedVersion version, String template, Writer outputWriter, List entries) - throws IOException { + static String generateFile(QualifiedVersion version, String template, List entries) throws IOException { final Map>> breakingChangesByNotabilityByArea = entries.stream() .map(ChangelogEntry::getBreaking) @@ -75,11 +68,6 @@ static void generateFile(QualifiedVersion version, String template, Writer outpu bindings.put("nextMajor", (version.getMajor() + 1) + ".0"); bindings.put("version", version); - try { - final SimpleTemplateEngine engine = new SimpleTemplateEngine(); - engine.createTemplate(template).make(bindings).writeTo(outputWriter); - } catch (ClassNotFoundException e) { - throw new RuntimeException(e); - } + return TemplateUtils.render(template, bindings); } } diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/release/ReleaseHighlightsGenerator.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/release/ReleaseHighlightsGenerator.java index 97a210657f9c8..e8e807f301a2c 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/release/ReleaseHighlightsGenerator.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/release/ReleaseHighlightsGenerator.java @@ -8,8 +8,6 @@ package 
org.elasticsearch.gradle.internal.release; -import groovy.text.SimpleTemplateEngine; - import com.google.common.annotations.VisibleForTesting; import org.elasticsearch.gradle.VersionProperties; @@ -17,7 +15,6 @@ import java.io.File; import java.io.FileWriter; import java.io.IOException; -import java.io.Writer; import java.nio.file.Files; import java.util.ArrayList; import java.util.HashMap; @@ -32,13 +29,14 @@ public class ReleaseHighlightsGenerator { static void update(File templateFile, File outputFile, List entries) throws IOException { try (FileWriter output = new FileWriter(outputFile)) { - generateFile(QualifiedVersion.of(VersionProperties.getElasticsearch()), Files.readString(templateFile.toPath()), entries, output); + output.write( + generateFile(QualifiedVersion.of(VersionProperties.getElasticsearch()), Files.readString(templateFile.toPath()), entries) + ); } } @VisibleForTesting - static void generateFile(QualifiedVersion version, String templateFile, List entries, Writer outputWriter) - throws IOException { + static String generateFile(QualifiedVersion version, String template, List entries) throws IOException { final List priorVersions = new ArrayList<>(); if (version.getMinor() > 0) { @@ -66,11 +64,6 @@ static void generateFile(QualifiedVersion version, String templateFile, List> changelogs, Writer outputWriter) - throws IOException { + static String generateFile(String template, Map> changelogs) throws IOException { final var changelogsByVersionByTypeByArea = buildChangelogBreakdown(changelogs); final Map bindings = new HashMap<>(); bindings.put("changelogsByVersionByTypeByArea", changelogsByVersionByTypeByArea); bindings.put("TYPE_LABELS", TYPE_LABELS); - try { - final SimpleTemplateEngine engine = new SimpleTemplateEngine(); - engine.createTemplate(template).make(bindings).writeTo(outputWriter); - } catch (ClassNotFoundException e) { - throw new GradleException("Failed to generate file from template", e); - } + return TemplateUtils.render(template, bindings); } private static Map>>> buildChangelogBreakdown( diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/release/ReleaseNotesIndexGenerator.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/release/ReleaseNotesIndexGenerator.java index 2f2e013027759..839c318f32784 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/release/ReleaseNotesIndexGenerator.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/release/ReleaseNotesIndexGenerator.java @@ -8,14 +8,11 @@ package org.elasticsearch.gradle.internal.release; -import groovy.text.SimpleTemplateEngine; - import com.google.common.annotations.VisibleForTesting; import java.io.File; import java.io.FileWriter; import java.io.IOException; -import java.io.Writer; import java.nio.file.Files; import java.util.HashMap; import java.util.List; @@ -34,12 +31,12 @@ public class ReleaseNotesIndexGenerator { static void update(Set versions, File indexTemplate, File indexFile) throws IOException { try (FileWriter indexFileWriter = new FileWriter(indexFile)) { - generateFile(versions, Files.readString(indexTemplate.toPath()), indexFileWriter); + indexFileWriter.write(generateFile(versions, Files.readString(indexTemplate.toPath()))); } } @VisibleForTesting - static void generateFile(Set versionsSet, String indexTemplate, Writer outputWriter) throws IOException { + static String generateFile(Set versionsSet, String template) throws IOException { final Set versions = new 
TreeSet<>(reverseOrder()); // For the purpose of generating the index, snapshot versions are the same as released versions. Prerelease versions are not. @@ -54,11 +51,6 @@ static void generateFile(Set versionsSet, String indexTemplate bindings.put("versions", versions); bindings.put("includeVersions", includeVersions); - try { - final SimpleTemplateEngine engine = new SimpleTemplateEngine(); - engine.createTemplate(indexTemplate).make(bindings).writeTo(outputWriter); - } catch (ClassNotFoundException e) { - throw new RuntimeException(e); - } + return TemplateUtils.render(template, bindings); } } diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/release/TemplateUtils.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/release/TemplateUtils.java new file mode 100644 index 0000000000000..ef2915f847950 --- /dev/null +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/release/TemplateUtils.java @@ -0,0 +1,43 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.gradle.internal.release; + +import groovy.text.SimpleTemplateEngine; + +import java.io.IOException; +import java.io.StringWriter; +import java.util.Map; + +/** + * Methods for working with Groovy templates. + */ +public class TemplateUtils { + + /** + * Applies {@code bindings} to {@code template}, then removes all carriage returns from + * the result. + * + * @param template a Groovy template + * @param bindings parameters for the template + * @return the rendered template + */ + public static String render(String template, Map bindings) throws IOException { + final StringWriter writer = new StringWriter(); + + try { + final SimpleTemplateEngine engine = new SimpleTemplateEngine(); + engine.createTemplate(template).make(bindings).writeTo(writer); + } catch (ClassNotFoundException e) { + throw new RuntimeException(e); + } + + return writer.toString().replace("\\r", ""); + } + +} diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/release/ValidateChangelogEntryTask.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/release/ValidateChangelogEntryTask.java index 5f030eb074653..149e8411dffaa 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/release/ValidateChangelogEntryTask.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/release/ValidateChangelogEntryTask.java @@ -17,10 +17,10 @@ import org.gradle.api.tasks.InputFiles; import org.gradle.api.tasks.TaskAction; -import javax.inject.Inject; import java.net.URI; import java.util.Map; import java.util.stream.Collectors; +import javax.inject.Inject; /** * Performs additional checks on changelog files, beyond whether they conform to the schema. 
@@ -49,13 +49,15 @@ public void executeTask() { if (type.equals("known-issue") == false && type.equals("security") == false) { if (entry.getPr() == null) { - throw new GradleException("[" + path + "] must provide a [pr] number (only 'known-issue' and " + - "'security' entries can omit this"); + throw new GradleException( + "[" + path + "] must provide a [pr] number (only 'known-issue' and " + "'security' entries can omit this" + ); } if (entry.getArea() == null) { - throw new GradleException("[" + path + "] must provide an [area] (only 'known-issue' and " + - "'security' entries can omit this"); + throw new GradleException( + "[" + path + "] must provide an [area] (only 'known-issue' and " + "'security' entries can omit this" + ); } } diff --git a/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/release/BreakingChangesGeneratorTest.java b/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/release/BreakingChangesGeneratorTest.java index 007e18c84aa50..e50cc56c27cff 100644 --- a/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/release/BreakingChangesGeneratorTest.java +++ b/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/release/BreakingChangesGeneratorTest.java @@ -10,7 +10,6 @@ import org.junit.Test; -import java.io.StringWriter; import java.nio.charset.StandardCharsets; import java.nio.file.Files; import java.nio.file.Paths; @@ -32,12 +31,10 @@ public void generateFile_rendersCorrectMarkup() throws Exception { final String expectedOutput = getResource( "/org/elasticsearch/gradle/internal/release/BreakingChangesGeneratorTest.generateFile.asciidoc" ); - final StringWriter writer = new StringWriter(); final List entries = getEntries(); // when: - BreakingChangesGenerator.generateFile(QualifiedVersion.of("8.4.0-SNAPSHOT"), template, writer, entries); - final String actualOutput = writer.toString(); + final String actualOutput = BreakingChangesGenerator.generateFile(QualifiedVersion.of("8.4.0-SNAPSHOT"), template, entries); // then: assertThat(actualOutput, equalTo(expectedOutput)); diff --git a/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/release/GenerateReleaseNotesTaskTest.java b/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/release/GenerateReleaseNotesTaskTest.java index 12f2712549706..d994c7097c8fa 100644 --- a/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/release/GenerateReleaseNotesTaskTest.java +++ b/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/release/GenerateReleaseNotesTaskTest.java @@ -206,7 +206,11 @@ public void partitionFiles_withPrerelease_correctlyGroupsByPrereleaseVersion() { ); // when: - Map> partitionedFiles = GenerateReleaseNotesTask.partitionFilesByVersion(gitWrapper, "8.0.0-beta1", allFiles); + Map> partitionedFiles = GenerateReleaseNotesTask.partitionFilesByVersion( + gitWrapper, + "8.0.0-beta1", + allFiles + ); // then: verify(gitWrapper).listVersions("v8.0*"); diff --git a/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/release/ReleaseHighlightsGeneratorTest.java b/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/release/ReleaseHighlightsGeneratorTest.java index 22bf411fe73b3..9c47be0151faf 100644 --- a/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/release/ReleaseHighlightsGeneratorTest.java +++ b/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/release/ReleaseHighlightsGeneratorTest.java @@ -10,7 +10,6 @@ import 
org.junit.Test; -import java.io.StringWriter; import java.nio.charset.StandardCharsets; import java.nio.file.Files; import java.nio.file.Paths; @@ -32,12 +31,10 @@ public void generateFile_rendersCorrectMarkup() throws Exception { final String expectedOutput = getResource( "/org/elasticsearch/gradle/internal/release/ReleaseHighlightsGeneratorTest.generateFile.asciidoc" ); - final StringWriter writer = new StringWriter(); final List entries = getEntries(); // when: - ReleaseHighlightsGenerator.generateFile(QualifiedVersion.of("8.4.0-SNAPSHOT"), template, entries, writer); - final String actualOutput = writer.toString(); + final String actualOutput = ReleaseHighlightsGenerator.generateFile(QualifiedVersion.of("8.4.0-SNAPSHOT"), template, entries); // then: assertThat(actualOutput, equalTo(expectedOutput)); diff --git a/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/release/ReleaseNotesGeneratorTest.java b/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/release/ReleaseNotesGeneratorTest.java index 2700bb8a35d37..55c81091bf49b 100644 --- a/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/release/ReleaseNotesGeneratorTest.java +++ b/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/release/ReleaseNotesGeneratorTest.java @@ -10,7 +10,6 @@ import org.junit.Test; -import java.io.StringWriter; import java.nio.charset.StandardCharsets; import java.nio.file.Files; import java.nio.file.Paths; @@ -37,12 +36,10 @@ public void generateFile_rendersCorrectMarkup() throws Exception { final String expectedOutput = getResource( "/org/elasticsearch/gradle/internal/release/ReleaseNotesGeneratorTest.generateFile.asciidoc" ); - final StringWriter writer = new StringWriter(); final Map> entries = getEntries(); // when: - ReleaseNotesGenerator.generateFile(template, entries, writer); - final String actualOutput = writer.toString(); + final String actualOutput = ReleaseNotesGenerator.generateFile(template, entries); // then: assertThat(actualOutput, equalTo(expectedOutput)); diff --git a/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/release/ReleaseNotesIndexGeneratorTest.java b/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/release/ReleaseNotesIndexGeneratorTest.java index 10ffcf41857ef..8bb3f868d9de0 100644 --- a/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/release/ReleaseNotesIndexGeneratorTest.java +++ b/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/release/ReleaseNotesIndexGeneratorTest.java @@ -10,7 +10,6 @@ import org.junit.Test; -import java.io.StringWriter; import java.nio.charset.StandardCharsets; import java.nio.file.Files; import java.nio.file.Paths; @@ -46,11 +45,9 @@ public void generateFile_rendersCorrectMarkup() throws Exception { final String expectedOutput = getResource( "/org/elasticsearch/gradle/internal/release/ReleaseNotesIndexGeneratorTest.generateFile.asciidoc" ); - final StringWriter writer = new StringWriter(); // when: - ReleaseNotesIndexGenerator.generateFile(versions, template, writer); - final String actualOutput = writer.toString(); + final String actualOutput = ReleaseNotesIndexGenerator.generateFile(versions, template); // then: assertThat(actualOutput, equalTo(expectedOutput)); From 2396bad7075ec11398946c5acf77598b20d2b33f Mon Sep 17 00:00:00 2001 From: Mark Vieira Date: Wed, 1 Sep 2021 08:16:59 -0700 Subject: [PATCH 067/128] Add debian 11 to pull request packaging test matrix --- 
.../elastic+elasticsearch+pull-request+packaging-tests-unix.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.ci/jobs.t/elastic+elasticsearch+pull-request+packaging-tests-unix.yml b/.ci/jobs.t/elastic+elasticsearch+pull-request+packaging-tests-unix.yml index fd979e15413f7..037f0bca8c3a1 100644 --- a/.ci/jobs.t/elastic+elasticsearch+pull-request+packaging-tests-unix.yml +++ b/.ci/jobs.t/elastic+elasticsearch+pull-request+packaging-tests-unix.yml @@ -36,6 +36,7 @@ - centos-8-packaging - debian-9-packaging - debian-10-packaging + - debian-11-packaging - opensuse-15-1-packaging - oraclelinux-7-packaging - oraclelinux-8-packaging From 5d48fdc741f06205518e0f5d9374c6a0058b8248 Mon Sep 17 00:00:00 2001 From: Alan Woodward Date: Wed, 1 Sep 2021 16:21:26 +0100 Subject: [PATCH 068/128] Replace Lucene DataInput/DataOutput with Elasticsearch StreamInput/StreamOutput (#77118) In a number of places, we read and write binary data into byte arrays using lucene's DataInput and DataOutput abstractions. In lucene 9 these abstractions are changing the endianness of their read/writeInt methods. To avoid dealing with this formatting change, this commit changes things to use elasticsearch StreamInput/StreamOutput abstractions instead, which have basically the same API but will preserve endianness. Relates to #73324 --- .../resources/forbidden/es-all-signatures.txt | 1 + .../queries/BinaryDocValuesRangeQuery.java | 4 +- .../action/search/TransportSearchHelper.java | 4 +- .../io/stream/ByteArrayStreamInput.java | 4 ++ .../elasticsearch/common/util/ByteUtils.java | 44 ------------------- .../plain/AbstractBinaryDVLeafFieldData.java | 4 +- .../index/mapper/BinaryFieldMapper.java | 7 ++- .../index/mapper/BinaryRangeUtil.java | 38 +++++++--------- .../elasticsearch/index/mapper/RangeType.java | 12 ++--- .../common/util/ByteUtilsTests.java | 40 ----------------- .../BinaryDvConfirmedAutomatonQuery.java | 32 +++++++------- 11 files changed, 53 insertions(+), 137 deletions(-) diff --git a/build-tools-internal/src/main/resources/forbidden/es-all-signatures.txt b/build-tools-internal/src/main/resources/forbidden/es-all-signatures.txt index 64c05ad953ab2..8426ad3c7bb98 100644 --- a/build-tools-internal/src/main/resources/forbidden/es-all-signatures.txt +++ b/build-tools-internal/src/main/resources/forbidden/es-all-signatures.txt @@ -54,3 +54,4 @@ java.util.concurrent.ScheduledThreadPoolExecutor#(int) java.util.concurrent.ScheduledThreadPoolExecutor#(int, java.util.concurrent.ThreadFactory) java.util.concurrent.ScheduledThreadPoolExecutor#(int, java.util.concurrent.RejectedExecutionHandler) java.util.concurrent.ScheduledThreadPoolExecutor#(int, java.util.concurrent.ThreadFactory, java.util.concurrent.RejectedExecutionHandler) + diff --git a/server/src/main/java/org/apache/lucene/queries/BinaryDocValuesRangeQuery.java b/server/src/main/java/org/apache/lucene/queries/BinaryDocValuesRangeQuery.java index 3cb62e8428559..194d13d4dc970 100644 --- a/server/src/main/java/org/apache/lucene/queries/BinaryDocValuesRangeQuery.java +++ b/server/src/main/java/org/apache/lucene/queries/BinaryDocValuesRangeQuery.java @@ -19,8 +19,8 @@ import org.apache.lucene.search.Scorer; import org.apache.lucene.search.TwoPhaseIterator; import org.apache.lucene.search.Weight; -import org.apache.lucene.store.ByteArrayDataInput; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.io.stream.ByteArrayStreamInput; import org.elasticsearch.index.mapper.RangeType; import java.io.IOException; @@ -61,7 +61,7 @@ public Scorer 
scorer(LeafReaderContext context) throws IOException { final TwoPhaseIterator iterator = new TwoPhaseIterator(values) { - ByteArrayDataInput in = new ByteArrayDataInput(); + ByteArrayStreamInput in = new ByteArrayStreamInput(); BytesRef otherFrom = new BytesRef(); BytesRef otherTo = new BytesRef(); diff --git a/server/src/main/java/org/elasticsearch/action/search/TransportSearchHelper.java b/server/src/main/java/org/elasticsearch/action/search/TransportSearchHelper.java index de79cb0bf29ff..6d084b4fa89f9 100644 --- a/server/src/main/java/org/elasticsearch/action/search/TransportSearchHelper.java +++ b/server/src/main/java/org/elasticsearch/action/search/TransportSearchHelper.java @@ -8,8 +8,8 @@ package org.elasticsearch.action.search; -import org.apache.lucene.store.ByteArrayDataInput; import org.apache.lucene.store.RAMOutputStream; +import org.elasticsearch.common.io.stream.ByteArrayStreamInput; import org.elasticsearch.common.util.concurrent.AtomicArray; import org.elasticsearch.search.SearchPhaseResult; import org.elasticsearch.search.SearchShardTarget; @@ -56,7 +56,7 @@ static String buildScrollId(AtomicArray searchPhase static ParsedScrollId parseScrollId(String scrollId) { try { byte[] bytes = Base64.getUrlDecoder().decode(scrollId); - ByteArrayDataInput in = new ByteArrayDataInput(bytes); + ByteArrayStreamInput in = new ByteArrayStreamInput(bytes); final boolean includeContextUUID; final String type; final String firstChunk = in.readString(); diff --git a/server/src/main/java/org/elasticsearch/common/io/stream/ByteArrayStreamInput.java b/server/src/main/java/org/elasticsearch/common/io/stream/ByteArrayStreamInput.java index eb33a0b11fdbd..bfd727eb03b0d 100644 --- a/server/src/main/java/org/elasticsearch/common/io/stream/ByteArrayStreamInput.java +++ b/server/src/main/java/org/elasticsearch/common/io/stream/ByteArrayStreamInput.java @@ -27,6 +27,10 @@ public ByteArrayStreamInput() { reset(BytesRef.EMPTY_BYTES); } + public ByteArrayStreamInput(byte[] bytes) { + reset(bytes); + } + @Override public int read() throws IOException { return readByte() & 0xFF; diff --git a/server/src/main/java/org/elasticsearch/common/util/ByteUtils.java b/server/src/main/java/org/elasticsearch/common/util/ByteUtils.java index 5a4a16ddd885e..401bbd022dc78 100644 --- a/server/src/main/java/org/elasticsearch/common/util/ByteUtils.java +++ b/server/src/main/java/org/elasticsearch/common/util/ByteUtils.java @@ -8,9 +8,6 @@ package org.elasticsearch.common.util; -import org.apache.lucene.store.ByteArrayDataInput; -import org.apache.lucene.store.ByteArrayDataOutput; - /** Utility methods to do byte-level encoding. These methods are biased towards little-endian byte order because it is the most * common byte order and reading several bytes at once may be optimizable in the future with the help of sun.mist.Unsafe. */ @@ -85,45 +82,4 @@ public static float readFloatLE(byte[] arr, int offset) { return Float.intBitsToFloat(readIntLE(arr, offset)); } - /** Same as DataOutput#writeVLong but accepts negative values (written on 9 bytes). */ - public static void writeVLong(ByteArrayDataOutput out, long i) { - for (int k = 0; k < 8 && (i & ~0x7FL) != 0L; ++k) { - out.writeByte((byte)((i & 0x7FL) | 0x80L)); - i >>>= 7; - } - out.writeByte((byte)i); - } - - /** Same as DataOutput#readVLong but can read negative values (read on 9 bytes). 
*/ - public static long readVLong(ByteArrayDataInput in) { - // unwinded because of hotspot bugs, see Lucene's impl - byte b = in.readByte(); - if (b >= 0) return b; - long i = b & 0x7FL; - b = in.readByte(); - i |= (b & 0x7FL) << 7; - if (b >= 0) return i; - b = in.readByte(); - i |= (b & 0x7FL) << 14; - if (b >= 0) return i; - b = in.readByte(); - i |= (b & 0x7FL) << 21; - if (b >= 0) return i; - b = in.readByte(); - i |= (b & 0x7FL) << 28; - if (b >= 0) return i; - b = in.readByte(); - i |= (b & 0x7FL) << 35; - if (b >= 0) return i; - b = in.readByte(); - i |= (b & 0x7FL) << 42; - if (b >= 0) return i; - b = in.readByte(); - i |= (b & 0x7FL) << 49; - if (b >= 0) return i; - b = in.readByte(); - i |= (b & 0xFFL) << 56; - return i; - } - } diff --git a/server/src/main/java/org/elasticsearch/index/fielddata/plain/AbstractBinaryDVLeafFieldData.java b/server/src/main/java/org/elasticsearch/index/fielddata/plain/AbstractBinaryDVLeafFieldData.java index 3283af734a0a2..6a976fa0d47a3 100644 --- a/server/src/main/java/org/elasticsearch/index/fielddata/plain/AbstractBinaryDVLeafFieldData.java +++ b/server/src/main/java/org/elasticsearch/index/fielddata/plain/AbstractBinaryDVLeafFieldData.java @@ -9,9 +9,9 @@ package org.elasticsearch.index.fielddata.plain; import org.apache.lucene.index.BinaryDocValues; -import org.apache.lucene.store.ByteArrayDataInput; import org.apache.lucene.util.Accountable; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.io.stream.ByteArrayStreamInput; import org.elasticsearch.index.fielddata.LeafFieldData; import org.elasticsearch.index.fielddata.SortedBinaryDocValues; @@ -42,7 +42,7 @@ public SortedBinaryDocValues getBytesValues() { return new SortedBinaryDocValues() { int count; - final ByteArrayDataInput in = new ByteArrayDataInput(); + final ByteArrayStreamInput in = new ByteArrayStreamInput(); final BytesRef scratch = new BytesRef(); @Override diff --git a/server/src/main/java/org/elasticsearch/index/mapper/BinaryFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/BinaryFieldMapper.java index de3b9e80a3c03..779d49d28de7c 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/BinaryFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/BinaryFieldMapper.java @@ -12,11 +12,11 @@ import org.apache.lucene.document.StoredField; import org.apache.lucene.search.Query; -import org.apache.lucene.store.ByteArrayDataOutput; import org.apache.lucene.util.BytesRef; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.fielddata.IndexFieldData; @@ -204,8 +204,7 @@ public BytesRef binaryValue() { try { CollectionUtils.sortAndDedup(bytesList); int size = bytesList.size(); - final byte[] bytes = new byte[totalSize + (size + 1) * 5]; - ByteArrayDataOutput out = new ByteArrayDataOutput(bytes); + BytesStreamOutput out = new BytesStreamOutput(totalSize + (size + 1) * 5); out.writeVInt(size); // write total number of values for (int i = 0; i < size; i ++) { final byte[] value = bytesList.get(i); @@ -213,7 +212,7 @@ public BytesRef binaryValue() { out.writeVInt(valueLength); out.writeBytes(value, 0, valueLength); } - return new BytesRef(bytes, 0, out.getPosition()); + return out.bytes().toBytesRef(); } catch (IOException 
e) { throw new ElasticsearchException("Failed to get binary value", e); } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/BinaryRangeUtil.java b/server/src/main/java/org/elasticsearch/index/mapper/BinaryRangeUtil.java index c92bccb4d56bd..68748623e46d3 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/BinaryRangeUtil.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/BinaryRangeUtil.java @@ -9,11 +9,11 @@ package org.elasticsearch.index.mapper; import org.apache.lucene.document.InetAddressPoint; -import org.apache.lucene.store.ByteArrayDataInput; -import org.apache.lucene.store.ByteArrayDataOutput; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.NumericUtils; import org.elasticsearch.common.TriFunction; +import org.elasticsearch.common.io.stream.ByteArrayStreamInput; +import org.elasticsearch.common.io.stream.BytesStreamOutput; import java.io.IOException; import java.net.InetAddress; @@ -28,8 +28,7 @@ enum BinaryRangeUtil { ; static BytesRef encodeIPRanges(Set ranges) throws IOException { - final byte[] encoded = new byte[5 + (16 * 2) * ranges.size()]; - ByteArrayDataOutput out = new ByteArrayDataOutput(encoded); + BytesStreamOutput out = new BytesStreamOutput(5 + (16 * 2) * ranges.size()); out.writeVInt(ranges.size()); for (RangeFieldMapper.Range range : ranges) { InetAddress fromValue = (InetAddress) range.from; @@ -40,10 +39,10 @@ static BytesRef encodeIPRanges(Set ranges) throws IOExce byte[] encodedToValue = InetAddressPoint.encode(toValue); out.writeBytes(encodedToValue, 0, encodedToValue.length); } - return new BytesRef(encoded, 0, out.getPosition()); + return out.bytes().toBytesRef(); } - static List decodeIPRanges(BytesRef encodedRanges) { + static List decodeIPRanges(BytesRef encodedRanges) throws IOException { return decodeRanges(encodedRanges, RangeType.IP, BinaryRangeUtil::decodeIP); } @@ -59,8 +58,7 @@ static BytesRef encodeLongRanges(Set ranges) throws IOEx Comparator toComparator = Comparator.comparingLong(range -> ((Number) range.to).longValue()); sortedRanges.sort(fromComparator.thenComparing(toComparator)); - final byte[] encoded = new byte[5 + (9 * 2) * sortedRanges.size()]; - ByteArrayDataOutput out = new ByteArrayDataOutput(encoded); + BytesStreamOutput out = new BytesStreamOutput(5 + (9 * 2) * sortedRanges.size()); out.writeVInt(sortedRanges.size()); for (RangeFieldMapper.Range range : sortedRanges) { byte[] encodedFrom = encodeLong(((Number) range.from).longValue()); @@ -68,10 +66,10 @@ static BytesRef encodeLongRanges(Set ranges) throws IOEx byte[] encodedTo = encodeLong(((Number) range.to).longValue()); out.writeBytes(encodedTo, encodedTo.length); } - return new BytesRef(encoded, 0, out.getPosition()); + return out.bytes().toBytesRef(); } - static List decodeLongRanges(BytesRef encodedRanges) { + static List decodeLongRanges(BytesRef encodedRanges) throws IOException { return decodeRanges(encodedRanges, RangeType.LONG, BinaryRangeUtil::decodeLong); } @@ -82,8 +80,7 @@ static BytesRef encodeDoubleRanges(Set ranges) throws IO Comparator toComparator = Comparator.comparingDouble(range -> ((Number) range.to).doubleValue()); sortedRanges.sort(fromComparator.thenComparing(toComparator)); - final byte[] encoded = new byte[5 + (8 * 2) * sortedRanges.size()]; - ByteArrayDataOutput out = new ByteArrayDataOutput(encoded); + BytesStreamOutput out = new BytesStreamOutput(5 + (8 * 2) * sortedRanges.size()); out.writeVInt(sortedRanges.size()); for (RangeFieldMapper.Range range : sortedRanges) { byte[] encodedFrom 
= encodeDouble(((Number) range.from).doubleValue()); @@ -91,27 +88,27 @@ static BytesRef encodeDoubleRanges(Set ranges) throws IO byte[] encodedTo = encodeDouble(((Number) range.to).doubleValue()); out.writeBytes(encodedTo, encodedTo.length); } - return new BytesRef(encoded, 0, out.getPosition()); + return out.bytes().toBytesRef(); } - static List decodeDoubleRanges(BytesRef encodedRanges) { + static List decodeDoubleRanges(BytesRef encodedRanges) throws IOException { return decodeRanges(encodedRanges, RangeType.DOUBLE, BinaryRangeUtil::decodeDouble); } - static List decodeFloatRanges(BytesRef encodedRanges) { + static List decodeFloatRanges(BytesRef encodedRanges) throws IOException { return decodeRanges(encodedRanges, RangeType.FLOAT, BinaryRangeUtil::decodeFloat); } static List decodeRanges(BytesRef encodedRanges, RangeType rangeType, - TriFunction decodeBytes) { + TriFunction decodeBytes) throws IOException { RangeType.LengthType lengthType = rangeType.lengthType; - ByteArrayDataInput in = new ByteArrayDataInput(); + ByteArrayStreamInput in = new ByteArrayStreamInput(); in.reset(encodedRanges.bytes, encodedRanges.offset, encodedRanges.length); - int numRanges = in.readVInt(); + int numRanges = in.readVInt(); List ranges = new ArrayList<>(numRanges); final byte[] bytes = encodedRanges.bytes; @@ -137,8 +134,7 @@ static BytesRef encodeFloatRanges(Set ranges) throws IOE Comparator toComparator = Comparator.comparingDouble(range -> ((Number) range.to).floatValue()); sortedRanges.sort(fromComparator.thenComparing(toComparator)); - final byte[] encoded = new byte[5 + (4 * 2) * sortedRanges.size()]; - ByteArrayDataOutput out = new ByteArrayDataOutput(encoded); + BytesStreamOutput out = new BytesStreamOutput(5 + (4 * 2) * sortedRanges.size()); out.writeVInt(sortedRanges.size()); for (RangeFieldMapper.Range range : sortedRanges) { byte[] encodedFrom = encodeFloat(((Number) range.from).floatValue()); @@ -146,7 +142,7 @@ static BytesRef encodeFloatRanges(Set ranges) throws IOE byte[] encodedTo = encodeFloat(((Number) range.to).floatValue()); out.writeBytes(encodedTo, encodedTo.length); } - return new BytesRef(encoded, 0, out.getPosition()); + return out.bytes().toBytesRef(); } static byte[] encodeDouble(double number) { diff --git a/server/src/main/java/org/elasticsearch/index/mapper/RangeType.java b/server/src/main/java/org/elasticsearch/index/mapper/RangeType.java index 57f8f5834eb51..4d449707e6f44 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/RangeType.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/RangeType.java @@ -220,7 +220,7 @@ public BytesRef encodeRanges(Set ranges) throws IOExcept } @Override - public List decodeRanges(BytesRef bytes) { + public List decodeRanges(BytesRef bytes) throws IOException { return LONG.decodeRanges(bytes); } @@ -293,7 +293,7 @@ public BytesRef encodeRanges(Set ranges) throws IOExcept } @Override - public List decodeRanges(BytesRef bytes) { + public List decodeRanges(BytesRef bytes) throws IOException { return BinaryRangeUtil.decodeFloatRanges(bytes); } @@ -364,7 +364,7 @@ public BytesRef encodeRanges(Set ranges) throws IOExcept } @Override - public List decodeRanges(BytesRef bytes) { + public List decodeRanges(BytesRef bytes) throws IOException { return BinaryRangeUtil.decodeDoubleRanges(bytes); } @@ -438,7 +438,7 @@ public BytesRef encodeRanges(Set ranges) throws IOExcept } @Override - public List decodeRanges(BytesRef bytes) { + public List decodeRanges(BytesRef bytes) throws IOException { return LONG.decodeRanges(bytes); } 
@@ -497,7 +497,7 @@ public BytesRef encodeRanges(Set ranges) throws IOExcept } @Override - public List decodeRanges(BytesRef bytes) { + public List decodeRanges(BytesRef bytes) throws IOException { return BinaryRangeUtil.decodeLongRanges(bytes); } @@ -681,7 +681,7 @@ protected final Query createRangeQuery(String field, boolean hasDocValues, Objec // No need to take into account Range#includeFrom or Range#includeTo, because from and to have already been // rounded up via parseFrom and parseTo methods. public abstract BytesRef encodeRanges(Set ranges) throws IOException; - public abstract List decodeRanges(BytesRef bytes); + public abstract List decodeRanges(BytesRef bytes) throws IOException; /** * Given the Range.to or Range.from Object value from a Range instance, converts that value into a Double. Before converting, it diff --git a/server/src/test/java/org/elasticsearch/common/util/ByteUtilsTests.java b/server/src/test/java/org/elasticsearch/common/util/ByteUtilsTests.java index 03197d6302de8..21a52dfbca56f 100644 --- a/server/src/test/java/org/elasticsearch/common/util/ByteUtilsTests.java +++ b/server/src/test/java/org/elasticsearch/common/util/ByteUtilsTests.java @@ -8,8 +8,6 @@ package org.elasticsearch.common.util; -import org.apache.lucene.store.ByteArrayDataInput; -import org.apache.lucene.store.ByteArrayDataOutput; import org.elasticsearch.test.ESTestCase; import java.io.IOException; @@ -57,42 +55,4 @@ public void testDouble() throws IOException { } } - public void testVLong() throws IOException { - final long[] data = new long[scaledRandomIntBetween(1000, 10000)]; - for (int i = 0; i < data.length; ++i) { - switch (randomInt(4)) { - case 0: - data[i] = 0; - break; - case 1: - data[i] = Long.MAX_VALUE; - break; - case 2: - data[i] = Long.MIN_VALUE; - break; - case 3: - data[i] = randomInt(1 << randomIntBetween(2,30)); - break; - case 4: - data[i] = randomLong(); - break; - default: - throw new AssertionError(); - } - } - final byte[] encoded = new byte[ByteUtils.MAX_BYTES_VLONG * data.length]; - ByteArrayDataOutput out = new ByteArrayDataOutput(encoded); - for (int i = 0; i < data.length; ++i) { - final int pos = out.getPosition(); - ByteUtils.writeVLong(out, data[i]); - if (data[i] < 0) { - assertEquals(ByteUtils.MAX_BYTES_VLONG, out.getPosition() - pos); - } - } - final ByteArrayDataInput in = new ByteArrayDataInput(encoded); - for (int i = 0; i < data.length; ++i) { - assertEquals(data[i], ByteUtils.readVLong(in)); - } - } - } diff --git a/x-pack/plugin/wildcard/src/main/java/org/elasticsearch/xpack/wildcard/mapper/BinaryDvConfirmedAutomatonQuery.java b/x-pack/plugin/wildcard/src/main/java/org/elasticsearch/xpack/wildcard/mapper/BinaryDvConfirmedAutomatonQuery.java index e4a634f473488..8e9b723217399 100644 --- a/x-pack/plugin/wildcard/src/main/java/org/elasticsearch/xpack/wildcard/mapper/BinaryDvConfirmedAutomatonQuery.java +++ b/x-pack/plugin/wildcard/src/main/java/org/elasticsearch/xpack/wildcard/mapper/BinaryDvConfirmedAutomatonQuery.java @@ -21,16 +21,16 @@ import org.apache.lucene.search.Scorer; import org.apache.lucene.search.TwoPhaseIterator; import org.apache.lucene.search.Weight; -import org.apache.lucene.store.ByteArrayDataInput; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.automaton.Automaton; import org.apache.lucene.util.automaton.ByteRunAutomaton; +import org.elasticsearch.common.io.stream.ByteArrayStreamInput; import java.io.IOException; import java.util.Objects; /** - * Query that runs an Automaton across all binary doc values (but only 
for docs that also + * Query that runs an Automaton across all binary doc values (but only for docs that also * match a provided approximation query which is key to getting good performance). */ public class BinaryDvConfirmedAutomatonQuery extends Query { @@ -46,14 +46,14 @@ public BinaryDvConfirmedAutomatonQuery(Query approximation, String field, String this.matchPattern = matchPattern; bytesMatcher = new ByteRunAutomaton(automaton); } - + private BinaryDvConfirmedAutomatonQuery(Query approximation, String field, String matchPattern, ByteRunAutomaton bytesMatcher) { this.approxQuery = approximation; this.field = field; this.matchPattern = matchPattern; this.bytesMatcher = bytesMatcher; - } - + } + @Override public Query rewrite(IndexReader reader) throws IOException { Query approxRewrite = approxQuery.rewrite(reader); @@ -61,17 +61,17 @@ public Query rewrite(IndexReader reader) throws IOException { return new BinaryDvConfirmedAutomatonQuery(approxRewrite, field, matchPattern, bytesMatcher); } return this; - } + } @Override public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float boost) throws IOException { final Weight approxWeight = approxQuery.createWeight(searcher, scoreMode, boost); - + return new ConstantScoreWeight(this, boost) { @Override public Scorer scorer(LeafReaderContext context) throws IOException { - ByteArrayDataInput badi = new ByteArrayDataInput(); + ByteArrayStreamInput bytes = new ByteArrayStreamInput(); final BinaryDocValues values = DocValues.getBinary(context.reader(), field); Scorer approxScorer = approxWeight.scorer(context); if (approxScorer == null) { @@ -89,16 +89,16 @@ public boolean matches() throws IOException { return false; } BytesRef arrayOfValues = values.binaryValue(); - badi.reset(arrayOfValues.bytes); - badi.setPosition(arrayOfValues.offset); + bytes.reset(arrayOfValues.bytes); + bytes.setPosition(arrayOfValues.offset); - int size = badi.readVInt(); + int size = bytes.readVInt(); for (int i=0; i< size; i++) { - int valLength = badi.readVInt(); - if (bytesMatcher.run(arrayOfValues.bytes, badi.getPosition(), valLength)) { + int valLength = bytes.readVInt(); + if (bytesMatcher.run(arrayOfValues.bytes, bytes.getPosition(), valLength)) { return true; } - badi.skipBytes(valLength); + bytes.skipBytes(valLength); } return false; } @@ -130,7 +130,7 @@ public boolean equals(Object obj) { } BinaryDvConfirmedAutomatonQuery other = (BinaryDvConfirmedAutomatonQuery) obj; return Objects.equals(field, other.field) && Objects.equals(matchPattern, other.matchPattern) - && Objects.equals(bytesMatcher, other.bytesMatcher) + && Objects.equals(bytesMatcher, other.bytesMatcher) && Objects.equals(approxQuery, other.approxQuery); } @@ -138,7 +138,7 @@ public boolean equals(Object obj) { public int hashCode() { return Objects.hash(classHash(), field, matchPattern, bytesMatcher, approxQuery); } - + Query getApproximationQuery() { return approxQuery; } From 7a283104c54e12247efc710446e26d90255b109e Mon Sep 17 00:00:00 2001 From: David Kyle Date: Wed, 1 Sep 2021 16:52:45 +0100 Subject: [PATCH 069/128] [ML] Multiple items in a single inference request (#75759) Inference requests can be batched by adding more rows to the input tensor. These batch calls are more performant than making multiple calls to forward() with a single input when all the inputs are of a similar length. The expected input is now a 2D array of tokens and 2D arrays of supporting arguments, the output is a 3D array. 
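A rough illustration of the batched request shape described above (a sketch under assumptions, not the production request builder; the field names "request_id" and "tokens" follow the BERT-style request used elsewhere in this code, and the class itself is hypothetical). Each input in the batch contributes one row to a 2D token array, and the process replies with a 3D array of scores, one 2D slice per input:

import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;

import java.io.IOException;

final class BatchedRequestSketch {

    // Build a JSON request whose "tokens" field is a 2D array: one inner array of
    // token ids per input in the batch. Supporting arguments (attention masks etc.)
    // would be written with the same [batch, sequence] layout.
    static XContentBuilder buildRequest(String requestId, int[][] batchOfTokenIds) throws IOException {
        XContentBuilder builder = XContentFactory.jsonBuilder();
        builder.startObject();
        builder.field("request_id", requestId);
        builder.startArray("tokens");
        for (int[] tokenIds : batchOfTokenIds) {
            builder.startArray();
            for (int tokenId : tokenIds) {
                builder.value(tokenId);
            }
            builder.endArray();
        }
        builder.endArray();
        builder.endObject();
        return builder;
    }
}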
--- .../InferTrainedModelDeploymentAction.java | 4 +- .../xpack/core/ml/utils/MlParserUtils.java | 54 +++++++++++ .../xpack/ml/integration/PyTorchModelIT.java | 17 ++-- .../deployment/DeploymentManager.java | 7 +- .../inference/deployment/PyTorchResult.java | 46 +++++---- .../ml/inference/nlp/BertRequestBuilder.java | 37 ++++---- .../nlp/DistilBertRequestBuilder.java | 25 ++--- .../ml/inference/nlp/FillMaskProcessor.java | 30 +++--- .../xpack/ml/inference/nlp/NerProcessor.java | 15 ++- .../xpack/ml/inference/nlp/NlpTask.java | 56 ++++++++++- .../inference/nlp/PassThroughProcessor.java | 7 +- .../nlp/TextClassificationProcessor.java | 35 +------ .../nlp/tokenizers/BertTokenizer.java | 39 ++++++-- .../nlp/tokenizers/NlpTokenizer.java | 7 +- .../nlp/tokenizers/TokenizationResult.java | 95 ++++++++++++------- .../deployment/PyTorchResultTests.java | 7 +- .../nlp/BertRequestBuilderTests.java | 74 +++++++++++++-- .../nlp/DistilBertRequestBuilderTests.java | 51 ++++++++-- .../inference/nlp/FillMaskProcessorTests.java | 19 ++-- .../ml/inference/nlp/NerProcessorTests.java | 11 ++- .../nlp/TextClassificationProcessorTests.java | 23 +++-- .../nlp/tokenizers/BertTokenizerTests.java | 64 +++++++++++-- 22 files changed, 515 insertions(+), 208 deletions(-) diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/InferTrainedModelDeploymentAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/InferTrainedModelDeploymentAction.java index a44999d4f1eaa..b8eebfe4c6764 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/InferTrainedModelDeploymentAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/InferTrainedModelDeploymentAction.java @@ -68,8 +68,8 @@ public static Request parseRequest(String deploymentId, XContentParser parser) { return builder.build(); } - private String deploymentId; - private List> docs; + private final String deploymentId; + private final List> docs; public Request(String deploymentId, List> docs) { this.deploymentId = ExceptionsHelper.requireNonNull(deploymentId, DEPLOYMENT_ID); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/MlParserUtils.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/MlParserUtils.java index abde9f944cbbe..3e1dafd01ab60 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/MlParserUtils.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/MlParserUtils.java @@ -50,4 +50,58 @@ public static List> parseArrayOfArrays(String fieldName, CheckedFunc } return values; } + + /** + * Parses a 3 dimensional array of doubles. 
+ * + * @param fieldName the field name + * @param parser the outer parser + * @return The 3D array of doubles + * @throws IOException If parsing fails + */ + public static double[][][] parse3DArrayOfDoubles(String fieldName, XContentParser parser) throws IOException { + if (parser.currentToken() != XContentParser.Token.START_ARRAY) { + throw new IllegalArgumentException("unexpected token [" + parser.currentToken() + "] for [" + fieldName + "]"); + } + List>> values = new ArrayList<>(); + while(parser.nextToken() != XContentParser.Token.END_ARRAY) { + if (parser.currentToken() != XContentParser.Token.START_ARRAY) { + throw new IllegalArgumentException("unexpected token [" + parser.currentToken() + "] for [" + fieldName + "]"); + } + + List> innerList = new ArrayList<>(); + + while(parser.nextToken() != XContentParser.Token.END_ARRAY) { + if (parser.currentToken() != XContentParser.Token.START_ARRAY) { + throw new IllegalArgumentException("unexpected token [" + parser.currentToken() + "] for [" + fieldName + "]"); + } + + if (parser.currentToken() != XContentParser.Token.START_ARRAY) { + throw new IllegalArgumentException("unexpected token [" + parser.currentToken() + "] for [" + fieldName + "]"); + } + + List innerInner = new ArrayList<>(); + while (parser.nextToken() != XContentParser.Token.END_ARRAY) { + if (parser.currentToken() != XContentParser.Token.VALUE_NUMBER) { + throw new IllegalStateException("expected non-null numerical value but got [" + parser.currentToken() + "] " + + "for [" + fieldName + "]"); + } + innerInner.add(parser.doubleValue()); + } + innerList.add(innerInner); + } + values.add(innerList); + } + + double [][][] val = new double[values.size()][values.get(0).size()][values.get(0).get(0).size()]; + + for (int i = 0; i < val.length; i++) { + for (int j = 0; j < val[0].length; j++) { + double[] doubles = values.get(i).get(j).stream().mapToDouble(d -> d).toArray(); + System.arraycopy(doubles, 0, val[i][j], 0, doubles.length); + } + } + + return val; + } } diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/PyTorchModelIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/PyTorchModelIT.java index 2bf9945c0e966..5d94b0f10d871 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/PyTorchModelIT.java +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/PyTorchModelIT.java @@ -18,10 +18,12 @@ import org.elasticsearch.xpack.core.ml.integration.MlRestTestStateCleaner; import org.elasticsearch.xpack.core.ml.utils.MapHelper; import org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken; +import org.elasticsearch.xpack.ml.inference.nlp.tokenizers.BertTokenizer; import org.junit.After; import org.junit.Before; import java.io.IOException; +import java.util.ArrayList; import java.util.Base64; import java.util.List; import java.util.Map; @@ -109,7 +111,8 @@ public void setLogging() throws IOException { "{" + "\"transient\" : {\n" + " \"logger.org.elasticsearch.xpack.ml.inference.allocation\" : \"TRACE\",\n" + - " \"logger.org.elasticsearch.xpack.ml.inference.deployment\" : \"TRACE\"\n" + + " \"logger.org.elasticsearch.xpack.ml.inference.deployment\" : \"TRACE\",\n" + + " \"logger.org.elasticsearch.xpack.ml.process.logging\" : \"TRACE\"\n" + " }" + "}"); client().performRequest(loggingSettings); @@ -124,7 +127,8 @@ 
public void cleanup() throws Exception { "{" + "\"transient\" : {\n" + " \"logger.org.elasticsearch.xpack.ml.inference.allocation\" :null,\n" + - " \"logger.org.elasticsearch.xpack.ml.inference.deployment\" : null\n" + + " \"logger.org.elasticsearch.xpack.ml.inference.deployment\" : null,\n" + + " \"logger.org.elasticsearch.xpack.ml.process.logging\" : null\n" + " }" + "}"); client().performRequest(loggingSettings); @@ -133,7 +137,6 @@ public void cleanup() throws Exception { waitForPendingTasks(adminClient()); } - @AwaitsFix(bugUrl = "https://github.com/elastic/ml-cpp/pull/1961") public void testEvaluate() throws IOException, InterruptedException { String modelId = "test_evaluate"; createModelStoreIndex(); @@ -168,7 +171,6 @@ public void testEvaluate() throws IOException, InterruptedException { } @SuppressWarnings("unchecked") - @AwaitsFix(bugUrl = "https://github.com/elastic/ml-cpp/pull/1961") public void testLiveDeploymentStats() throws IOException { String modelA = "model_a"; @@ -193,7 +195,6 @@ public void testLiveDeploymentStats() throws IOException { } @SuppressWarnings("unchecked") - @AwaitsFix(bugUrl = "https://github.com/elastic/ml-cpp/pull/1961") public void testGetDeploymentStats_WithWildcard() throws IOException { { @@ -262,7 +263,6 @@ public void testGetDeploymentStats_WithWildcard() throws IOException { } @SuppressWarnings("unchecked") - @AwaitsFix(bugUrl = "https://github.com/elastic/ml-cpp/pull/1961") public void testGetDeploymentStats_WithStartedStoppedDeployments() throws IOException { putVocabulary(List.of("once", "twice")); String modelFoo = "foo"; @@ -367,7 +367,10 @@ private void createModelStoreIndex() throws IOException { } private void putVocabulary(List vocabulary) throws IOException { - String quotedWords = vocabulary.stream().map(s -> "\"" + s + "\"").collect(Collectors.joining(",")); + List vocabularyWithPad = new ArrayList<>(); + vocabularyWithPad.add(BertTokenizer.PAD_TOKEN); + vocabularyWithPad.addAll(vocabulary); + String quotedWords = vocabularyWithPad.stream().map(s -> "\"" + s + "\"").collect(Collectors.joining(",")); Request request = new Request("PUT", "/" + VOCAB_INDEX + "/_doc/test_vocab"); request.setJsonEntity("{ " + diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/deployment/DeploymentManager.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/deployment/DeploymentManager.java index e54c8567bb415..36ec0e48aee6a 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/deployment/DeploymentManager.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/deployment/DeploymentManager.java @@ -49,6 +49,8 @@ import java.io.IOException; import java.io.InputStream; +import java.util.Collections; +import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Optional; @@ -232,7 +234,10 @@ public void onFailure(Exception e) { @Override protected void doRun() { try { - String text = NlpTask.extractInput(processContext.modelInput.get(), doc); + // The request builder expect a list of inputs which are then batched. + // TODO batching was implemented for expected use-cases such as zero-shot + // classification but is not used here. 
+ List text = Collections.singletonList(NlpTask.extractInput(processContext.modelInput.get(), doc)); NlpTask.Processor processor = processContext.nlpTaskProcessor.get(); processor.validateInputs(text); NlpTask.Request request = processor.getRequestBuilder().buildRequest(text, requestId); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/deployment/PyTorchResult.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/deployment/PyTorchResult.java index e4e6d9aef0a25..8455e4d3c7bfb 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/deployment/PyTorchResult.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/deployment/PyTorchResult.java @@ -7,21 +7,20 @@ package org.elasticsearch.xpack.ml.inference.deployment; -import org.elasticsearch.core.Nullable; -import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.xcontent.ConstructingObjectParser; import org.elasticsearch.common.xcontent.ObjectParser; +import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.core.Nullable; import org.elasticsearch.xpack.core.ml.utils.MlParserUtils; import java.io.IOException; import java.util.Arrays; -import java.util.List; import java.util.Objects; /** @@ -37,21 +36,13 @@ public class PyTorchResult implements ToXContentObject, Writeable { private static final ParseField TIME_MS = new ParseField("time_ms"); public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>("pytorch_result", - a -> new PyTorchResult((String) a[0], (double[][]) a[1], (Long) a[2], (String) a[3])); + a -> new PyTorchResult((String) a[0], (double[][][]) a[1], (Long) a[2], (String) a[3])); static { PARSER.declareString(ConstructingObjectParser.constructorArg(), REQUEST_ID); PARSER.declareField(ConstructingObjectParser.optionalConstructorArg(), - (p, c) -> { - List> listOfListOfDoubles = MlParserUtils.parseArrayOfArrays( - INFERENCE.getPreferredName(), XContentParser::doubleValue, p); - double[][] primitiveDoubles = new double[listOfListOfDoubles.size()][]; - for (int i = 0; i < listOfListOfDoubles.size(); i++) { - List row = listOfListOfDoubles.get(i); - primitiveDoubles[i] = row.stream().mapToDouble(d -> d).toArray(); - } - return primitiveDoubles; - }, + (p, c) -> + MlParserUtils.parse3DArrayOfDoubles(INFERENCE.getPreferredName(), p), INFERENCE, ObjectParser.ValueType.VALUE_ARRAY ); @@ -64,12 +55,12 @@ public static PyTorchResult fromXContent(XContentParser parser) throws IOExcepti } private final String requestId; - private final double[][] inference; + private final double[][][] inference; private final Long timeMs; private final String error; public PyTorchResult(String requestId, - @Nullable double[][] inference, + @Nullable double[][][] inference, @Nullable Long timeMs, @Nullable String error) { this.requestId = Objects.requireNonNull(requestId); @@ -82,7 +73,7 @@ public PyTorchResult(StreamInput in) throws IOException { requestId = in.readString(); boolean hasInference = in.readBoolean(); if (hasInference) { - inference = in.readArray(StreamInput::readDoubleArray, double[][]::new); + inference = in.readArray(in2 -> 
in2.readArray(StreamInput::readDoubleArray, double[][]::new), double[][][]::new); } else { inference = null; } @@ -102,7 +93,7 @@ public String getError() { return error; } - public double[][] getInferenceResult() { + public double[][][] getInferenceResult() { return inference; } @@ -115,7 +106,20 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.startObject(); builder.field(REQUEST_ID.getPreferredName(), requestId); if (inference != null) { - builder.field(INFERENCE.getPreferredName(), inference); + builder.startArray(INFERENCE.getPreferredName()); + for (int i = 0; i < inference.length; i++) { + builder.startArray(); + for (int j = 0; j < inference[0].length; j++) + { + builder.startArray(); + for (int k = 0; k < inference[0][0].length; k++) { + builder.value(inference[i][j][k]); + } + builder.endArray(); + } + builder.endArray(); + } + builder.endArray(); } if (timeMs != null) { builder.field(TIME_MS.getPreferredName(), timeMs); @@ -134,7 +138,9 @@ public void writeTo(StreamOutput out) throws IOException { out.writeBoolean(false); } else { out.writeBoolean(true); - out.writeArray(StreamOutput::writeDoubleArray, inference); + out.writeArray( + (out2, arr) -> out2.writeArray(StreamOutput::writeDoubleArray, arr), + inference); } out.writeOptionalLong(timeMs); out.writeOptionalString(error); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/BertRequestBuilder.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/BertRequestBuilder.java index cd9191f1c0ffe..0572e664ea0e0 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/BertRequestBuilder.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/BertRequestBuilder.java @@ -14,7 +14,7 @@ import org.elasticsearch.xpack.ml.inference.nlp.tokenizers.TokenizationResult; import java.io.IOException; -import java.util.Arrays; +import java.util.List; public class BertRequestBuilder implements NlpTask.RequestBuilder { @@ -31,30 +31,33 @@ public BertRequestBuilder(BertTokenizer tokenizer) { } @Override - public NlpTask.Request buildRequest(String input, String requestId) throws IOException { - TokenizationResult tokenization = tokenizer.tokenize(input); - return new NlpTask.Request(tokenization, jsonRequest(tokenization.getTokenIds(), requestId)); + public NlpTask.Request buildRequest(List inputs, String requestId) throws IOException { + if (tokenizer.getPadToken().isEmpty()) { + throw new IllegalStateException("The input tokenizer does not have a " + BertTokenizer.PAD_TOKEN + + " token in its vocabulary"); + } + + TokenizationResult tokenization = tokenizer.tokenize(inputs); + return new NlpTask.Request(tokenization, jsonRequest(tokenization, tokenizer.getPadToken().getAsInt(), requestId)); } - static BytesReference jsonRequest(int[] tokens, String requestId) throws IOException { + static BytesReference jsonRequest(TokenizationResult tokenization, + int padToken, + String requestId) throws IOException { XContentBuilder builder = XContentFactory.jsonBuilder(); builder.startObject(); builder.field(REQUEST_ID, requestId); - builder.array(TOKENS, tokens); - - int[] inputMask = new int[tokens.length]; - Arrays.fill(inputMask, 1); - int[] segmentMask = new int[tokens.length]; - Arrays.fill(segmentMask, 0); - int[] positionalIds = new int[tokens.length]; - Arrays.setAll(positionalIds, i -> i); - - builder.array(ARG1, inputMask); - builder.array(ARG2, segmentMask); - builder.array(ARG3, 
positionalIds); + + NlpTask.RequestBuilder.writePaddedTokens(TOKENS, tokenization, padToken, (tokens, i) -> tokens.getTokenIds()[i], builder); + NlpTask.RequestBuilder.writePaddedTokens(ARG1, tokenization, padToken, (tokens, i) -> 1, builder); + int batchSize = tokenization.getTokenizations().size(); + NlpTask.RequestBuilder.writeNonPaddedArguments(ARG2, batchSize, tokenization.getLongestSequenceLength(), i -> 0, builder); + NlpTask.RequestBuilder.writeNonPaddedArguments(ARG3, batchSize, tokenization.getLongestSequenceLength(), i -> i, builder); builder.endObject(); // BytesReference.bytes closes the builder return BytesReference.bytes(builder); } + + } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/DistilBertRequestBuilder.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/DistilBertRequestBuilder.java index df842659f2dc5..b406a801c81f1 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/DistilBertRequestBuilder.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/DistilBertRequestBuilder.java @@ -14,7 +14,7 @@ import org.elasticsearch.xpack.ml.inference.nlp.tokenizers.TokenizationResult; import java.io.IOException; -import java.util.Arrays; +import java.util.List; public class DistilBertRequestBuilder implements NlpTask.RequestBuilder { @@ -29,21 +29,24 @@ public DistilBertRequestBuilder(BertTokenizer tokenizer) { } @Override - public NlpTask.Request buildRequest(String input, String requestId) throws IOException { - TokenizationResult result = tokenizer.tokenize(input); - return new NlpTask.Request(result, jsonRequest(result.getTokenIds(), requestId)); + public NlpTask.Request buildRequest(List inputs, String requestId) throws IOException { + if (tokenizer.getPadToken().isEmpty()) { + throw new IllegalStateException("The input tokenizer does not have a " + BertTokenizer.PAD_TOKEN + + " token in its vocabulary"); + } + + TokenizationResult result = tokenizer.tokenize(inputs); + return new NlpTask.Request(result, jsonRequest(result, tokenizer.getPadToken().getAsInt(), requestId)); } - static BytesReference jsonRequest(int[] tokens, String requestId) throws IOException { + static BytesReference jsonRequest(TokenizationResult tokenization, + int padToken, + String requestId) throws IOException { XContentBuilder builder = XContentFactory.jsonBuilder(); builder.startObject(); builder.field(REQUEST_ID, requestId); - builder.array(TOKENS, tokens); - - int[] inputMask = new int[tokens.length]; - Arrays.fill(inputMask, 1); - - builder.array(ARG1, inputMask); + NlpTask.RequestBuilder.writePaddedTokens(TOKENS, tokenization, padToken, (tokens, i) -> tokens.getTokenIds()[i], builder); + NlpTask.RequestBuilder.writePaddedTokens(ARG1, tokenization, padToken, (tokens, i) -> 1, builder); builder.endObject(); // BytesReference.bytes closes the builder diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/FillMaskProcessor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/FillMaskProcessor.java index 65ffdc4293783..a8c1e5e601620 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/FillMaskProcessor.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/FillMaskProcessor.java @@ -30,19 +30,21 @@ public class FillMaskProcessor implements NlpTask.Processor { } @Override - public void validateInputs(String inputs) { - if (inputs.isBlank()) { + public void 
validateInputs(List inputs) { + if (inputs.isEmpty()) { throw new IllegalArgumentException("input request is empty"); } - int maskIndex = inputs.indexOf(BertTokenizer.MASK_TOKEN); - if (maskIndex < 0) { - throw new IllegalArgumentException("no " + BertTokenizer.MASK_TOKEN + " token could be found"); - } + for (String input : inputs) { + int maskIndex = input.indexOf(BertTokenizer.MASK_TOKEN); + if (maskIndex < 0) { + throw new IllegalArgumentException("no " + BertTokenizer.MASK_TOKEN + " token could be found"); + } - maskIndex = inputs.indexOf(BertTokenizer.MASK_TOKEN, maskIndex + BertTokenizer.MASK_TOKEN.length()); - if (maskIndex > 0) { - throw new IllegalArgumentException("only one " + BertTokenizer.MASK_TOKEN + " token should exist in the input"); + maskIndex = input.indexOf(BertTokenizer.MASK_TOKEN, maskIndex + BertTokenizer.MASK_TOKEN.length()); + if (maskIndex > 0) { + throw new IllegalArgumentException("only one " + BertTokenizer.MASK_TOKEN + " token should exist in the input"); + } } } @@ -58,18 +60,20 @@ public NlpTask.ResultProcessor getResultProcessor() { InferenceResults processResult(TokenizationResult tokenization, PyTorchResult pyTorchResult) { - if (tokenization.getTokens().isEmpty()) { + if (tokenization.getTokenizations().isEmpty() || + tokenization.getTokenizations().get(0).getTokens().isEmpty()) { return new FillMaskResults(Collections.emptyList()); } - int maskTokenIndex = tokenization.getTokens().indexOf(BertTokenizer.MASK_TOKEN); - double[] normalizedScores = NlpHelpers.convertToProbabilitiesBySoftMax(pyTorchResult.getInferenceResult()[maskTokenIndex]); + int maskTokenIndex = tokenization.getTokenizations().get(0).getTokens().indexOf(BertTokenizer.MASK_TOKEN); + // TODO - process all results in the batch + double[] normalizedScores = NlpHelpers.convertToProbabilitiesBySoftMax(pyTorchResult.getInferenceResult()[0][maskTokenIndex]); NlpHelpers.ScoreAndIndex[] scoreAndIndices = NlpHelpers.topK(NUM_RESULTS, normalizedScores); List results = new ArrayList<>(NUM_RESULTS); for (NlpHelpers.ScoreAndIndex scoreAndIndex : scoreAndIndices) { String predictedToken = tokenization.getFromVocab(scoreAndIndex.index); - String sequence = tokenization.getInput().replace(BertTokenizer.MASK_TOKEN, predictedToken); + String sequence = tokenization.getTokenizations().get(0).getInput().replace(BertTokenizer.MASK_TOKEN, predictedToken); results.add(new FillMaskResults.Prediction(predictedToken, scoreAndIndex.score, sequence)); } return new FillMaskResults(results); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/NerProcessor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/NerProcessor.java index 1a70c7bda9141..1aa7582d9575f 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/NerProcessor.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/NerProcessor.java @@ -119,7 +119,7 @@ static IobTag[] buildIobMap(List classificationLabels) { } @Override - public void validateInputs(String inputs) { + public void validateInputs(List inputs) { // No validation } @@ -142,17 +142,20 @@ static class NerResultProcessor implements NlpTask.ResultProcessor { @Override public InferenceResults processResult(TokenizationResult tokenization, PyTorchResult pyTorchResult) { - if (tokenization.getTokens().isEmpty()) { + if (tokenization.getTokenizations().isEmpty() || + tokenization.getTokenizations().get(0).getTokens().isEmpty()) { return new NerResults(Collections.emptyList()); } + // 
TODO - process all results in the batch + // TODO It might be best to do the soft max after averaging scores for // sub-tokens. If we had a word that is "elastic" which is tokenized to // "el" and "astic" then perhaps we get a prediction for org of 10 for "el" // and -5 for "astic". Averaging after softmax would produce a prediction // of maybe (1 + 0) / 2 = 0.5 while before softmax it'd be exp(10 - 5) / normalization // which could easily be close to 1. - double[][] normalizedScores = NlpHelpers.convertToProbabilitiesBySoftMax(pyTorchResult.getInferenceResult()); - List taggedTokens = tagTokens(tokenization, normalizedScores); + double[][] normalizedScores = NlpHelpers.convertToProbabilitiesBySoftMax(pyTorchResult.getInferenceResult()[0]); + List taggedTokens = tagTokens(tokenization.getTokenizations().get(0), normalizedScores, iobMap); List entities = groupTaggedTokens(taggedTokens); return new NerResults(entities); } @@ -163,7 +166,9 @@ public InferenceResults processResult(TokenizationResult tokenization, PyTorchRe * in the original input replacing them with a single token that * gets labelled based on the average score of all its sub-tokens. */ - private List tagTokens(TokenizationResult tokenization, double[][] scores) { + static List tagTokens(TokenizationResult.Tokenization tokenization, + double[][] scores, + IobTag[] iobMap) { List taggedTokens = new ArrayList<>(); int startTokenIndex = 0; while (startTokenIndex < tokenization.getTokens().size()) { diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/NlpTask.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/NlpTask.java index 3bc0c26c52e44..b92b8743930bc 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/NlpTask.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/NlpTask.java @@ -9,16 +9,18 @@ import org.elasticsearch.common.ValidationException; import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.xpack.core.ml.inference.TrainedModelInput; +import org.elasticsearch.xpack.core.ml.inference.results.InferenceResults; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.NlpConfig; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; import org.elasticsearch.xpack.ml.inference.deployment.PyTorchResult; -import org.elasticsearch.xpack.core.ml.inference.results.InferenceResults; import org.elasticsearch.xpack.ml.inference.nlp.tokenizers.NlpTokenizer; import org.elasticsearch.xpack.ml.inference.nlp.tokenizers.TokenizationResult; import java.io.IOException; +import java.util.List; import java.util.Map; import java.util.Objects; @@ -34,7 +36,7 @@ public NlpTask(NlpConfig config, Vocabulary vocabulary) { /** * Create and validate the NLP Processor - * @return + * @return the processor based on task type * @throws ValidationException if the validation fails */ public Processor createProcessor() throws ValidationException { @@ -42,7 +44,53 @@ public Processor createProcessor() throws ValidationException { } public interface RequestBuilder { - Request buildRequest(String inputs, String requestId) throws IOException; + @FunctionalInterface + interface IntToIntFunction { + int applyAsInt(int value); + } + + @FunctionalInterface + interface TokenLookupFunction { + int apply(TokenizationResult.Tokenization tokenization, int index); + } + + Request 
buildRequest(List inputs, String requestId) throws IOException; + + static void writePaddedTokens(String fieldName, + TokenizationResult tokenization, + int padToken, + TokenLookupFunction generator, + XContentBuilder builder) throws IOException { + builder.startArray(fieldName); + for (var inputTokens : tokenization.getTokenizations()) { + builder.startArray(); + int i = 0; + for (; i < inputTokens.getTokenIds().length; i++) { + builder.value(generator.apply(inputTokens, i)); + } + + for (; i < tokenization.getLongestSequenceLength(); i++) { + builder.value(padToken); + } + builder.endArray(); + } + builder.endArray(); + } + + static void writeNonPaddedArguments(String fieldName, + int numTokenizations, int longestSequenceLength, + IntToIntFunction generator, + XContentBuilder builder) throws IOException { + builder.startArray(fieldName); + for (int i = 0; i < numTokenizations; i++) { + builder.startArray(); + for (int j = 0; j < longestSequenceLength; j++) { + builder.value(generator.applyAsInt(j)); + } + builder.endArray(); + } + builder.endArray(); + } } public interface ResultProcessor { @@ -60,7 +108,7 @@ public interface Processor { * * @param inputs Text to validate */ - void validateInputs(String inputs); + void validateInputs(List inputs); RequestBuilder getRequestBuilder(); ResultProcessor getResultProcessor(); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/PassThroughProcessor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/PassThroughProcessor.java index 288cd99af6795..6da24823f0a7c 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/PassThroughProcessor.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/PassThroughProcessor.java @@ -14,6 +14,8 @@ import org.elasticsearch.xpack.ml.inference.nlp.tokenizers.NlpTokenizer; import org.elasticsearch.xpack.ml.inference.nlp.tokenizers.TokenizationResult; +import java.util.List; + /** * A NLP processor that directly returns the PyTorch result * without any post-processing @@ -27,7 +29,7 @@ public class PassThroughProcessor implements NlpTask.Processor { } @Override - public void validateInputs(String inputs) { + public void validateInputs(List inputs) { // nothing to validate } @@ -42,6 +44,7 @@ public NlpTask.ResultProcessor getResultProcessor() { } private static InferenceResults processResult(TokenizationResult tokenization, PyTorchResult pyTorchResult) { - return new PyTorchPassThroughResults(pyTorchResult.getInferenceResult()); + // TODO - process all results in the batch + return new PyTorchPassThroughResults(pyTorchResult.getInferenceResult()[0]); } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/TextClassificationProcessor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/TextClassificationProcessor.java index 91cfc0bc7d7a8..00b1839852d79 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/TextClassificationProcessor.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/TextClassificationProcessor.java @@ -9,9 +9,6 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.ValidationException; -import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.xpack.core.ml.inference.results.InferenceResults; import 
org.elasticsearch.xpack.core.ml.inference.results.TextClassificationResults; import org.elasticsearch.xpack.core.ml.inference.results.TopClassEntry; @@ -21,8 +18,6 @@ import org.elasticsearch.xpack.ml.inference.nlp.tokenizers.NlpTokenizer; import org.elasticsearch.xpack.ml.inference.nlp.tokenizers.TokenizationResult; -import java.io.IOException; -import java.util.Arrays; import java.util.Comparator; import java.util.List; import java.util.Locale; @@ -31,12 +26,12 @@ public class TextClassificationProcessor implements NlpTask.Processor { - private final NlpTokenizer tokenizer; + private final NlpTask.RequestBuilder requestBuilder; private final String[] classLabels; private final int numTopClasses; TextClassificationProcessor(NlpTokenizer tokenizer, TextClassificationConfig config) { - this.tokenizer = tokenizer; + this.requestBuilder = tokenizer.requestBuilder(); List classLabels = config.getClassificationLabels(); if (classLabels == null || classLabels.isEmpty()) { this.classLabels = new String[] {"negative", "positive"}; @@ -73,18 +68,13 @@ private void validate() { } @Override - public void validateInputs(String inputs) { + public void validateInputs(List inputs) { // nothing to validate } @Override public NlpTask.RequestBuilder getRequestBuilder() { - return this::buildRequest; - } - - NlpTask.Request buildRequest(String input, String requestId) throws IOException { - TokenizationResult tokenization = tokenizer.tokenize(input); - return new NlpTask.Request(tokenization, jsonRequest(tokenization.getTokenIds(), requestId)); + return requestBuilder; } @Override @@ -105,7 +95,7 @@ InferenceResults processResult(TokenizationResult tokenization, PyTorchResult py ); } - double[] normalizedScores = NlpHelpers.convertToProbabilitiesBySoftMax(pyTorchResult.getInferenceResult()[0]); + double[] normalizedScores = NlpHelpers.convertToProbabilitiesBySoftMax(pyTorchResult.getInferenceResult()[0][0]); return new TextClassificationResults( IntStream.range(0, normalizedScores.length) .mapToObj(i -> new TopClassEntry(classLabels[i], normalizedScores[i])) @@ -115,19 +105,4 @@ InferenceResults processResult(TokenizationResult tokenization, PyTorchResult py .collect(Collectors.toList()) ); } - - static BytesReference jsonRequest(int[] tokens, String requestId) throws IOException { - XContentBuilder builder = XContentFactory.jsonBuilder(); - builder.startObject(); - builder.field(BertRequestBuilder.REQUEST_ID, requestId); - builder.array(BertRequestBuilder.TOKENS, tokens); - - int[] inputMask = new int[tokens.length]; - Arrays.fill(inputMask, 1); - builder.array(BertRequestBuilder.ARG1, inputMask); - builder.endObject(); - - // BytesReference.bytes closes the builder - return BytesReference.bytes(builder); - } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/BertTokenizer.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/BertTokenizer.java index a93db982fd840..48e5154261fd5 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/BertTokenizer.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/BertTokenizer.java @@ -13,10 +13,9 @@ import org.elasticsearch.xpack.ml.inference.nlp.NlpTask; import java.util.ArrayList; -import java.util.Arrays; import java.util.Collections; -import java.util.HashSet; import java.util.List; +import java.util.OptionalInt; import java.util.Set; import java.util.SortedMap; import java.util.TreeMap; @@ -42,7 +41,7 @@ 
public class BertTokenizer implements NlpTokenizer { public static final int DEFAULT_MAX_INPUT_CHARS_PER_WORD = 100; - private final Set NEVER_SPLIT = new HashSet<>(Arrays.asList(MASK_TOKEN)); + private final Set NEVER_SPLIT = Set.of(MASK_TOKEN); private final WordPieceTokenizer wordPieceTokenizer; private final List originalVocab; @@ -78,16 +77,28 @@ protected BertTokenizer(List originalVocab, } /** - * Tokenize the input according to the basic tokenization options - * then perform Word Piece tokenization with the given vocabulary. + * Tokenize the list of inputs according to the basic tokenization + * options then perform Word Piece tokenization with the given vocabulary. * * The result is the Word Piece tokens, a map of the Word Piece - * token position to the position of the token in the source + * token position to the position of the token in the source for + * each input string grouped into a {@link Tokenization}. + * * @param text Text to tokenize - * @return Tokenized text, token Ids and map + * @return A {@link Tokenization} */ @Override - public TokenizationResult tokenize(String text) { + public TokenizationResult tokenize(List text) { + TokenizationResult tokenization = new TokenizationResult(originalVocab); + + for (String input: text) { + addTokenization(tokenization, input); + } + return tokenization; + } + + + private void addTokenization(TokenizationResult tokenization, String text) { BasicTokenizer basicTokenizer = new BasicTokenizer(doLowerCase, doTokenizeCjKChars, doStripAccents, neverSplit); List delineatedTokens = basicTokenizer.tokenize(text); @@ -145,7 +156,17 @@ public TokenizationResult tokenize(String text) { ); } - return new TokenizationResult(text, originalVocab, tokens, tokenIds, tokenMap); + tokenization.addTokenization(text, tokens, tokenIds, tokenMap); + } + + @Override + public OptionalInt getPadToken() { + Integer pad = vocab.get(PAD_TOKEN); + if (pad != null) { + return OptionalInt.of(pad); + } else { + return OptionalInt.empty(); + } } @Override diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/NlpTokenizer.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/NlpTokenizer.java index d14f9fd7517ce..42543e34a6315 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/NlpTokenizer.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/NlpTokenizer.java @@ -16,15 +16,20 @@ import org.elasticsearch.xpack.ml.inference.nlp.NlpTask; import org.elasticsearch.xpack.ml.inference.nlp.Vocabulary; +import java.util.List; +import java.util.OptionalInt; + import static org.elasticsearch.xpack.core.ml.inference.trainedmodel.NlpConfig.TOKENIZATION; import static org.elasticsearch.xpack.core.ml.inference.trainedmodel.NlpConfig.VOCABULARY; public interface NlpTokenizer { - TokenizationResult tokenize(String text); + TokenizationResult tokenize(List text); NlpTask.RequestBuilder requestBuilder(); + OptionalInt getPadToken(); + static NlpTokenizer build(Vocabulary vocabulary, Tokenization params) { ExceptionsHelper.requireNonNull(params, TOKENIZATION); ExceptionsHelper.requireNonNull(vocabulary, VOCABULARY); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/TokenizationResult.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/TokenizationResult.java index a464bfc46253c..1fac5c9f096c3 100644 --- 
a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/TokenizationResult.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/TokenizationResult.java @@ -7,57 +7,84 @@ package org.elasticsearch.xpack.ml.inference.nlp.tokenizers; +import java.util.ArrayList; import java.util.List; public class TokenizationResult { - String input; - final List vocab; - private final List tokens; - private final int [] tokenIds; - private final int [] tokenMap; + private final List vocab; + private final List tokenizations = new ArrayList<>(); + private int maxLength; - public TokenizationResult(String input, List vocab, List tokens, int[] tokenIds, int[] tokenMap) { - assert tokens.size() == tokenIds.length; - assert tokenIds.length == tokenMap.length; - this.input = input; + public TokenizationResult(List vocab) { this.vocab = vocab; - this.tokens = tokens; - this.tokenIds = tokenIds; - this.tokenMap = tokenMap; + this.maxLength = -1; } public String getFromVocab(int tokenId) { return vocab.get(tokenId); } - /** - * The token strings from the tokenization process - * @return A list of tokens - */ - public List getTokens() { - return tokens; + public List getTokenizations() { + return tokenizations; } - /** - * The integer values of the tokens in {@link #getTokens()} - * @return A list of token Ids - */ - public int[] getTokenIds() { - return tokenIds; + public void addTokenization(String input, List tokens, int[] tokenIds, int[] tokenMap) { + maxLength = Math.max(maxLength, tokenIds.length); + tokenizations.add(new Tokenization(input, tokens, tokenIds, tokenMap)); } - /** - * Maps the token position to the position in the source text. - * Source words may be divided into more than one token so more - * than one token can map back to the source token - * @return Map of source token to - */ - public int[] getTokenMap() { - return tokenMap; + public int getLongestSequenceLength() { + return maxLength; } - public String getInput() { - return input; + public static class Tokenization { + + String input; + private final List tokens; + private final int[] tokenIds; + private final int[] tokenMap; + + public Tokenization(String input, List tokens, int[] tokenIds, int[] tokenMap) { + assert tokens.size() == tokenIds.length; + assert tokenIds.length == tokenMap.length; + this.input = input; + this.tokens = tokens; + this.tokenIds = tokenIds; + this.tokenMap = tokenMap; + } + + /** + * The token strings from the tokenization process + * + * @return A list of tokens + */ + public List getTokens() { + return tokens; + } + + /** + * The integer values of the tokens in {@link #getTokens()} + * + * @return A list of token Ids + */ + public int[] getTokenIds() { + return tokenIds; + } + + /** + * Maps the token position to the position in the source text. 
+ * Source words may be divided into more than one token so more + * than one token can map back to the source token + * + * @return Map of source token to + */ + public int[] getTokenMap() { + return tokenMap; + } + + public String getInput() { + return input; + } } } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/deployment/PyTorchResultTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/deployment/PyTorchResultTests.java index e73b9acdee617..89cd48467f390 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/deployment/PyTorchResultTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/deployment/PyTorchResultTests.java @@ -33,10 +33,13 @@ protected PyTorchResult createTestInstance() { } else { int rows = randomIntBetween(1, 10); int columns = randomIntBetween(1, 10); - double [][] arr = new double[rows][columns]; + int depth = randomIntBetween(1, 10); + double [][][] arr = new double[rows][columns][depth]; for (int i=0; i jsonDocAsMap = XContentHelper.convertToMap(request.processInput, true, XContentType.JSON).v2(); assertThat(jsonDocAsMap.keySet(), hasSize(5)); assertEquals("request1", jsonDocAsMap.get("request_id")); - assertEquals(Arrays.asList(3, 0, 1, 2, 4), jsonDocAsMap.get("tokens")); - assertEquals(Arrays.asList(1, 1, 1, 1, 1), jsonDocAsMap.get("arg_1")); - assertEquals(Arrays.asList(0, 0, 0, 0, 0), jsonDocAsMap.get("arg_2")); - assertEquals(Arrays.asList(0, 1, 2, 3, 4), jsonDocAsMap.get("arg_3")); + assertEquals(Arrays.asList(3, 0, 1, 2, 4), firstListItemFromMap("tokens", jsonDocAsMap)); + assertEquals(Arrays.asList(1, 1, 1, 1, 1), firstListItemFromMap("arg_1", jsonDocAsMap)); + assertEquals(Arrays.asList(0, 0, 0, 0, 0), firstListItemFromMap("arg_2", jsonDocAsMap)); + assertEquals(Arrays.asList(0, 1, 2, 3, 4), firstListItemFromMap("arg_3", jsonDocAsMap)); + } + + @SuppressWarnings("unchecked") + private List firstListItemFromMap(String name, Map jsonDocAsMap) { + return nthListItemFromMap(name, 0, jsonDocAsMap); + } + + @SuppressWarnings("unchecked") + public static List nthListItemFromMap(String name, int n, Map jsonDocAsMap) { + return ((List>)jsonDocAsMap.get(name)).get(n); } public void testInputTooLarge() throws IOException { BertTokenizer tokenizer = BertTokenizer.builder( - Arrays.asList("Elastic", "##search", "fun", BertTokenizer.CLASS_TOKEN, BertTokenizer.SEPARATOR_TOKEN), + Arrays.asList("Elastic", "##search", "fun", BertTokenizer.CLASS_TOKEN, BertTokenizer.SEPARATOR_TOKEN, BertTokenizer.PAD_TOKEN), new BertTokenization(null, null, 5) ).build(); { BertRequestBuilder requestBuilder = new BertRequestBuilder(tokenizer); ElasticsearchStatusException e = expectThrows(ElasticsearchStatusException.class, - () -> requestBuilder.buildRequest("Elasticsearch fun Elasticsearch fun Elasticsearch fun", "request1")); + () -> requestBuilder.buildRequest(Collections.singletonList("Elasticsearch fun Elasticsearch fun Elasticsearch fun"), + "request1")); assertThat(e.getMessage(), containsString("Input too large. 
The tokenized input length [11] exceeds the maximum sequence length [5]")); @@ -59,7 +71,49 @@ public void testInputTooLarge() throws IOException { BertRequestBuilder requestBuilder = new BertRequestBuilder(tokenizer); // input will become 3 tokens + the Class and Separator token = 5 which is // our max sequence length - requestBuilder.buildRequest("Elasticsearch fun", "request1"); + requestBuilder.buildRequest(Collections.singletonList("Elasticsearch fun"), "request1"); } } + + @SuppressWarnings("unchecked") + public void testBatchWithPadding() throws IOException { + BertTokenizer tokenizer = BertTokenizer.builder( + Arrays.asList(BertTokenizer.PAD_TOKEN, BertTokenizer.CLASS_TOKEN, BertTokenizer.SEPARATOR_TOKEN, + "Elastic", "##search", "fun", + "Pancake", "day", + "my", "little", "red", "car", + "God", "##zilla" + ), + new BertTokenization(null, null, 512) + ).build(); + + BertRequestBuilder requestBuilder = new BertRequestBuilder(tokenizer); + NlpTask.Request request = requestBuilder.buildRequest( + List.of("Elasticsearch", + "my little red car", + "Godzilla day"), "request1"); + Map jsonDocAsMap = XContentHelper.convertToMap(request.processInput, true, XContentType.JSON).v2(); + + assertThat(jsonDocAsMap.keySet(), hasSize(5)); + assertThat((List>) jsonDocAsMap.get("tokens"), hasSize(3)); + assertThat((List>) jsonDocAsMap.get("arg_1"), hasSize(3)); + assertThat((List>) jsonDocAsMap.get("arg_2"), hasSize(3)); + assertThat((List>) jsonDocAsMap.get("arg_3"), hasSize(3)); + + assertEquals("request1", jsonDocAsMap.get("request_id")); + assertEquals(Arrays.asList(1, 3, 4, 2, 0, 0), nthListItemFromMap("tokens", 0, jsonDocAsMap)); + assertEquals(Arrays.asList(1, 1, 1, 1, 0, 0), nthListItemFromMap("arg_1", 0, jsonDocAsMap)); + assertEquals(Arrays.asList(0, 0, 0, 0, 0, 0), nthListItemFromMap("arg_2", 0, jsonDocAsMap)); + assertEquals(Arrays.asList(0, 1, 2, 3, 4, 5), nthListItemFromMap("arg_3", 0, jsonDocAsMap)); + + assertEquals(Arrays.asList(1, 8, 9, 10, 11, 2), nthListItemFromMap("tokens", 1, jsonDocAsMap)); + assertEquals(Arrays.asList(1, 1, 1, 1, 1, 1), nthListItemFromMap("arg_1", 1, jsonDocAsMap)); + assertEquals(Arrays.asList(0, 0, 0, 0, 0, 0), nthListItemFromMap("arg_2", 1, jsonDocAsMap)); + assertEquals(Arrays.asList(0, 1, 2, 3, 4, 5), nthListItemFromMap("arg_3", 1, jsonDocAsMap)); + + assertEquals(Arrays.asList(1, 12, 13, 7, 2, 0), nthListItemFromMap("tokens", 2, jsonDocAsMap)); + assertEquals(Arrays.asList(1, 1, 1, 1, 1, 0), nthListItemFromMap("arg_1", 2, jsonDocAsMap)); + assertEquals(Arrays.asList(0, 0, 0, 0, 0, 0), nthListItemFromMap("arg_2", 2, jsonDocAsMap)); + assertEquals(Arrays.asList(0, 1, 2, 3, 4, 5), nthListItemFromMap("arg_3", 2, jsonDocAsMap)); + } } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/DistilBertRequestBuilderTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/DistilBertRequestBuilderTests.java index 9940e1658c65f..f591737ff3451 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/DistilBertRequestBuilderTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/DistilBertRequestBuilderTests.java @@ -12,13 +12,16 @@ import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.core.ml.inference.trainedmodel.BertTokenization; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.DistilBertTokenization; 
import org.elasticsearch.xpack.ml.inference.nlp.tokenizers.BertTokenizer; import java.io.IOException; import java.util.Arrays; +import java.util.List; import java.util.Map; +import static org.elasticsearch.xpack.ml.inference.nlp.BertRequestBuilderTests.nthListItemFromMap; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.hasSize; @@ -26,30 +29,30 @@ public class DistilBertRequestBuilderTests extends ESTestCase { public void testBuildRequest() throws IOException { BertTokenizer tokenizer = BertTokenizer.builder( - Arrays.asList("Elastic", "##search", "fun", BertTokenizer.CLASS_TOKEN, BertTokenizer.SEPARATOR_TOKEN), + Arrays.asList("Elastic", "##search", "fun", BertTokenizer.CLASS_TOKEN, BertTokenizer.SEPARATOR_TOKEN, BertTokenizer.PAD_TOKEN), new DistilBertTokenization(null, null, 512) ).build(); DistilBertRequestBuilder requestBuilder = new DistilBertRequestBuilder(tokenizer); - BytesReference bytesReference = requestBuilder.buildRequest("Elasticsearch fun", "request1").processInput; + BytesReference bytesReference = requestBuilder.buildRequest(List.of("Elasticsearch fun"), "request1").processInput; Map jsonDocAsMap = XContentHelper.convertToMap(bytesReference, true, XContentType.JSON).v2(); assertThat(jsonDocAsMap.keySet(), hasSize(3)); assertEquals("request1", jsonDocAsMap.get("request_id")); - assertEquals(Arrays.asList(3, 0, 1, 2, 4), jsonDocAsMap.get("tokens")); - assertEquals(Arrays.asList(1, 1, 1, 1, 1), jsonDocAsMap.get("arg_1")); + assertEquals(Arrays.asList(3, 0, 1, 2, 4), nthListItemFromMap("tokens", 0, jsonDocAsMap)); + assertEquals(Arrays.asList(1, 1, 1, 1, 1), nthListItemFromMap("arg_1", 0, jsonDocAsMap)); } public void testInputTooLarge() throws IOException { BertTokenizer tokenizer = BertTokenizer.builder( - Arrays.asList("Elastic", "##search", "fun", BertTokenizer.CLASS_TOKEN, BertTokenizer.SEPARATOR_TOKEN), + Arrays.asList("Elastic", "##search", "fun", BertTokenizer.CLASS_TOKEN, BertTokenizer.SEPARATOR_TOKEN, BertTokenizer.PAD_TOKEN), new DistilBertTokenization(null, null, 5) ).build(); { DistilBertRequestBuilder requestBuilder = new DistilBertRequestBuilder(tokenizer); ElasticsearchStatusException e = expectThrows(ElasticsearchStatusException.class, - () -> requestBuilder.buildRequest("Elasticsearch fun Elasticsearch fun Elasticsearch fun", "request1")); + () -> requestBuilder.buildRequest(List.of("Elasticsearch fun Elasticsearch fun Elasticsearch fun"), "request1")); assertThat(e.getMessage(), containsString("Input too large. 
The tokenized input length [11] exceeds the maximum sequence length [5]")); @@ -58,7 +61,41 @@ public void testInputTooLarge() throws IOException { DistilBertRequestBuilder requestBuilder = new DistilBertRequestBuilder(tokenizer); // input will become 3 tokens + the Class and Separator token = 5 which is // our max sequence length - requestBuilder.buildRequest("Elasticsearch fun", "request1"); + requestBuilder.buildRequest(List.of("Elasticsearch fun"), "request1"); } } + + @SuppressWarnings("unchecked") + public void testBatchWithPadding() throws IOException { + BertTokenizer tokenizer = BertTokenizer.builder( + Arrays.asList(BertTokenizer.PAD_TOKEN, BertTokenizer.CLASS_TOKEN, BertTokenizer.SEPARATOR_TOKEN, + "Elastic", "##search", "fun", + "Pancake", "day", + "my", "little", "red", "car", + "God", "##zilla" + ), + new BertTokenization(null, null, 512) + ).build(); + + DistilBertRequestBuilder requestBuilder = new DistilBertRequestBuilder(tokenizer); + NlpTask.Request request = requestBuilder.buildRequest( + List.of("Elasticsearch", + "my little red car", + "Godzilla day"), "request1"); + Map jsonDocAsMap = XContentHelper.convertToMap(request.processInput, true, XContentType.JSON).v2(); + + assertEquals("request1", jsonDocAsMap.get("request_id")); + assertThat(jsonDocAsMap.keySet(), hasSize(3)); + assertThat((List>) jsonDocAsMap.get("tokens"), hasSize(3)); + assertThat((List>) jsonDocAsMap.get("arg_1"), hasSize(3)); + + assertEquals(Arrays.asList(1, 3, 4, 2, 0, 0), nthListItemFromMap("tokens", 0, jsonDocAsMap)); + assertEquals(Arrays.asList(1, 1, 1, 1, 0, 0), nthListItemFromMap("arg_1", 0, jsonDocAsMap)); + + assertEquals(Arrays.asList(1, 8, 9, 10, 11, 2), nthListItemFromMap("tokens", 1, jsonDocAsMap)); + assertEquals(Arrays.asList(1, 1, 1, 1, 1, 1), nthListItemFromMap("arg_1", 1, jsonDocAsMap)); + + assertEquals(Arrays.asList(1, 12, 13, 7, 2, 0), nthListItemFromMap("tokens", 2, jsonDocAsMap)); + assertEquals(Arrays.asList(1, 1, 1, 1, 1, 0), nthListItemFromMap("arg_1", 2, jsonDocAsMap)); + } } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/FillMaskProcessorTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/FillMaskProcessorTests.java index d2fd58e4fa26f..331cd8deb3748 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/FillMaskProcessorTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/FillMaskProcessorTests.java @@ -29,14 +29,14 @@ public class FillMaskProcessorTests extends ESTestCase { public void testProcessResults() { // only the scores of the MASK index array // are used the rest is filler - double[][] scores = { + double[][][] scores = {{ { 0, 0, 0, 0, 0, 0, 0}, // The { 0, 0, 0, 0, 0, 0, 0}, // capital { 0, 0, 0, 0, 0, 0, 0}, // of { 0.01, 0.01, 0.3, 0.1, 0.01, 0.2, 1.2}, // MASK { 0, 0, 0, 0, 0, 0, 0}, // is { 0, 0, 0, 0, 0, 0, 0} // paris - }; + }}; String input = "The capital of " + BertTokenizer.MASK_TOKEN + " is Paris"; @@ -45,7 +45,8 @@ public void testProcessResults() { int[] tokenMap = new int[] {0, 1, 2, 3, 4, 5}; int[] tokenIds = new int[] {0, 1, 2, 3, 4, 5}; - TokenizationResult tokenization = new TokenizationResult(input, vocab, tokens, tokenIds, tokenMap); + TokenizationResult tokenization = new TokenizationResult(vocab); + tokenization.addTokenization(input, tokens, tokenIds, tokenMap); FillMaskConfig config = new FillMaskConfig(new VocabularyConfig("test-index", "vocab"), null); @@ -66,21 +67,19 @@ public void 
testProcessResults() { } public void testProcessResults_GivenMissingTokens() { - TokenizationResult tokenization = - new TokenizationResult("", Collections.emptyList(), Collections.emptyList(), - new int[] {}, new int[] {}); + TokenizationResult tokenization = new TokenizationResult(Collections.emptyList()); + tokenization.addTokenization("", Collections.emptyList(), new int[] {}, new int[] {}); FillMaskConfig config = new FillMaskConfig(new VocabularyConfig("test-index", "vocab"), null); FillMaskProcessor processor = new FillMaskProcessor(mock(BertTokenizer.class), config); - PyTorchResult pyTorchResult = new PyTorchResult("1", new double[][]{{}}, 0L, null); - + PyTorchResult pyTorchResult = new PyTorchResult("1", new double[][][]{{{}}}, 0L, null); FillMaskResults result = (FillMaskResults) processor.processResult(tokenization, pyTorchResult); assertThat(result.getPredictions(), empty()); } public void testValidate_GivenMissingMaskToken() { - String input = "The capital of France is Paris"; + List input = List.of("The capital of France is Paris"); FillMaskConfig config = new FillMaskConfig(new VocabularyConfig("test-index", "vocab"), null); FillMaskProcessor processor = new FillMaskProcessor(mock(BertTokenizer.class), config); @@ -92,7 +91,7 @@ public void testValidate_GivenMissingMaskToken() { public void testProcessResults_GivenMultipleMaskTokens() { - String input = "The capital of [MASK] is [MASK]"; + List input = List.of("The capital of [MASK] is [MASK]"); FillMaskConfig config = new FillMaskConfig(new VocabularyConfig("test-index", "vocab"), null); FillMaskProcessor processor = new FillMaskProcessor(mock(BertTokenizer.class), config); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/NerProcessorTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/NerProcessorTests.java index f9547aa35c3e8..96101cbaf5675 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/NerProcessorTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/NerProcessorTests.java @@ -95,7 +95,8 @@ public void testProcessResults() { Arrays.asList("el", "##astic", "##search", "many", "use", "in", "london"), "Many use Elasticsearch in London" ); - double[][] scores = { + + double[][][] scores = {{ { 7, 0, 0, 0, 0, 0, 0, 0, 0}, // many { 7, 0, 0, 0, 0, 0, 0, 0, 0}, // use { 0.01, 0.01, 0, 0.01, 0, 7, 0, 3, 0}, // el @@ -103,7 +104,7 @@ public void testProcessResults() { { 0, 0, 0, 0, 0, 0, 0, 0, 0}, // ##search { 0, 0, 0, 0, 0, 0, 0, 0, 0}, // in { 0, 0, 0, 0, 0, 0, 0, 6, 0} // london - }; + }}; NerResults result = (NerResults) processor.processResult(tokenization, new PyTorchResult("1", scores, 1L, null)); assertThat(result.getEntityGroups().size(), equalTo(2)); @@ -133,13 +134,13 @@ public void testProcessResults_withIobMap() { "Elasticsearch in London" ); - double[][] scores = { + double[][][] scores = {{ { 0.01, 0.01, 0, 0.01, 0, 0, 7, 3, 0}, // el { 0.01, 0.01, 0, 0, 0, 0, 0, 0, 0}, // ##astic { 0, 0, 0, 0, 0, 0, 0, 0, 0}, // ##search { 0, 0, 0, 0, 0, 0, 0, 0, 5}, // in { 6, 0, 0, 0, 0, 0, 0, 0, 0} // london - }; + }}; NerResults result = (NerResults) processor.processResult(tokenization, new PyTorchResult("1", scores, 1L, null)); assertThat(result.getEntityGroups().size(), equalTo(2)); @@ -225,6 +226,6 @@ private static TokenizationResult tokenize(List vocab, String input) { new DistilBertTokenization(true, false, null) ) ).setDoLowerCase(true).setWithSpecialTokens(false).build(); - 
return tokenizer.tokenize(input); + return tokenizer.tokenize(List.of(input)); } } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/TextClassificationProcessorTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/TextClassificationProcessorTests.java index a43efc8499c72..e0b3d8bead028 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/TextClassificationProcessorTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/TextClassificationProcessorTests.java @@ -13,11 +13,12 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.core.ml.inference.results.InferenceResults; import org.elasticsearch.xpack.core.ml.inference.results.WarningInferenceResults; -import org.elasticsearch.xpack.core.ml.inference.trainedmodel.BertTokenization; +import org.elasticsearch.xpack.core.ml.inference.trainedmodel.DistilBertTokenization; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.TextClassificationConfig; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.VocabularyConfig; import org.elasticsearch.xpack.ml.inference.deployment.PyTorchResult; import org.elasticsearch.xpack.ml.inference.nlp.tokenizers.BertTokenizer; +import org.elasticsearch.xpack.ml.inference.nlp.tokenizers.NlpTokenizer; import java.io.IOException; import java.util.Arrays; @@ -35,13 +36,13 @@ public void testInvalidResult() { TextClassificationConfig config = new TextClassificationConfig(new VocabularyConfig("test-index", "vocab"), null, null, null); TextClassificationProcessor processor = new TextClassificationProcessor(mock(BertTokenizer.class), config); { - PyTorchResult torchResult = new PyTorchResult("foo", new double[][] {}, 0L, null); + PyTorchResult torchResult = new PyTorchResult("foo", new double[][][] {}, 0L, null); InferenceResults inferenceResults = processor.processResult(null, torchResult); assertThat(inferenceResults, instanceOf(WarningInferenceResults.class)); assertEquals("Text classification result has no data", ((WarningInferenceResults) inferenceResults).getWarning()); } { - PyTorchResult torchResult = new PyTorchResult("foo", new double[][] { { 1.0 } }, 0L, null); + PyTorchResult torchResult = new PyTorchResult("foo", new double[][][] { { { 1.0 } } }, 0L, null); InferenceResults inferenceResults = processor.processResult(null, torchResult); assertThat(inferenceResults, instanceOf(WarningInferenceResults.class)); assertEquals( @@ -51,23 +52,25 @@ public void testInvalidResult() { } } + @SuppressWarnings("unchecked") public void testBuildRequest() throws IOException { - BertTokenizer tokenizer = BertTokenizer.builder( - Arrays.asList("Elastic", "##search", "fun", BertTokenizer.CLASS_TOKEN, BertTokenizer.SEPARATOR_TOKEN), - new BertTokenization(null, null, 512) - ).build(); + NlpTokenizer tokenizer = NlpTokenizer.build( + new Vocabulary( + Arrays.asList("Elastic", "##search", "fun", + BertTokenizer.CLASS_TOKEN, BertTokenizer.SEPARATOR_TOKEN, BertTokenizer.PAD_TOKEN)), + new DistilBertTokenization(null, null, 512)); TextClassificationConfig config = new TextClassificationConfig(new VocabularyConfig("test-index", "vocab"), null, null, null); TextClassificationProcessor processor = new TextClassificationProcessor(tokenizer, config); - NlpTask.Request request = processor.buildRequest("Elasticsearch fun", "request1"); + NlpTask.Request request = processor.getRequestBuilder().buildRequest(List.of("Elasticsearch fun"), "request1"); Map jsonDocAsMap = 
XContentHelper.convertToMap(request.processInput, true, XContentType.JSON).v2(); assertThat(jsonDocAsMap.keySet(), hasSize(3)); assertEquals("request1", jsonDocAsMap.get("request_id")); - assertEquals(Arrays.asList(3, 0, 1, 2, 4), jsonDocAsMap.get("tokens")); - assertEquals(Arrays.asList(1, 1, 1, 1, 1), jsonDocAsMap.get("arg_1")); + assertEquals(Arrays.asList(3, 0, 1, 2, 4), ((List>)jsonDocAsMap.get("tokens")).get(0)); + assertEquals(Arrays.asList(1, 1, 1, 1, 1), ((List>)jsonDocAsMap.get("arg_1")).get(0)); } public void testValidate() { diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/BertTokenizerTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/BertTokenizerTests.java index 3456982171789..c176a0c3a2227 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/BertTokenizerTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/BertTokenizerTests.java @@ -13,8 +13,10 @@ import java.util.Arrays; import java.util.Collections; +import java.util.List; import static org.hamcrest.Matchers.contains; +import static org.hamcrest.Matchers.hasSize; public class BertTokenizerTests extends ESTestCase { @@ -24,7 +26,8 @@ public void testTokenize() { new BertTokenization(null, false, null) ).build(); - TokenizationResult tokenization = tokenizer.tokenize("Elasticsearch fun"); + TokenizationResult tr = tokenizer.tokenize(List.of("Elasticsearch fun")); + TokenizationResult.Tokenization tokenization = tr.getTokenizations().get(0); assertThat(tokenization.getTokens(), contains("Elastic", "##search", "fun")); assertArrayEquals(new int[] {0, 1, 2}, tokenization.getTokenIds()); assertArrayEquals(new int[] {0, 0, 1}, tokenization.getTokenMap()); @@ -36,7 +39,8 @@ public void testTokenizeAppendSpecialTokens() { Tokenization.createDefault() ).build(); - TokenizationResult tokenization = tokenizer.tokenize("elasticsearch fun"); + TokenizationResult tr = tokenizer.tokenize(List.of("elasticsearch fun")); + TokenizationResult.Tokenization tokenization = tr.getTokenizations().get(0); assertThat(tokenization.getTokens(), contains("[CLS]", "elastic", "##search", "fun", "[SEP]")); assertArrayEquals(new int[] {3, 0, 1, 2, 4}, tokenization.getTokenIds()); assertArrayEquals(new int[] {-1, 0, 0, 1, -1}, tokenization.getTokenMap()); @@ -52,7 +56,8 @@ public void testNeverSplitTokens() { .setWithSpecialTokens(false) .build(); - TokenizationResult tokenization = tokenizer.tokenize("Elasticsearch " + specialToken + " fun"); + TokenizationResult tr = tokenizer.tokenize(List.of("Elasticsearch " + specialToken + " fun")); + TokenizationResult.Tokenization tokenization = tr.getTokenizations().get(0); assertThat(tokenization.getTokens(), contains("Elastic", "##search", specialToken, "fun")); assertArrayEquals(new int[] {0, 1, 3, 2}, tokenization.getTokenIds()); assertArrayEquals(new int[] {0, 0, 1, 2}, tokenization.getTokenMap()); @@ -67,12 +72,14 @@ public void testDoLowerCase() { .setWithSpecialTokens(false) .build(); - TokenizationResult tokenization = tokenizer.tokenize("Elasticsearch fun"); + TokenizationResult tr = tokenizer.tokenize(List.of("Elasticsearch fun")); + TokenizationResult.Tokenization tokenization = tr.getTokenizations().get(0); assertThat(tokenization.getTokens(), contains(BertTokenizer.UNKNOWN_TOKEN, "fun")); assertArrayEquals(new int[] {3, 2}, tokenization.getTokenIds()); assertArrayEquals(new int[] {0, 1}, tokenization.getTokenMap()); - 
tokenization = tokenizer.tokenize("elasticsearch fun"); + tr = tokenizer.tokenize(List.of("elasticsearch fun")); + tokenization = tr.getTokenizations().get(0); assertThat(tokenization.getTokens(), contains("elastic", "##search", "fun")); } @@ -82,7 +89,8 @@ public void testDoLowerCase() { .setWithSpecialTokens(false) .build(); - TokenizationResult tokenization = tokenizer.tokenize("Elasticsearch fun"); + TokenizationResult tr = tokenizer.tokenize(List.of("Elasticsearch fun")); + TokenizationResult.Tokenization tokenization = tr.getTokenizations().get(0); assertThat(tokenization.getTokens(), contains("elastic", "##search", "fun")); } } @@ -93,14 +101,54 @@ public void testPunctuation() { Tokenization.createDefault() ).setWithSpecialTokens(false).build(); - TokenizationResult tokenization = tokenizer.tokenize("Elasticsearch, fun."); + TokenizationResult tr = tokenizer.tokenize(List.of("Elasticsearch, fun.")); + TokenizationResult.Tokenization tokenization = tr.getTokenizations().get(0); assertThat(tokenization.getTokens(), contains("Elastic", "##search", ",", "fun", ".")); assertArrayEquals(new int[] {0, 1, 4, 2, 3}, tokenization.getTokenIds()); assertArrayEquals(new int[] {0, 0, 1, 2, 3}, tokenization.getTokenMap()); - tokenization = tokenizer.tokenize("Elasticsearch, fun [MASK]."); + tr = tokenizer.tokenize(List.of("Elasticsearch, fun [MASK].")); + tokenization = tr.getTokenizations().get(0); assertThat(tokenization.getTokens(), contains("Elastic", "##search", ",", "fun", "[MASK]", ".")); assertArrayEquals(new int[] {0, 1, 4, 2, 5, 3}, tokenization.getTokenIds()); assertArrayEquals(new int[] {0, 0, 1, 2, 3, 4}, tokenization.getTokenMap()); } + + public void testBatchInput() { + BertTokenizer tokenizer = BertTokenizer.builder( + Arrays.asList("Elastic", "##search", "fun", + "Pancake", "day", + "my", "little", "red", "car", + "God", "##zilla" + ), + new BertTokenization(null, false, null) + ).build(); + + TokenizationResult tr = tokenizer.tokenize(List.of("Elasticsearch", + "my little red car", + "Godzilla day", + "Godzilla Pancake red car day" + )); + assertThat(tr.getTokenizations(), hasSize(4)); + + TokenizationResult.Tokenization tokenization = tr.getTokenizations().get(0); + assertThat(tokenization.getTokens(), contains("Elastic", "##search")); + assertArrayEquals(new int[] {0, 1}, tokenization.getTokenIds()); + assertArrayEquals(new int[] {0, 0}, tokenization.getTokenMap()); + + tokenization = tr.getTokenizations().get(1); + assertThat(tokenization.getTokens(), contains("my", "little", "red", "car")); + assertArrayEquals(new int[] {5, 6, 7, 8}, tokenization.getTokenIds()); + assertArrayEquals(new int[] {0, 1, 2, 3}, tokenization.getTokenMap()); + + tokenization = tr.getTokenizations().get(2); + assertThat(tokenization.getTokens(), contains("God", "##zilla", "day")); + assertArrayEquals(new int[] {9, 10, 4}, tokenization.getTokenIds()); + assertArrayEquals(new int[] {0, 0, 1}, tokenization.getTokenMap()); + + tokenization = tr.getTokenizations().get(3); + assertThat(tokenization.getTokens(), contains("God", "##zilla", "Pancake", "red", "car", "day")); + assertArrayEquals(new int[] {9, 10, 3, 7, 8, 4}, tokenization.getTokenIds()); + assertArrayEquals(new int[] {0, 0, 1, 2, 3, 4}, tokenization.getTokenMap()); + } } From 32e364d39492e50a62a935cc4d3173bb5f4029fc Mon Sep 17 00:00:00 2001 From: Adam Locke Date: Wed, 1 Sep 2021 11:59:11 -0400 Subject: [PATCH 070/128] [DOCS] Clarify indexing a runtime field (#77117) * [DOCS] Clarify indexing a runtime field * Clarify wording based on reviewer 
feedback --- docs/reference/mapping/runtime.asciidoc | 22 ++++++++++++++-------- 1 file changed, 14 insertions(+), 8 deletions(-) diff --git a/docs/reference/mapping/runtime.asciidoc b/docs/reference/mapping/runtime.asciidoc index 2c084b3501041..97d15d9d39d03 100644 --- a/docs/reference/mapping/runtime.asciidoc +++ b/docs/reference/mapping/runtime.asciidoc @@ -87,12 +87,12 @@ your data, but can impact search performance based on the computation defined in the runtime script. To balance search performance and flexibility, index fields that you'll -commonly search for and filter on, such as a timestamp. {es} automatically uses -these indexed fields first when running a query, resulting in a fast response -time. You can then use runtime fields to limit the number of fields that {es} -needs to calculate values for. Using indexed fields in tandem with runtime -fields provides flexibility in the data that you index and how you define -queries for other fields. +frequently search for and filter on, such as a timestamp. {es} automatically +uses these indexed fields first when running a query, resulting in a fast +response time. You can then use runtime fields to limit the number of fields +that {es} needs to calculate values for. Using indexed fields in tandem with +runtime fields provides flexibility in the data that you index and how you +define queries for other fields. Use the <> to run searches that include runtime fields. This method of search helps to offset the performance impacts @@ -810,8 +810,14 @@ can define runtime fields in the <> of an index mapping. If you decide to index a runtime field for greater performance, just move the full runtime field definition (including the script) to the context of an index -mapping. This capability means you can write a script only once, and apply -it to any context that supports runtime fields. +mapping. {es} automatically uses these indexed fields to drive queries, +resulting in a fast response time. This capability means you can write a +script only once, and apply it to any context that supports runtime fields. + +You can then use runtime fields to limit the number of fields that {es} needs +to calculate values for. Using indexed fields in tandem with runtime fields +provides flexibility in the data that you index and how you define queries for +other fields. IMPORTANT: After indexing a runtime field, you cannot update the included script. If you need to change the script, create a new field with the updated From dde7d418a89aecd1c87b49e11b6c06161db335b8 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Wed, 1 Sep 2021 12:15:54 -0400 Subject: [PATCH 071/128] Add matching pattern to error in fields (#76903) This adds the pattern into the error message returned when trying to fetch fields. 
So this: ``` POST _search { "fields": [ { "field": "*", "format": "date_time" } ] } ``` Will return an error message like ``` error fetching [foo] which matches [*]: Field [foo] of type [keyword] doesn't support formats ``` --- .../test/search/330_fetch_fields.yml | 115 ++++++++++++++++++ .../search/fetch/subphase/FieldFetcher.java | 12 +- x-pack/qa/runtime-fields/build.gradle | 3 + 3 files changed, 129 insertions(+), 1 deletion(-) diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/330_fetch_fields.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/330_fetch_fields.yml index 70f40c413adf0..eea9df2a5de8c 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/330_fetch_fields.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/330_fetch_fields.yml @@ -855,6 +855,7 @@ Test nested field with sibling field resolving to DocValueFetcher: - match: { hits.hits.0.fields.number.2 : 3 } - match: { hits.hits.0.fields.number.3 : 5 } - match: { hits.hits.0.fields.number.4 : 6 } + --- Test token_count inside nested field doesn't fail: - skip: @@ -892,3 +893,117 @@ Test token_count inside nested field doesn't fail: body: _source: false fields: [ "*" ] + +--- +error includes field name: + - skip: + version: ' - 7.99.99' + reason: 'error changed in 8.0.0 to be backported to 7.15' + + - do: + indices.create: + index: test + body: + settings: + index.number_of_shards: 1 + mappings: + properties: + keyword: + type: keyword + date: + type: date + + - do: + index: + index: test + id: 1 + refresh: true + body: + keyword: "value" + date: "1990-12-29T22:30:00.000Z" + + - do: + catch: '/error fetching \[keyword\]: Field \[keyword\] of type \[keyword\] doesn''t support formats./' + search: + index: test + body: + fields: + - field: keyword + format: "yyyy/MM/dd" + +--- +error includes glob pattern: + - skip: + version: ' - 7.99.99' + reason: 'error changed in 8.0.0 to be backported to 7.15' + + - do: + indices.create: + index: test + body: + settings: + index.number_of_shards: 1 + mappings: + properties: + dkeyword: + type: keyword + date: + type: date + + - do: + index: + index: test + id: 1 + refresh: true + body: + dkeyword: "value" + date: "1990-12-29T22:30:00.000Z" + + - do: + catch: '/error fetching \[dkeyword\] which matched \[d\*\]: Field \[dkeyword\] of type \[keyword\] doesn''t support formats./' + search: + index: test + body: + fields: + - field: d* + format: "yyyy/MM/dd" + + +--- +error for flattened includes whole path: + - skip: + version: ' - 7.99.99' + reason: 'error changed in 8.0.0 to be backported to 7.15' + + - do: + indices.create: + index: test + body: + settings: + index.number_of_shards: 1 + mappings: + properties: + flattened: + type: flattened + + date: + type: date + + - do: + index: + index: test + id: 1 + refresh: true + body: + flattened: + foo: bar + date: "1990-12-29T22:30:00.000Z" + + - do: + catch: '/error fetching \[flattened.bar\]: Field \[flattened.bar\] of type \[flattened\] doesn''t support formats./' + search: + index: test + body: + fields: + - field: flattened.bar + format: "yyyy/MM/dd" diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/FieldFetcher.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/FieldFetcher.java index 9f39a2d9a3877..62fafa7f0caf7 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/FieldFetcher.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/FieldFetcher.java @@ -92,7 
+92,17 @@ private static FieldFetcher create(SearchExecutionContext context, } // only add concrete fields if they are not beneath a known nested field if (nestedParentPath == null) { - ValueFetcher valueFetcher = ft.valueFetcher(context, fieldAndFormat.format); + ValueFetcher valueFetcher; + try { + valueFetcher = ft.valueFetcher(context, fieldAndFormat.format); + } catch (IllegalArgumentException e) { + StringBuilder error = new StringBuilder("error fetching [").append(field).append(']'); + if (isWildcardPattern) { + error.append(" which matched [").append(fieldAndFormat.field).append(']'); + } + error.append(": ").append(e.getMessage()); + throw new IllegalArgumentException(error.toString(), e); + } fieldContexts.put(field, new FieldContext(field, valueFetcher)); } } diff --git a/x-pack/qa/runtime-fields/build.gradle b/x-pack/qa/runtime-fields/build.gradle index e5f5c228e6e4a..d984bdc79629e 100644 --- a/x-pack/qa/runtime-fields/build.gradle +++ b/x-pack/qa/runtime-fields/build.gradle @@ -80,6 +80,9 @@ subprojects { 'search.aggregation/20_terms/Global ordinals are loaded with the global_ordinals execution hint', 'search.aggregation/170_cardinality_metric/profiler string', 'search.aggregation/235_composite_sorted/*', + // The error messages are different + 'search/330_fetch_fields/error includes field name', + 'search/330_fetch_fields/error includes glob pattern', /////// NOT SUPPORTED /////// ].join(',') } From 4a4d604463014bfca49823dc80de38e58f51e5a4 Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Tue, 31 Aug 2021 16:21:30 +0200 Subject: [PATCH 072/128] Add 7.14.2 version. --- .ci/bwcVersions | 1 + server/src/main/java/org/elasticsearch/Version.java | 1 + 2 files changed, 2 insertions(+) diff --git a/.ci/bwcVersions b/.ci/bwcVersions index 8312b5969cae6..bc7e49a59467f 100644 --- a/.ci/bwcVersions +++ b/.ci/bwcVersions @@ -40,6 +40,7 @@ BWC_VERSION: - "7.13.4" - "7.14.0" - "7.14.1" + - "7.14.2" - "7.15.0" - "7.16.0" - "8.0.0" diff --git a/server/src/main/java/org/elasticsearch/Version.java b/server/src/main/java/org/elasticsearch/Version.java index c86b7db1a4186..1f071c9d8f4ed 100644 --- a/server/src/main/java/org/elasticsearch/Version.java +++ b/server/src/main/java/org/elasticsearch/Version.java @@ -87,6 +87,7 @@ public class Version implements Comparable, ToXContentFragment { public static final Version V_7_13_4 = new Version(7130499, org.apache.lucene.util.Version.LUCENE_8_8_2); public static final Version V_7_14_0 = new Version(7140099, org.apache.lucene.util.Version.LUCENE_8_9_0); public static final Version V_7_14_1 = new Version(7140199, org.apache.lucene.util.Version.LUCENE_8_9_0); + public static final Version V_7_14_2 = new Version(7140299, org.apache.lucene.util.Version.LUCENE_8_9_0); public static final Version V_7_15_0 = new Version(7150099, org.apache.lucene.util.Version.LUCENE_8_9_0); public static final Version V_7_16_0 = new Version(7160099, org.apache.lucene.util.Version.LUCENE_8_9_0); public static final Version V_8_0_0 = new Version(8000099, org.apache.lucene.util.Version.LUCENE_8_9_0); From 007469af6352abe25c43b4badce5542b28e67fe8 Mon Sep 17 00:00:00 2001 From: Lisa Cawley Date: Wed, 1 Sep 2021 10:26:06 -0700 Subject: [PATCH 073/128] [DOCS] Replaces index pattern in ML docs (#77041) --- .../ml/anomaly-detection/ml-configuring-url.asciidoc | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/reference/ml/anomaly-detection/ml-configuring-url.asciidoc b/docs/reference/ml/anomaly-detection/ml-configuring-url.asciidoc index 
bd46bbd01e98b..032533df06ee0 100644 --- a/docs/reference/ml/anomaly-detection/ml-configuring-url.asciidoc +++ b/docs/reference/ml/anomaly-detection/ml-configuring-url.asciidoc @@ -21,7 +21,7 @@ image::images/ml-customurl-edit.gif["Add a custom URL in {kib}",width=75%] For each custom URL, you must supply the URL and a label, which is the link text that appears in the anomalies table. You can also optionally supply a time range. When you link to *Discover* or a {kib} dashboard, you'll have additional -options for specifying the pertinent index pattern or dashboard name and query +options for specifying the pertinent {data-source} or dashboard name and query entities. [discrete] @@ -90,7 +90,7 @@ your web browser so that it does not block pop-up windows or create an exception for your {kib} URL. * When creating a link to a {kib} dashboard, the URLs for dashboards can be very long. Be careful of typos, end of line characters, and URL encoding. Also ensure -you use the appropriate index ID for the target {kib} index pattern. +you use the appropriate index ID for the target {kib} {data-source}. * If you use an influencer name for string substitution, keep in mind that it might not always be available in the analysis results and the URL is invalid in those cases. There is not always a statistically significant influencer for each From 8b9c52ebe36d2a569ad8cd9a48917a859071e271 Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Wed, 1 Sep 2021 20:45:22 +0200 Subject: [PATCH 074/128] Fix Queued Snapshot Clone not Starting after Data Node Drops Out (#77111) We have to account for queued up clones when dealing with nodes dropping out and start them when they become ready to execute because of a node leaving the cluster. Added test to reproduce the issue in #77101 and another test to verify that the more complex case of clone queued after snapshot queued after clone still works correctly as well. The solution here is the most direct fix I could think of and the by far easiest to backport. That said, I added a TODO that asks for a follow-up that should allow for completely removing the duplicate code across handling shard updates and external changes. The difference between the two ways of updating the state is a left-over from the time before we had concurrent operations and has become a needless complexity nowadays. 
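For illustration only (this sketch is not part of the patch and all names in it are invented), the core idea can be reduced to: when a shard-level snapshot fails because its data node left the cluster, look up any clone shard that was queued behind it for the same repository shard and start it, instead of leaving it queued forever.

```java
// Standalone, hypothetical sketch of the idea above; QueuedCloneSketch, State and the
// string shard ids are made up for illustration and do not exist in Elasticsearch.
import java.util.HashMap;
import java.util.Map;

public class QueuedCloneSketch {
    enum State { QUEUED, STARTED, FAILED }

    public static void main(String[] args) {
        // repository shard id -> state of the clone shard queued for that repository shard
        Map<String, State> cloneShards = new HashMap<>();
        cloneShards.put("index-test/0", State.QUEUED);

        // shard snapshots that just failed because their data node dropped out
        Map<String, State> knownFailures = Map.of("index-test/0", State.FAILED);

        // promote queued clone shards whose blocking shard snapshot has failed
        knownFailures.forEach((shardId, failure) -> {
            if (cloneShards.get(shardId) == State.QUEUED) {
                // the real fix also reuses the shard generation from the failed status here
                cloneShards.put(shardId, State.STARTED);
            }
        });

        System.out.println(cloneShards); // prints {index-test/0=STARTED}
    }
}
```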
closes #77101 --- .../snapshots/CloneSnapshotIT.java | 28 ++++++ .../snapshots/ConcurrentSnapshotsIT.java | 60 ++++++++++++ .../snapshots/SnapshotsService.java | 94 +++++++++++++++---- 3 files changed, 163 insertions(+), 19 deletions(-) diff --git a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/CloneSnapshotIT.java b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/CloneSnapshotIT.java index d756056f1ba77..3f0e055994eda 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/CloneSnapshotIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/CloneSnapshotIT.java @@ -794,6 +794,34 @@ public void testRemoveFailedCloneFromCSWithQueuedSnapshotInProgress() throws Exc assertAcked(startDeleteSnapshot(repoName, sourceSnapshot).get()); } + public void testCloneAfterFailedShardSnapshot() throws Exception { + final String masterNode = internalCluster().startMasterOnlyNode(); + final String dataNode = internalCluster().startDataOnlyNode(); + final String repoName = "test-repo"; + createRepository(repoName, "mock"); + final String testIndex = "index-test"; + createIndex(testIndex); + final String sourceSnapshot = "source-snapshot"; + createFullSnapshot(repoName, sourceSnapshot); + indexRandomDocs(testIndex, randomIntBetween(1, 100)); + blockDataNode(repoName, dataNode); + final ActionFuture snapshotFuture = client(masterNode).admin() + .cluster() + .prepareCreateSnapshot(repoName, "full-snapshot") + .execute(); + awaitNumberOfSnapshotsInProgress(1); + waitForBlock(dataNode, repoName); + final ActionFuture cloneFuture = client(masterNode).admin() + .cluster() + .prepareCloneSnapshot(repoName, sourceSnapshot, "target-snapshot") + .setIndices(testIndex) + .execute(); + awaitNumberOfSnapshotsInProgress(2); + internalCluster().stopNode(dataNode); + assertAcked(cloneFuture.get()); + assertTrue(snapshotFuture.isDone()); + } + private ActionFuture startCloneFromDataNode( String repoName, String sourceSnapshot, diff --git a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/ConcurrentSnapshotsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/ConcurrentSnapshotsIT.java index 00bac57b9238c..34eabc4e17298 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/ConcurrentSnapshotsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/ConcurrentSnapshotsIT.java @@ -1690,6 +1690,66 @@ public void testIndexDeletedWhileSnapshotQueuedAfterClone() throws Exception { ); } + public void testIndexDeletedWhileSnapshotAndCloneQueuedAfterClone() throws Exception { + final String master = internalCluster().startMasterOnlyNode(LARGE_SNAPSHOT_POOL_SETTINGS); + internalCluster().startDataOnlyNode(); + final String index1 = "index-1"; + final String index2 = "index-2"; + createIndexWithContent(index1); + createIndexWithContent(index2); + + final String repository = "test-repo"; + createRepository(repository, "mock"); + + final String sourceSnapshot = "source-snapshot"; + createFullSnapshot(repository, sourceSnapshot); + + final IndexId index1Id = getRepositoryData(repository).resolveIndexId(index1); + blockMasterOnShardLevelSnapshotFile(repository, index1Id.getId()); + + final String cloneTarget = "target-snapshot"; + final ActionFuture cloneSnapshot = clusterAdmin().prepareCloneSnapshot( + repository, + sourceSnapshot, + cloneTarget + ).setIndices(index1, index2).execute(); + awaitNumberOfSnapshotsInProgress(1); + waitForBlock(master, repository); + + final ActionFuture 
snapshot3 = clusterAdmin().prepareCreateSnapshot(repository, "snapshot-3") + .setIndices(index1, index2) + .setWaitForCompletion(true) + .setPartial(true) + .execute(); + final ActionFuture snapshot2 = clusterAdmin().prepareCreateSnapshot(repository, "snapshot-2") + .setIndices(index2) + .setWaitForCompletion(true) + .execute(); + assertSuccessful(snapshot2); + awaitNumberOfSnapshotsInProgress(2); + assertFalse(snapshot3.isDone()); + + final String cloneTarget2 = "target-snapshot-2"; + final ActionFuture cloneSnapshot2 = clusterAdmin().prepareCloneSnapshot( + repository, + sourceSnapshot, + cloneTarget2 + ).setIndices(index1, index2).execute(); + + assertAcked(admin().indices().prepareDelete(index1).get()); + assertSuccessful(snapshot3); + unblockNode(repository, master); + + assertAcked(cloneSnapshot.get()); + assertAcked(cloneSnapshot2.get()); + assertAcked(startDeleteSnapshot(repository, cloneTarget).get()); + + assertThat( + clusterAdmin().prepareSnapshotStatus().setSnapshots("snapshot-2", "snapshot-3").setRepository(repository).get().getSnapshots(), + hasSize(2) + ); + } + public void testQueuedAfterFailedShardSnapshot() throws Exception { internalCluster().startMasterOnlyNode(); final String dataNode = internalCluster().startDataOnlyNode(); diff --git a/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java b/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java index 2db67625506c0..a8f389300e62b 100644 --- a/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java +++ b/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java @@ -1105,6 +1105,10 @@ private void processExternalChanges(boolean changedNodes, boolean startShards) { public ClusterState execute(ClusterState currentState) { RoutingTable routingTable = currentState.routingTable(); final SnapshotsInProgress snapshots = currentState.custom(SnapshotsInProgress.TYPE, SnapshotsInProgress.EMPTY); + final SnapshotDeletionsInProgress deletes = currentState.custom( + SnapshotDeletionsInProgress.TYPE, + SnapshotDeletionsInProgress.EMPTY + ); DiscoveryNodes nodes = currentState.nodes(); boolean changed = false; final EnumSet statesToUpdate; @@ -1122,19 +1126,73 @@ public ClusterState execute(ClusterState currentState) { // We keep a cache of shards that failed in this map. If we fail a shardId for a given repository because of // a node leaving or shard becoming unassigned for one snapshot, we will also fail it for all subsequent enqueued - // snapshots - // for the same repository - final Map> knownFailures = new HashMap<>(); - + // snapshots for the same repository + // TODO: the code in this state update duplicates large chunks of the logic in #SHARD_STATE_EXECUTOR. + // We should refactor it to ideally also go through #SHARD_STATE_EXECUTOR by hand-crafting shard state updates + // that encapsulate nodes leaving or indices having been deleted and passing them to the executor instead. 
+ final Map> knownFailures = new HashMap<>(); for (final SnapshotsInProgress.Entry snapshot : snapshots.entries()) { if (statesToUpdate.contains(snapshot.state())) { - // Currently initializing clone - if (snapshot.isClone() && snapshot.shardsByRepoShardId().isEmpty()) { - if (initializingClones.contains(snapshot.snapshot())) { - updatedSnapshotEntries.add(snapshot); + if (snapshot.isClone()) { + if (snapshot.shardsByRepoShardId().isEmpty()) { + // Currently initializing clone + if (initializingClones.contains(snapshot.snapshot())) { + updatedSnapshotEntries.add(snapshot); + } else { + logger.debug("removing not yet start clone operation [{}]", snapshot); + changed = true; + } } else { - logger.debug("removing not yet start clone operation [{}]", snapshot); - changed = true; + // see if any clones may have had a shard become available for execution because of failures + if (deletes.hasExecutingDeletion(snapshot.repository())) { + // Currently executing a delete for this repo, no need to try and update any clone operations. + // The logic for finishing the delete will update running clones with the latest changes. + updatedSnapshotEntries.add(snapshot); + continue; + } + ImmutableOpenMap.Builder clones = null; + InFlightShardSnapshotStates inFlightShardSnapshotStates = null; + for (Map.Entry failureEntry : knownFailures.getOrDefault( + snapshot.repository(), + Collections.emptyMap() + ).entrySet()) { + final RepositoryShardId repositoryShardId = failureEntry.getKey(); + final ShardSnapshotStatus existingStatus = snapshot.shardsByRepoShardId().get(repositoryShardId); + if (ShardSnapshotStatus.UNASSIGNED_QUEUED.equals(existingStatus)) { + if (inFlightShardSnapshotStates == null) { + inFlightShardSnapshotStates = InFlightShardSnapshotStates.forRepo( + snapshot.repository(), + updatedSnapshotEntries + ); + } + if (inFlightShardSnapshotStates.isActive( + repositoryShardId.indexName(), + repositoryShardId.shardId() + )) { + // we already have this shard assigned to another task + continue; + } + if (clones == null) { + clones = ImmutableOpenMap.builder(snapshot.shardsByRepoShardId()); + } + // We can use the generation from the shard failure to start the clone operation here + // because #processWaitingShardsAndRemovedNodes adds generations to failure statuses that allow + // us to start another clone. + // The usual route via InFlightShardSnapshotStates is not viable here because it would require + // a consistent view of the RepositoryData which we don't have here because this state update + // runs over all repositories at once. 
+ clones.put( + repositoryShardId, + new ShardSnapshotStatus(nodes.getLocalNodeId(), failureEntry.getValue().generation()) + ); + } + } + if (clones != null) { + changed = true; + updatedSnapshotEntries.add(snapshot.withClones(clones.build())); + } else { + updatedSnapshotEntries.add(snapshot); + } } } else { ImmutableOpenMap shards = processWaitingShardsAndRemovedNodes( @@ -1236,11 +1294,9 @@ private static ImmutableOpenMap processWaitingShar SnapshotsInProgress.Entry entry, RoutingTable routingTable, DiscoveryNodes nodes, - Map knownFailures + Map knownFailures ) { - if (entry.isClone()) { - return null; - } + assert entry.isClone() == false : "clones take a different path"; boolean snapshotChanged = false; ImmutableOpenMap.Builder shards = ImmutableOpenMap.builder(); for (ObjectObjectCursor shardEntry : entry.shardsByRepoShardId()) { @@ -1248,16 +1304,16 @@ private static ImmutableOpenMap processWaitingShar ShardId shardId = entry.shardId(shardEntry.key); if (shardStatus.equals(ShardSnapshotStatus.UNASSIGNED_QUEUED)) { // this shard snapshot is waiting for a previous snapshot to finish execution for this shard - final ShardSnapshotStatus knownFailure = knownFailures.get(shardId); + final ShardSnapshotStatus knownFailure = knownFailures.get(shardEntry.key); if (knownFailure == null) { final IndexRoutingTable indexShardRoutingTable = routingTable.index(shardId.getIndex()); if (indexShardRoutingTable == null) { - // shard became unassigned while queued so we fail as missing here + // shard became unassigned while queued after a delete or clone operation so we can fail as missing here assert entry.partial(); snapshotChanged = true; logger.debug("failing snapshot of shard [{}] because index got deleted", shardId); shards.put(shardId, ShardSnapshotStatus.MISSING); - knownFailures.put(shardId, ShardSnapshotStatus.MISSING); + knownFailures.put(shardEntry.key, ShardSnapshotStatus.MISSING); } else { // if no failure is known for the shard we keep waiting shards.put(shardId, shardStatus); @@ -1299,7 +1355,7 @@ private static ImmutableOpenMap processWaitingShar shardStatus.generation() ); shards.put(shardId, failedState); - knownFailures.put(shardId, failedState); + knownFailures.put(shardEntry.key, failedState); } else if (shardStatus.state().completed() == false && shardStatus.nodeId() != null) { if (nodes.nodeExists(shardStatus.nodeId())) { shards.put(shardId, shardStatus); @@ -1314,7 +1370,7 @@ private static ImmutableOpenMap processWaitingShar shardStatus.generation() ); shards.put(shardId, failedState); - knownFailures.put(shardId, failedState); + knownFailures.put(shardEntry.key, failedState); } } else { shards.put(shardId, shardStatus); From 9570236e4de78f4960ea700be742472ed49af5b7 Mon Sep 17 00:00:00 2001 From: Bogdan Pintea Date: Wed, 1 Sep 2021 21:20:29 +0200 Subject: [PATCH 075/128] EQL: Sequences will now support nano-timestamps (#76953) With this change nanosecond-resolution timestamps can be used to drive the EQL sequence query. 
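As a rough, hypothetical illustration (not code from this patch; the class name and the sample value are invented), a nanosecond unix timestamp can be rendered as a millisecond value plus a six-digit sub-millisecond fraction before being indexed into a date_nanos field, which mirrors the string-splitting approach the updated DataLoader uses:

```java
// Minimal sketch: render a nanos-since-epoch value as "millis[.fraction]" for indexing.
// Assumes the value is large enough to have more than six digits.
public final class NanosTimestampSketch {
    static String toEsTimestamp(long unixNanos) {
        String digits = Long.toString(unixNanos);
        String millis = digits.substring(0, digits.length() - 6);   // 1 ms == 1_000_000 ns
        String fraction = digits.substring(digits.length() - 6);    // sub-millisecond digits
        // drop the fraction entirely when there is no sub-millisecond precision
        return fraction.equals("000000") ? millis : millis + "." + fraction;
    }

    public static void main(String[] args) {
        // a Windows-filetime-sized value reinterpreted as unix nanos (lands in the 1970s)
        System.out.println(toEsTimestamp(131883573237130000L)); // 131883573237.130000
    }
}
```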
--- .../elasticsearch/test/eql/DataLoader.java | 52 +- .../test/eql/EqlDateNanosSpecTestCase.java | 9 +- .../test/eql/EqlExtraSpecTestCase.java | 3 +- .../elasticsearch/test/eql/EqlSpecLoader.java | 12 +- .../test/eql/EqlSpecTestCase.java | 9 +- ...anos.mapping => endgame-140-nanos.mapping} | 0 .../main/resources/data/eql_date_nanos.data | 2081 ----------------- .../src/main/resources/test_queries.toml | 3 +- .../resources/test_queries_date_nanos.toml | 6 +- .../xpack/eql/EqlDateNanosIT.java | 4 +- .../assembler/BoxedQueryRequest.java | 5 +- .../eql/execution/assembler/Criterion.java | 9 +- .../eql/execution/search/MillisTimestamp.java | 42 + .../eql/execution/search/NanosTimestamp.java | 31 + .../xpack/eql/execution/search/Ordinal.java | 22 +- .../xpack/eql/execution/search/Timestamp.java | 57 + .../extractor/TimestampFieldHitExtractor.java | 4 +- .../execution/sequence/SequenceMatcher.java | 6 +- .../assembler/ImplicitTiebreakerTests.java | 2 +- .../assembler/SequenceSpecTests.java | 5 +- .../CriterionOrdinalExtractionTests.java | 13 +- .../eql/execution/search/OrdinalTests.java | 57 +- .../sequence/CircuitBreakerTests.java | 8 +- 23 files changed, 264 insertions(+), 2176 deletions(-) rename x-pack/plugin/eql/qa/common/src/main/resources/data/{eql_date_nanos.mapping => endgame-140-nanos.mapping} (100%) delete mode 100644 x-pack/plugin/eql/qa/common/src/main/resources/data/eql_date_nanos.data create mode 100644 x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/search/MillisTimestamp.java create mode 100644 x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/search/NanosTimestamp.java create mode 100644 x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/search/Timestamp.java diff --git a/x-pack/plugin/eql/qa/common/src/main/java/org/elasticsearch/test/eql/DataLoader.java b/x-pack/plugin/eql/qa/common/src/main/java/org/elasticsearch/test/eql/DataLoader.java index ca19133f46714..9fc49568568a1 100644 --- a/x-pack/plugin/eql/qa/common/src/main/java/org/elasticsearch/test/eql/DataLoader.java +++ b/x-pack/plugin/eql/qa/common/src/main/java/org/elasticsearch/test/eql/DataLoader.java @@ -6,6 +6,7 @@ */ package org.elasticsearch.test.eql; +import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.instanceOf; import static org.junit.Assert.assertThat; @@ -18,6 +19,7 @@ import java.util.List; import java.util.Map; import java.util.Map.Entry; +import java.util.function.Consumer; import org.apache.http.HttpHost; import org.apache.logging.log4j.LogManager; @@ -44,20 +46,21 @@ * Loads EQL dataset into ES. * * Checks for predefined indices: - * - endgame-140 - for existing data - * - extra - additional data + * - endgame-140 - for existing data + * - endgame-140-nanos - same as endgame-140, but with nano-precision timestamps + * - extra - additional data * * While the loader could be made generic, the queries are bound to each index and generalizing that would make things way too complicated. 
*/ public class DataLoader { public static final String TEST_INDEX = "endgame-140"; public static final String TEST_EXTRA_INDEX = "extra"; - public static final String DATE_NANOS_INDEX = "eql_date_nanos"; + public static final String TEST_NANOS_INDEX = "endgame-140-nanos"; private static final Map replacementPatterns = Collections.unmodifiableMap(getReplacementPatterns()); - private static final long FILETIME_EPOCH_DIFF = 11644473600000L; - private static final long FILETIME_ONE_MILLISECOND = 10 * 1000; + private static final long FILETIME_EPOCH_DIFF = 11644473600000L; // millis delta from the start of year 1601 (Windows filetime) to 1970 + private static final long FILETIME_ONE_MILLISECOND = 10 * 1000; // Windows filetime is in 100-nanoseconds ticks // runs as java main private static boolean main = false; @@ -86,33 +89,34 @@ public static void loadDatasetIntoEs(RestHighLevelClient client, // // Main Index // - load(client, TEST_INDEX, true, p); + load(client, TEST_INDEX, null, DataLoader::timestampToUnixMillis, p); // // Aux Index // - load(client, TEST_EXTRA_INDEX, false, p); + load(client, TEST_EXTRA_INDEX, null, null, p); // // Date_Nanos index // - // The data for this index are identical to the endgame-140.data with only the values for @timestamp changed. + // The data for this index is loaded from the same endgame-140.data sample, only having the mapping for @timestamp changed: the + // chosen Windows filetime timestamps (2017+) can coincidentally also be readily used as nano-resolution unix timestamps (1973+). // There are mixed values with and without nanos precision so that the filtering is properly tested for both cases. - load(client, DATE_NANOS_INDEX, false, p); + load(client, TEST_NANOS_INDEX, TEST_INDEX, DataLoader::timestampToUnixNanos, p); } - private static void load(RestHighLevelClient client, String indexName, boolean winFileTime, + private static void load(RestHighLevelClient client, String indexName, String dataName, Consumer> datasetTransform, CheckedBiFunction p) throws IOException { String name = "/data/" + indexName + ".mapping"; URL mapping = DataLoader.class.getResource(name); if (mapping == null) { throw new IllegalArgumentException("Cannot find resource " + name); } - name = "/data/" + indexName + ".data"; + name = "/data/" + (dataName != null ? 
dataName : indexName) + ".data"; URL data = DataLoader.class.getResource(name); if (data == null) { throw new IllegalArgumentException("Cannot find resource " + name); } createTestIndex(client, indexName, readMapping(mapping)); - loadData(client, indexName, winFileTime, data, p); + loadData(client, indexName, datasetTransform, data, p); } private static void createTestIndex(RestHighLevelClient client, String indexName, String mapping) throws IOException { @@ -147,8 +151,8 @@ private static CharSequence randomOf(String...values) { } @SuppressWarnings("unchecked") - private static void loadData(RestHighLevelClient client, String indexName, boolean winfileTime, URL resource, - CheckedBiFunction p) + private static void loadData(RestHighLevelClient client, String indexName, Consumer> datasetTransform, + URL resource, CheckedBiFunction p) throws IOException { BulkRequest bulk = new BulkRequest(); bulk.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); @@ -158,8 +162,8 @@ private static void loadData(RestHighLevelClient client, String indexName, boole for (Object item : list) { assertThat(item, instanceOf(Map.class)); Map entry = (Map) item; - if (winfileTime) { - transformDataset(entry); + if (datasetTransform != null) { + datasetTransform.accept(entry); } bulk.add(new IndexRequest(indexName).source(entry, XContentType.JSON)); } @@ -175,7 +179,7 @@ private static void loadData(RestHighLevelClient client, String indexName, boole } } - private static void transformDataset(Map entry) { + private static void timestampToUnixMillis(Map entry) { Object object = entry.get("timestamp"); assertThat(object, instanceOf(Long.class)); Long ts = (Long) object; @@ -183,6 +187,20 @@ private static void transformDataset(Map entry) { entry.put("@timestamp", winFileTimeToUnix(ts)); } + private static void timestampToUnixNanos(Map entry) { + Object object = entry.get("timestamp"); + assertThat(object, instanceOf(Long.class)); + // interpret the value as nanos since the unix epoch + String timestamp = object.toString(); + assertThat(timestamp.length(), greaterThan(12)); + // avoid double approximations and BigDecimal ops + String millis = timestamp.substring(0, 12); + String milliFraction = timestamp.substring(12); + // strip the fractions right away if not actually present + entry.put("@timestamp", milliFraction.equals("000000") ? millis : millis + "." 
+ milliFraction); + entry.put("timestamp", ((long) object)/1_000_000L); + } + public static long winFileTimeToUnix(final long filetime) { long ts = (filetime / FILETIME_ONE_MILLISECOND); return ts - FILETIME_EPOCH_DIFF; diff --git a/x-pack/plugin/eql/qa/common/src/main/java/org/elasticsearch/test/eql/EqlDateNanosSpecTestCase.java b/x-pack/plugin/eql/qa/common/src/main/java/org/elasticsearch/test/eql/EqlDateNanosSpecTestCase.java index 1f9f6a63f6c70..3db287fb203bd 100644 --- a/x-pack/plugin/eql/qa/common/src/main/java/org/elasticsearch/test/eql/EqlDateNanosSpecTestCase.java +++ b/x-pack/plugin/eql/qa/common/src/main/java/org/elasticsearch/test/eql/EqlDateNanosSpecTestCase.java @@ -7,23 +7,22 @@ package org.elasticsearch.test.eql; -import static org.elasticsearch.test.eql.DataLoader.DATE_NANOS_INDEX; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import java.util.HashSet; import java.util.List; -import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; +import static org.elasticsearch.test.eql.DataLoader.TEST_NANOS_INDEX; public abstract class EqlDateNanosSpecTestCase extends BaseEqlSpecTestCase { @ParametersFactory(shuffle = false, argumentFormatting = PARAM_FORMATTING) public static List readTestSpecs() throws Exception { - return asArray(EqlSpecLoader.load("/test_queries_date_nanos.toml", new HashSet<>())); + return asArray(EqlSpecLoader.load("/test_queries_date_nanos.toml", "/test_queries.toml")); } // constructor for "local" rest tests public EqlDateNanosSpecTestCase(String query, String name, long[] eventIds) { - this(DATE_NANOS_INDEX, query, name, eventIds); + this(TEST_NANOS_INDEX, query, name, eventIds); } // constructor for multi-cluster tests diff --git a/x-pack/plugin/eql/qa/common/src/main/java/org/elasticsearch/test/eql/EqlExtraSpecTestCase.java b/x-pack/plugin/eql/qa/common/src/main/java/org/elasticsearch/test/eql/EqlExtraSpecTestCase.java index a7ab7bb975c75..a669feb3c179f 100644 --- a/x-pack/plugin/eql/qa/common/src/main/java/org/elasticsearch/test/eql/EqlExtraSpecTestCase.java +++ b/x-pack/plugin/eql/qa/common/src/main/java/org/elasticsearch/test/eql/EqlExtraSpecTestCase.java @@ -9,7 +9,6 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import java.util.HashSet; import java.util.List; import static org.elasticsearch.test.eql.DataLoader.TEST_EXTRA_INDEX; @@ -18,7 +17,7 @@ public abstract class EqlExtraSpecTestCase extends BaseEqlSpecTestCase { @ParametersFactory(shuffle = false, argumentFormatting = PARAM_FORMATTING) public static List readTestSpecs() throws Exception { - return asArray(EqlSpecLoader.load("/test_extra.toml", new HashSet<>())); + return asArray(EqlSpecLoader.load("/test_extra.toml")); } // constructor for "local" rest tests diff --git a/x-pack/plugin/eql/qa/common/src/main/java/org/elasticsearch/test/eql/EqlSpecLoader.java b/x-pack/plugin/eql/qa/common/src/main/java/org/elasticsearch/test/eql/EqlSpecLoader.java index 34b4970300fbe..e99c0e153fd50 100644 --- a/x-pack/plugin/eql/qa/common/src/main/java/org/elasticsearch/test/eql/EqlSpecLoader.java +++ b/x-pack/plugin/eql/qa/common/src/main/java/org/elasticsearch/test/eql/EqlSpecLoader.java @@ -15,6 +15,7 @@ import java.io.InputStream; import java.util.ArrayList; +import java.util.HashSet; import java.util.List; import java.util.Set; @@ -29,6 +30,15 @@ public static List load(String path, Set uniqueTestNames) throw } } + public static List load(String ...paths) throws Exception { + Set uniqueTestNames = new HashSet<>(); + List specs = new 
ArrayList<>(); + for (String path: paths) { + specs.addAll(load(path, uniqueTestNames)); + } + return specs; + } + private static void validateAndAddSpec(List specs, EqlSpec spec, Set uniqueTestNames) { if (Strings.isNullOrEmpty(spec.name())) { throw new IllegalArgumentException("Read a test without a name value"); @@ -41,7 +51,7 @@ private static void validateAndAddSpec(List specs, EqlSpec spec, Set readTestSpecs() throws Exception { // Load EQL validation specs - Set uniqueTestNames = new HashSet<>(); - List specs = EqlSpecLoader.load("/test_queries.toml", uniqueTestNames); - specs.addAll(EqlSpecLoader.load("/additional_test_queries.toml", uniqueTestNames)); - specs.addAll(EqlSpecLoader.load("/test_queries_date.toml", uniqueTestNames)); - - return asArray(specs); + return asArray(EqlSpecLoader.load("/test_queries.toml", "/additional_test_queries.toml", "/test_queries_date.toml")); } @Override diff --git a/x-pack/plugin/eql/qa/common/src/main/resources/data/eql_date_nanos.mapping b/x-pack/plugin/eql/qa/common/src/main/resources/data/endgame-140-nanos.mapping similarity index 100% rename from x-pack/plugin/eql/qa/common/src/main/resources/data/eql_date_nanos.mapping rename to x-pack/plugin/eql/qa/common/src/main/resources/data/endgame-140-nanos.mapping diff --git a/x-pack/plugin/eql/qa/common/src/main/resources/data/eql_date_nanos.data b/x-pack/plugin/eql/qa/common/src/main/resources/data/eql_date_nanos.data deleted file mode 100644 index 8ca3804725f49..0000000000000 --- a/x-pack/plugin/eql/qa/common/src/main/resources/data/eql_date_nanos.data +++ /dev/null @@ -1,2081 +0,0 @@ - -[ - { - "event_subtype_full": "already_running", - "event_type": "process", - "event_type_full": "process_event", - "opcode": 3, - "pid": 0, - "process_name": "System Idle Process", - "serial_event_id": 1, - "subtype": "create", - "@timestamp": "1970-01-02T08:20:44.000Z", - "unique_pid": 1 - }, - { - "event_subtype_full": "already_running", - "event_type": "process", - "event_type_full": "process_event", - "opcode": 3, - "parent_process_name": "System Idle Process", - "pid": 4, - "process_name": "System", - "serial_event_id": 2, - "subtype": "create", - "@timestamp": "1970-01-02T12:31:25.510Z", - "unique_pid": 2, - "unique_ppid": 1, - "user_domain": "NT AUTHORITY", - "user_name": "SYSTEM" - }, - { - "command_line": "\\SystemRoot\\System32\\smss.exe", - "event_subtype_full": "already_running", - "event_type": "process", - "event_type_full": "process_event", - "md5": "63d3c30b497347495b8ea78a38188969", - "opcode": 3, - "parent_process_name": "System", - "pid": 284, - "ppid": 4, - "process_name": "smss.exe", - "process_path": "C:\\Windows\\System32\\smss.exe", - "serial_event_id": 3, - "subtype": "create", - "@timestamp": "1970-01-02T12:31:25.510Z", - "unique_pid": 3, - "unique_ppid": 2, - "user_domain": "NT AUTHORITY", - "user_name": "SYSTEM" - }, - { - "command_line": "%SystemRoot%\\system32\\csrss.exe ObjectDirectory=\\Windows SharedSection=1024,20480,768 Windows=On SubSystemType=Windows ServerDll=basesrv,1 ServerDll=winsrv:UserServerDllInitialization,3 ServerDll=winsrv:ConServerDllInitialization,2 ServerDll=sxssrv,4 ProfileControl=Off MaxRequestThreads=16", - "event_subtype_full": "already_running", - "event_type": "process", - "event_type_full": "process_event", - "md5": "60c2862b4bf0fd9f582ef344c2b1ec72", - "opcode": 3, - "pid": 372, - "ppid": 364, - "process_name": "csrss.exe", - "process_path": "C:\\Windows\\System32\\csrss.exe", - "serial_event_id": 4, - "subtype": "create", - "@timestamp": 
"1970-01-02T12:31:25.510Z", - "unique_pid": 4, - "unique_ppid": 0, - "user_domain": "NT AUTHORITY", - "user_name": "SYSTEM" - }, - { - "command_line": "wininit.exe", - "event_subtype_full": "already_running", - "event_type": "process", - "event_type_full": "process_event", - "md5": "94355c28c1970635a31b3fe52eb7ceba", - "opcode": 3, - "pid": 424, - "ppid": 364, - "process_name": "wininit.exe", - "process_path": "C:\\Windows\\System32\\wininit.exe", - "serial_event_id": 5, - "subtype": "create", - "@timestamp": "1970-01-02T12:31:25.510Z", - "unique_pid": 5, - "unique_ppid": 0, - "user_domain": "NT AUTHORITY", - "user_name": "SYSTEM" - }, - { - "command_line": "%SystemRoot%\\system32\\csrss.exe ObjectDirectory=\\Windows SharedSection=1024,20480,768 Windows=On SubSystemType=Windows ServerDll=basesrv,1 ServerDll=winsrv:UserServerDllInitialization,3 ServerDll=winsrv:ConServerDllInitialization,2 ServerDll=sxssrv,4 ProfileControl=Off MaxRequestThreads=16", - "event_subtype_full": "already_running", - "event_type": "process", - "event_type_full": "process_event", - "md5": "60c2862b4bf0fd9f582ef344c2b1ec72", - "opcode": 3, - "pid": 436, - "ppid": 416, - "process_name": "csrss.exe", - "process_path": "C:\\Windows\\System32\\csrss.exe", - "serial_event_id": 6, - "subtype": "create", - "@timestamp": "1970-01-02T12:31:25.510Z", - "unique_pid": 6, - "unique_ppid": 0, - "user_domain": "NT AUTHORITY", - "user_name": "SYSTEM" - }, - { - "command_line": "winlogon.exe", - "event_subtype_full": "already_running", - "event_type": "process", - "event_type_full": "process_event", - "md5": "1151b1baa6f350b1db6598e0fea7c457", - "opcode": 3, - "pid": 472, - "ppid": 416, - "process_name": "winlogon.exe", - "process_path": "C:\\Windows\\System32\\winlogon.exe", - "serial_event_id": 7, - "subtype": "create", - "@timestamp": "1970-01-02T12:31:25.510Z", - "unique_pid": 7, - "unique_ppid": 0, - "user_domain": "NT AUTHORITY", - "user_name": "SYSTEM" - }, - { - "command_line": "C:\\Windows\\system32\\services.exe", - "event_subtype_full": "already_running", - "event_type": "process", - "event_type_full": "process_event", - "md5": "24acb7e5be595468e3b9aa488b9b4fcb", - "opcode": 3, - "parent_process_name": "wininit.exe", - "parent_process_path": "C:\\Windows\\System32\\wininit.exe", - "pid": 524, - "ppid": 424, - "process_name": "services.exe", - "process_path": "C:\\Windows\\System32\\services.exe", - "serial_event_id": 8, - "subtype": "create", - "@timestamp": "1970-01-02T12:31:25.520Z", - "unique_pid": 8, - "unique_ppid": 5, - "user_domain": "NT AUTHORITY", - "user_name": "SYSTEM" - }, - { - "command_line": "C:\\Windows\\system32\\lsass.exe", - "event_subtype_full": "already_running", - "event_type": "process", - "event_type_full": "process_event", - "md5": "7554a1b82b4a222fd4cc292abd38a558", - "opcode": 3, - "parent_process_name": "wininit.exe", - "parent_process_path": "C:\\Windows\\System32\\wininit.exe", - "pid": 536, - "ppid": 424, - "process_name": "lsass.exe", - "process_path": "C:\\Windows\\System32\\lsass.exe", - "serial_event_id": 9, - "subtype": "create", - "@timestamp": "1970-01-02T12:31:25.520Z", - "unique_pid": 9, - "unique_ppid": 5, - "user_domain": "NT AUTHORITY", - "user_name": "SYSTEM" - }, - { - "command_line": "C:\\Windows\\system32\\lsm.exe", - "event_subtype_full": "already_running", - "event_type": "process", - "event_type_full": "process_event", - "md5": "9662ee182644511439f1c53745dc1c88", - "opcode": 3, - "parent_process_name": "wininit.exe", - "parent_process_path": 
"C:\\Windows\\System32\\wininit.exe", - "pid": 544, - "ppid": 424, - "process_name": "lsm.exe", - "process_path": "C:\\Windows\\System32\\lsm.exe", - "serial_event_id": 10, - "subtype": "create", - "@timestamp": "1970-01-02T12:31:25.520Z", - "unique_pid": 10, - "unique_ppid": 5, - "user_domain": "NT AUTHORITY", - "user_name": "SYSTEM" - }, - { - "command_line": "C:\\Windows\\system32\\svchost.exe -k DcomLaunch", - "event_subtype_full": "already_running", - "event_type": "process", - "event_type_full": "process_event", - "md5": "c78655bc80301d76ed4fef1c1ea40a7d", - "opcode": 3, - "parent_process_name": "services.exe", - "parent_process_path": "C:\\Windows\\System32\\services.exe", - "pid": 648, - "ppid": 524, - "process_name": "svchost.exe", - "process_path": "C:\\Windows\\System32\\svchost.exe", - "serial_event_id": 11, - "subtype": "create", - "@timestamp": "1970-01-02T12:31:25.520Z", - "unique_pid": 11, - "unique_ppid": 8, - "user_domain": "NT AUTHORITY", - "user_name": "SYSTEM" - }, - { - "command_line": "\"C:\\Program Files\\VMware\\VMware Tools\\vmacthlp.exe\"", - "event_subtype_full": "already_running", - "event_type": "process", - "event_type_full": "process_event", - "md5": "3c4d41c4f8cdd2ca945e91a61e6cfbaf", - "opcode": 3, - "parent_process_name": "services.exe", - "parent_process_path": "C:\\Windows\\System32\\services.exe", - "pid": 708, - "ppid": 524, - "process_name": "vmacthlp.exe", - "process_path": "C:\\Program Files\\VMware\\VMware Tools\\vmacthlp.exe", - "serial_event_id": 12, - "subtype": "create", - "@timestamp": "1970-01-02T12:31:25.520Z", - "unique_pid": 12, - "unique_ppid": 8, - "user_domain": "NT AUTHORITY", - "user_name": "SYSTEM" - }, - { - "command_line": "C:\\Windows\\system32\\svchost.exe -k RPCSS", - "event_subtype_full": "already_running", - "event_type": "process", - "event_type_full": "process_event", - "md5": "c78655bc80301d76ed4fef1c1ea40a7d", - "opcode": 3, - "parent_process_name": "services.exe", - "parent_process_path": "C:\\Windows\\System32\\services.exe", - "pid": 752, - "ppid": 524, - "process_name": "svchost.exe", - "process_path": "C:\\Windows\\System32\\svchost.exe", - "serial_event_id": 13, - "subtype": "create", - "@timestamp": "1970-01-02T12:31:25.520Z", - "unique_pid": 13, - "unique_ppid": 8, - "user_domain": "NT AUTHORITY", - "user_name": "NETWORK SERVICE" - }, - { - "command_line": "\"LogonUI.exe\" /flags:0x0", - "event_subtype_full": "already_running", - "event_type": "process", - "event_type_full": "process_event", - "md5": "715f03b4c7223349768013ea95d9e5b7", - "opcode": 3, - "parent_process_name": "winlogon.exe", - "parent_process_path": "C:\\Windows\\System32\\winlogon.exe", - "pid": 828, - "ppid": 472, - "process_name": "LogonUI.exe", - "process_path": "C:\\Windows\\System32\\LogonUI.exe", - "serial_event_id": 14, - "subtype": "create", - "@timestamp": "1970-01-02T12:31:25.520Z", - "unique_pid": 14, - "unique_ppid": 7, - "user_domain": "NT AUTHORITY", - "user_name": "SYSTEM" - }, - { - "command_line": "C:\\Windows\\System32\\svchost.exe -k LocalServiceNetworkRestricted", - "event_subtype_full": "already_running", - "event_type": "process", - "event_type_full": "process_event", - "md5": "c78655bc80301d76ed4fef1c1ea40a7d", - "opcode": 3, - "parent_process_name": "services.exe", - "parent_process_path": "C:\\Windows\\System32\\services.exe", - "pid": 848, - "ppid": 524, - "process_name": "svchost.exe", - "process_path": "C:\\Windows\\System32\\svchost.exe", - "serial_event_id": 15, - "subtype": "create", - "@timestamp": 
"1970-01-02T12:31:25.520Z", - "unique_pid": 15, - "unique_ppid": 8, - "user_domain": "NT AUTHORITY", - "user_name": "LOCAL SERVICE" - }, - { - "command_line": "C:\\Windows\\System32\\svchost.exe -k LocalSystemNetworkRestricted", - "event_subtype_full": "already_running", - "event_type": "process", - "event_type_full": "process_event", - "md5": "c78655bc80301d76ed4fef1c1ea40a7d", - "opcode": 3, - "parent_process_name": "services.exe", - "parent_process_path": "C:\\Windows\\System32\\services.exe", - "pid": 896, - "ppid": 524, - "process_name": "svchost.exe", - "process_path": "C:\\Windows\\System32\\svchost.exe", - "serial_event_id": 16, - "subtype": "create", - "@timestamp": "1970-01-02T12:31:25.520Z", - "unique_pid": 16, - "unique_ppid": 8, - "user_domain": "NT AUTHORITY", - "user_name": "SYSTEM" - }, - { - "command_line": "C:\\Windows\\system32\\svchost.exe -k netsvcs", - "event_subtype_full": "already_running", - "event_type": "process", - "event_type_full": "process_event", - "md5": "c78655bc80301d76ed4fef1c1ea40a7d", - "opcode": 3, - "parent_process_name": "services.exe", - "parent_process_path": "C:\\Windows\\System32\\services.exe", - "pid": 924, - "ppid": 524, - "process_name": "svchost.exe", - "process_path": "C:\\Windows\\System32\\svchost.exe", - "serial_event_id": 17, - "subtype": "create", - "@timestamp": "1970-01-02T12:31:25.520Z", - "unique_pid": 17, - "unique_ppid": 8, - "user_domain": "NT AUTHORITY", - "user_name": "SYSTEM" - }, - { - "command_line": "C:\\Windows\\system32\\svchost.exe -k LocalService", - "event_subtype_full": "already_running", - "event_type": "process", - "event_type_full": "process_event", - "md5": "c78655bc80301d76ed4fef1c1ea40a7d", - "opcode": 3, - "parent_process_name": "services.exe", - "parent_process_path": "C:\\Windows\\System32\\services.exe", - "pid": 264, - "ppid": 524, - "process_name": "svchost.exe", - "process_path": "C:\\Windows\\System32\\svchost.exe", - "serial_event_id": 18, - "subtype": "create", - "@timestamp": "1970-01-02T12:31:25.530Z", - "unique_pid": 18, - "unique_ppid": 8, - "user_domain": "NT AUTHORITY", - "user_name": "LOCAL SERVICE" - }, - { - "command_line": "C:\\Windows\\system32\\svchost.exe -k NetworkService", - "event_subtype_full": "already_running", - "event_type": "process", - "event_type_full": "process_event", - "md5": "c78655bc80301d76ed4fef1c1ea40a7d", - "opcode": 3, - "parent_process_name": "services.exe", - "parent_process_path": "C:\\Windows\\System32\\services.exe", - "pid": 968, - "ppid": 524, - "process_name": "svchost.exe", - "process_path": "C:\\Windows\\System32\\svchost.exe", - "serial_event_id": 19, - "subtype": "create", - "@timestamp": "1970-01-02T12:31:25.530Z", - "unique_pid": 19, - "unique_ppid": 8, - "user_domain": "NT AUTHORITY", - "user_name": "NETWORK SERVICE" - }, - { - "command_line": "C:\\Windows\\System32\\spoolsv.exe", - "event_subtype_full": "already_running", - "event_type": "process", - "event_type_full": "process_event", - "md5": "b96c17b5dc1424d56eea3a99e97428cd", - "opcode": 3, - "parent_process_name": "services.exe", - "parent_process_path": "C:\\Windows\\System32\\services.exe", - "pid": 1108, - "ppid": 524, - "process_name": "spoolsv.exe", - "process_path": "C:\\Windows\\System32\\spoolsv.exe", - "serial_event_id": 20, - "subtype": "create", - "@timestamp": "1970-01-02T12:31:25.530Z", - "unique_pid": 20, - "unique_ppid": 8, - "user_domain": "NT AUTHORITY", - "user_name": "SYSTEM" - }, - { - "command_line": "C:\\Windows\\system32\\svchost.exe -k LocalServiceNoNetwork", - 
"event_subtype_full": "already_running", - "event_type": "process", - "event_type_full": "process_event", - "md5": "c78655bc80301d76ed4fef1c1ea40a7d", - "opcode": 3, - "parent_process_name": "services.exe", - "parent_process_path": "C:\\Windows\\System32\\services.exe", - "pid": 1136, - "ppid": 524, - "process_name": "svchost.exe", - "process_path": "C:\\Windows\\System32\\svchost.exe", - "serial_event_id": 21, - "subtype": "create", - "@timestamp": "1970-01-02T12:31:25.530Z", - "unique_pid": 21, - "unique_ppid": 8, - "user_domain": "NT AUTHORITY", - "user_name": "LOCAL SERVICE" - }, - { - "command_line": "\"C:\\Program Files\\VMware\\VMware Tools\\VMware VGAuth\\VGAuthService.exe\"", - "event_subtype_full": "already_running", - "event_type": "process", - "event_type_full": "process_event", - "md5": "ccd745aa6425c7637a34ff12ed8a1c18", - "opcode": 3, - "parent_process_name": "services.exe", - "parent_process_path": "C:\\Windows\\System32\\services.exe", - "pid": 1320, - "ppid": 524, - "process_name": "VGAuthService.exe", - "process_path": "C:\\Program Files\\VMware\\VMware Tools\\VMware VGAuth\\VGAuthService.exe", - "serial_event_id": 22, - "subtype": "create", - "@timestamp": "1970-01-02T12:31:25.530Z", - "unique_pid": 22, - "unique_ppid": 8, - "user_domain": "NT AUTHORITY", - "user_name": "SYSTEM" - }, - { - "command_line": "\"C:\\Program Files\\VMware\\VMware Tools\\vmtoolsd.exe\"", - "event_subtype_full": "already_running", - "event_type": "process", - "event_type_full": "process_event", - "md5": "404202d6f0628331aaade8c8f9ef6feb", - "opcode": 3, - "parent_process_name": "services.exe", - "parent_process_path": "C:\\Windows\\System32\\services.exe", - "pid": 1344, - "ppid": 524, - "process_name": "vmtoolsd.exe", - "process_path": "C:\\Program Files\\VMware\\VMware Tools\\vmtoolsd.exe", - "serial_event_id": 23, - "subtype": "create", - "@timestamp": "1970-01-02T12:31:25.530Z", - "unique_pid": 23, - "unique_ppid": 8, - "user_domain": "NT AUTHORITY", - "user_name": "SYSTEM" - }, - { - "command_line": "\"C:\\Program Files\\VMware\\VMware Tools\\VMware CAF\\pme\\bin\\ManagementAgentHost.exe\"", - "event_subtype_full": "already_running", - "event_type": "process", - "event_type_full": "process_event", - "md5": "3f61b1a4fe078bb7705b508cfcbb987e", - "opcode": 3, - "parent_process_name": "services.exe", - "parent_process_path": "C:\\Windows\\System32\\services.exe", - "pid": 1376, - "ppid": 524, - "process_name": "ManagementAgentHost.exe", - "process_path": "C:\\Program Files\\VMware\\VMware Tools\\VMware CAF\\pme\\bin\\ManagementAgentHost.exe", - "serial_event_id": 24, - "subtype": "create", - "@timestamp": "1970-01-02T12:31:25.530Z", - "unique_pid": 24, - "unique_ppid": 8, - "user_domain": "NT AUTHORITY", - "user_name": "SYSTEM" - }, - { - "command_line": "C:\\Windows\\system32\\svchost.exe -k NetworkServiceNetworkRestricted", - "event_subtype_full": "already_running", - "event_type": "process", - "event_type_full": "process_event", - "md5": "c78655bc80301d76ed4fef1c1ea40a7d", - "opcode": 3, - "parent_process_name": "services.exe", - "parent_process_path": "C:\\Windows\\System32\\services.exe", - "pid": 1692, - "ppid": 524, - "process_name": "svchost.exe", - "process_path": "C:\\Windows\\System32\\svchost.exe", - "serial_event_id": 25, - "subtype": "create", - "@timestamp": "1970-01-02T12:31:25.540Z", - "unique_pid": 25, - "unique_ppid": 8, - "user_domain": "NT AUTHORITY", - "user_name": "NETWORK SERVICE" - }, - { - "command_line": "C:\\Windows\\system32\\wbem\\wmiprvse.exe", - 
"event_subtype_full": "already_running", - "event_type": "process", - "event_type_full": "process_event", - "md5": "8f4ecbbfe943030acfd9e892b2513ec1", - "opcode": 3, - "parent_process_name": "svchost.exe", - "parent_process_path": "C:\\Windows\\System32\\svchost.exe", - "pid": 1840, - "ppid": 648, - "process_name": "WmiPrvSE.exe", - "process_path": "C:\\Windows\\System32\\wbem\\WmiPrvSE.exe", - "serial_event_id": 26, - "subtype": "create", - "@timestamp": "1970-01-02T12:31:25.540Z", - "unique_pid": 26, - "unique_ppid": 11, - "user_domain": "NT AUTHORITY", - "user_name": "NETWORK SERVICE" - }, - { - "command_line": "C:\\Windows\\System32\\msdtc.exe", - "event_subtype_full": "already_running", - "event_type": "process", - "event_type_full": "process_event", - "md5": "de0ece52236cfa3ed2dbfc03f28253a8", - "opcode": 3, - "parent_process_name": "services.exe", - "parent_process_path": "C:\\Windows\\System32\\services.exe", - "pid": 960, - "ppid": 524, - "process_name": "msdtc.exe", - "process_path": "C:\\Windows\\System32\\msdtc.exe", - "serial_event_id": 27, - "subtype": "create", - "@timestamp": "1970-01-02T12:31:25.550Z", - "unique_pid": 27, - "unique_ppid": 8, - "user_domain": "NT AUTHORITY", - "user_name": "NETWORK SERVICE" - }, - { - "command_line": "%SystemRoot%\\system32\\csrss.exe ObjectDirectory=\\Windows SharedSection=1024,20480,768 Windows=On SubSystemType=Windows ServerDll=basesrv,1 ServerDll=winsrv:UserServerDllInitialization,3 ServerDll=winsrv:ConServerDllInitialization,2 ServerDll=sxssrv,4 ProfileControl=Off MaxRequestThreads=16", - "event_subtype_full": "already_running", - "event_type": "process", - "event_type_full": "process_event", - "md5": "60c2862b4bf0fd9f582ef344c2b1ec72", - "opcode": 3, - "pid": 3048, - "ppid": 3040, - "process_name": "csrss.exe", - "process_path": "C:\\Windows\\System32\\csrss.exe", - "serial_event_id": 28, - "subtype": "create", - "@timestamp": "1970-01-02T12:31:25.790Z", - "unique_pid": 28, - "unique_ppid": 0, - "user_domain": "NT AUTHORITY", - "user_name": "SYSTEM" - }, - { - "command_line": "winlogon.exe", - "event_subtype_full": "already_running", - "event_type": "process", - "event_type_full": "process_event", - "md5": "1151b1baa6f350b1db6598e0fea7c457", - "opcode": 3, - "pid": 2108, - "ppid": 3040, - "process_name": "winlogon.exe", - "process_path": "C:\\Windows\\System32\\winlogon.exe", - "serial_event_id": 29, - "subtype": "create", - "@timestamp": "1970-01-02T12:31:25.790Z", - "unique_pid": 29, - "unique_ppid": 0, - "user_domain": "NT AUTHORITY", - "user_name": "SYSTEM" - }, - { - "command_line": "rdpclip", - "event_subtype_full": "already_running", - "event_type": "process", - "event_type_full": "process_event", - "md5": "25d284eb2f12254c001afe9a82575a81", - "opcode": 3, - "parent_process_name": "svchost.exe", - "parent_process_path": "C:\\Windows\\System32\\svchost.exe", - "pid": 2704, - "ppid": 968, - "process_name": "rdpclip.exe", - "process_path": "C:\\Windows\\System32\\rdpclip.exe", - "serial_event_id": 30, - "subtype": "create", - "@timestamp": "1970-01-02T12:31:25.810Z", - "unique_pid": 30, - "unique_ppid": 19, - "user_domain": "vagrant", - "user_name": "vagrant" - }, - { - "command_line": "\"taskhost.exe\"", - "event_subtype_full": "already_running", - "event_type": "process", - "event_type_full": "process_event", - "md5": "517110bd83835338c037269e603db55d", - "opcode": 3, - "parent_process_name": "services.exe", - "parent_process_path": "C:\\Windows\\System32\\services.exe", - "pid": 2776, - "ppid": 524, - "process_name": 
"taskhost.exe", - "process_path": "C:\\Windows\\System32\\taskhost.exe", - "serial_event_id": 31, - "subtype": "create", - "@timestamp": "1970-01-02T12:31:25.810Z", - "unique_pid": 31, - "unique_ppid": 8, - "user_domain": "vagrant", - "user_name": "vagrant" - }, - { - "command_line": "C:\\Windows\\system32\\sppsvc.exe", - "event_subtype_full": "already_running", - "event_type": "process", - "event_type_full": "process_event", - "md5": "e17e0188bb90fae42d83e98707efa59c", - "opcode": 3, - "parent_process_name": "services.exe", - "parent_process_path": "C:\\Windows\\System32\\services.exe", - "pid": 2804, - "ppid": 524, - "process_name": "sppsvc.exe", - "process_path": "C:\\Windows\\System32\\sppsvc.exe", - "serial_event_id": 32, - "subtype": "create", - "@timestamp": "1970-01-02T12:31:25.810Z", - "unique_pid": 32, - "unique_ppid": 8, - "user_domain": "NT AUTHORITY", - "user_name": "NETWORK SERVICE" - }, - { - "command_line": "\"C:\\Windows\\system32\\Dwm.exe\"", - "event_subtype_full": "already_running", - "event_type": "process", - "event_type_full": "process_event", - "md5": "f162d5f5e845b9dc352dd1bad8cef1bc", - "opcode": 3, - "parent_process_name": "svchost.exe", - "parent_process_path": "C:\\Windows\\System32\\svchost.exe", - "pid": 2464, - "ppid": 896, - "process_name": "dwm.exe", - "process_path": "C:\\Windows\\System32\\dwm.exe", - "serial_event_id": 33, - "subtype": "create", - "@timestamp": "1970-01-02T12:31:25.150Z", - "unique_pid": 33, - "unique_ppid": 16, - "user_domain": "vagrant", - "user_name": "vagrant" - }, - { - "command_line": "C:\\Windows\\Explorer.EXE", - "event_subtype_full": "already_running", - "event_type": "process", - "event_type_full": "process_event", - "md5": "ac4c51eb24aa95b77f705ab159189e24", - "opcode": 3, - "pid": 2460, - "ppid": 3052, - "process_name": "explorer.exe", - "process_path": "C:\\Windows\\explorer.exe", - "serial_event_id": 34, - "subtype": "create", - "@timestamp": "1970-01-02T12:31:25.150Z", - "unique_pid": 34, - "unique_ppid": 0, - "user_domain": "vagrant", - "user_name": "vagrant" - }, - { - "command_line": "\"C:\\Program Files\\VMware\\VMware Tools\\vmtoolsd.exe\" -n vmusr", - "event_subtype_full": "already_running", - "event_type": "process", - "event_type_full": "process_event", - "md5": "404202d6f0628331aaade8c8f9ef6feb", - "opcode": 3, - "parent_process_name": "explorer.exe", - "parent_process_path": "C:\\Windows\\explorer.exe", - "pid": 2604, - "ppid": 2460, - "process_name": "vmtoolsd.exe", - "process_path": "C:\\Program Files\\VMware\\VMware Tools\\vmtoolsd.exe", - "serial_event_id": 35, - "subtype": "create", - "@timestamp": "1970-01-02T12:31:25.150Z", - "unique_pid": 35, - "unique_ppid": 34, - "user_domain": "vagrant", - "user_name": "vagrant" - }, - { - "command_line": "C:\\Windows\\system32\\SearchIndexer.exe /Embedding", - "event_subtype_full": "already_running", - "event_type": "process", - "event_type_full": "process_event", - "md5": "ad31942bdf3d594c404874613bc2fe4d", - "opcode": 3, - "parent_process_name": "services.exe", - "parent_process_path": "C:\\Windows\\System32\\services.exe", - "pid": 1620, - "ppid": 524, - "process_name": "SearchIndexer.exe", - "process_path": "C:\\Windows\\System32\\SearchIndexer.exe", - "serial_event_id": 36, - "subtype": "create", - "@timestamp": "1970-01-02T12:31:25.210Z", - "unique_pid": 36, - "unique_ppid": 8, - "user_domain": "NT AUTHORITY", - "user_name": "SYSTEM" - }, - { - "command_line": "C:\\Windows\\system32\\svchost.exe -k LocalServiceAndNoImpersonation", - "event_subtype_full": 
"already_running", - "event_type": "process", - "event_type_full": "process_event", - "md5": "c78655bc80301d76ed4fef1c1ea40a7d", - "opcode": 3, - "parent_process_name": "services.exe", - "parent_process_path": "C:\\Windows\\System32\\services.exe", - "pid": 3684, - "ppid": 524, - "process_name": "svchost.exe", - "process_path": "C:\\Windows\\System32\\svchost.exe", - "serial_event_id": 37, - "subtype": "create", - "@timestamp": "1970-01-02T12:31:25.750Z", - "unique_pid": 37, - "unique_ppid": 8, - "user_domain": "NT AUTHORITY", - "user_name": "LOCAL SERVICE" - }, - { - "command_line": "C:\\Windows\\System32\\svchost.exe -k secsvcs", - "event_subtype_full": "already_running", - "event_type": "process", - "event_type_full": "process_event", - "md5": "c78655bc80301d76ed4fef1c1ea40a7d", - "opcode": 3, - "parent_process_name": "services.exe", - "parent_process_path": "C:\\Windows\\System32\\services.exe", - "pid": 3712, - "ppid": 524, - "process_name": "svchost.exe", - "process_path": "C:\\Windows\\System32\\svchost.exe", - "serial_event_id": 38, - "subtype": "create", - "@timestamp": "1970-01-02T12:31:25.750Z", - "unique_pid": 38, - "unique_ppid": 8, - "user_domain": "NT AUTHORITY", - "user_name": "SYSTEM" - }, - { - "command_line": "\"C:\\Windows\\system32\\cmd.exe\" ", - "event_subtype_full": "already_running", - "event_type": "process", - "event_type_full": "process_event", - "md5": "5746bd7e255dd6a8afa06f7c42c1ba41", - "opcode": 3, - "parent_process_name": "explorer.exe", - "parent_process_path": "C:\\Windows\\explorer.exe", - "pid": 2864, - "ppid": 2460, - "process_name": "cmd.exe", - "process_path": "C:\\Windows\\System32\\cmd.exe", - "serial_event_id": 39, - "subtype": "create", - "@timestamp": "1970-01-02T12:31:31.190Z", - "unique_pid": 39, - "unique_ppid": 34, - "user_domain": "vagrant", - "user_name": "vagrant" - }, - { - "command_line": "\\??\\C:\\Windows\\system32\\conhost.exe", - "event_subtype_full": "already_running", - "event_type": "process", - "event_type_full": "process_event", - "md5": "bd51024fb014064bc9fe8c715c18392f", - "opcode": 3, - "parent_process_name": "csrss.exe", - "parent_process_path": "C:\\Windows\\System32\\csrss.exe", - "pid": 2228, - "ppid": 3048, - "process_name": "conhost.exe", - "process_path": "C:\\Windows\\System32\\conhost.exe", - "serial_event_id": 40, - "subtype": "create", - "@timestamp": "1970-01-02T12:31:31.190Z", - "unique_pid": 40, - "unique_ppid": 28, - "user_domain": "vagrant", - "user_name": "vagrant" - }, - { - "command_line": "C:\\Windows\\system32\\svchost.exe -k SDRSVC", - "event_subtype_full": "already_running", - "event_type": "process", - "event_type_full": "process_event", - "md5": "c78655bc80301d76ed4fef1c1ea40a7d", - "opcode": 3, - "parent_process_name": "services.exe", - "parent_process_path": "C:\\Windows\\System32\\services.exe", - "pid": 3820, - "ppid": 524, - "process_name": "svchost.exe", - "process_path": "C:\\Windows\\System32\\svchost.exe", - "serial_event_id": 41, - "subtype": "create", - "@timestamp": "1970-01-02T12:31:31.310Z", - "unique_pid": 41, - "unique_ppid": 8, - "user_domain": "NT AUTHORITY", - "user_name": "SYSTEM" - }, - { - "command_line": "C:\\Windows\\servicing\\TrustedInstaller.exe", - "event_subtype_full": "already_running", - "event_type": "process", - "event_type_full": "process_event", - "md5": "773212b2aaa24c1e31f10246b15b276c", - "opcode": 3, - "parent_process_name": "services.exe", - "parent_process_path": "C:\\Windows\\System32\\services.exe", - "pid": 3384, - "ppid": 524, - "process_name": 
"TrustedInstaller.exe", - "process_path": "C:\\Windows\\servicing\\TrustedInstaller.exe", - "serial_event_id": 42, - "subtype": "create", - "@timestamp": "1970-01-02T12:31:49.130Z", - "unique_pid": 42, - "unique_ppid": 8, - "user_domain": "NT AUTHORITY", - "user_name": "SYSTEM" - }, - { - "command_line": "C:\\Windows\\system32\\wbem\\wmiprvse.exe", - "event_subtype_full": "already_running", - "event_type": "process", - "event_type_full": "process_event", - "md5": "8f4ecbbfe943030acfd9e892b2513ec1", - "opcode": 3, - "parent_process_name": "svchost.exe", - "parent_process_path": "C:\\Windows\\System32\\svchost.exe", - "pid": 1860, - "ppid": 648, - "process_name": "WmiPrvSE.exe", - "process_path": "C:\\Windows\\System32\\wbem\\WmiPrvSE.exe", - "serial_event_id": 43, - "subtype": "create", - "@timestamp": "1970-01-02T12:31:49.230Z", - "unique_pid": 43, - "unique_ppid": 11, - "user_domain": "NT AUTHORITY", - "user_name": "SYSTEM" - }, - { - "command_line": "taskeng.exe {6108575A-1CC2-4917-BB5D-5929CDC39B9C}", - "event_subtype_full": "already_running", - "event_type": "process", - "event_type_full": "process_event", - "md5": "65ea57712340c09b1b0c427b4848ae05", - "opcode": 3, - "parent_process_name": "svchost.exe", - "parent_process_path": "C:\\Windows\\System32\\svchost.exe", - "pid": 660, - "ppid": 924, - "process_name": "taskeng.exe", - "process_path": "C:\\Windows\\System32\\taskeng.exe", - "serial_event_id": 44, - "subtype": "create", - "@timestamp": "1970-01-02T12:31:49.900Z", - "unique_pid": 44, - "unique_ppid": 17, - "user_domain": "vagrant", - "user_name": "vagrant" - }, - { - "command_line": "C:\\Windows\\system32\\msiexec.exe /V", - "event_subtype_full": "already_running", - "event_type": "process", - "event_type_full": "process_event", - "md5": "a190da6546501cb4146bbcc0b6a3f48b", - "opcode": 3, - "parent_process_name": "services.exe", - "parent_process_path": "C:\\Windows\\System32\\services.exe", - "pid": 760, - "ppid": 524, - "process_name": "msiexec.exe", - "process_path": "C:\\Windows\\System32\\msiexec.exe", - "serial_event_id": 45, - "subtype": "create", - "@timestamp": "1970-01-02T12:31:49.370Z", - "unique_pid": 45, - "unique_ppid": 8, - "user_domain": "NT AUTHORITY", - "user_name": "SYSTEM" - }, - { - "command_line": "C:\\Windows\\system32\\wsmprovhost.exe -Embedding", - "event_subtype_full": "already_running", - "event_type": "process", - "event_type_full": "process_event", - "md5": "3e5cfefdda537ddbed9f5c6c7e926cdd", - "opcode": 3, - "parent_process_name": "svchost.exe", - "parent_process_path": "C:\\Windows\\System32\\svchost.exe", - "pid": 2824, - "ppid": 648, - "process_name": "wsmprovhost.exe", - "process_path": "C:\\Windows\\System32\\wsmprovhost.exe", - "serial_event_id": 46, - "subtype": "create", - "@timestamp": "1970-01-02T12:31:49.980Z", - "unique_pid": 46, - "unique_ppid": 11, - "user_domain": "vagrant", - "user_name": "vagrant" - }, - { - "command_line": "C:\\Windows\\system32\\wsmprovhost.exe -Embedding", - "event_subtype_full": "already_running", - "event_type": "process", - "event_type_full": "process_event", - "md5": "3e5cfefdda537ddbed9f5c6c7e926cdd", - "opcode": 3, - "parent_process_name": "svchost.exe", - "parent_process_path": "C:\\Windows\\System32\\svchost.exe", - "pid": 3408, - "ppid": 648, - "process_name": "wsmprovhost.exe", - "process_path": "C:\\Windows\\System32\\wsmprovhost.exe", - "serial_event_id": 47, - "subtype": "create", - "@timestamp": "1970-01-02T12:31:49.020Z", - "unique_pid": 47, - "unique_ppid": 11, - "user_domain": "vagrant", - 
"user_name": "vagrant" - }, - { - "command_line": "\"C:\\Python27\\python.exe\" worker.py --target c:\\workspace\\red_ttp\\process_name_masquerade.py", - "event_subtype_full": "already_running", - "event_type": "process", - "event_type_full": "process_event", - "md5": "21f73cd55626f0ec9fbce53eafbef128", - "opcode": 3, - "parent_process_name": "wsmprovhost.exe", - "parent_process_path": "C:\\Windows\\System32\\wsmprovhost.exe", - "pid": 420, - "ppid": 3408, - "process_name": "python.exe", - "process_path": "C:\\Python27\\python.exe", - "serial_event_id": 48, - "subtype": "create", - "@timestamp": "1970-01-02T12:31:49.020Z", - "unique_pid": 48, - "unique_ppid": 47, - "user_domain": "vagrant", - "user_name": "vagrant" - }, - { - "command_line": "\\??\\C:\\Windows\\system32\\conhost.exe", - "event_subtype_full": "already_running", - "event_type": "process", - "event_type_full": "process_event", - "md5": "bd51024fb014064bc9fe8c715c18392f", - "opcode": 3, - "parent_process_name": "csrss.exe", - "parent_process_path": "C:\\Windows\\System32\\csrss.exe", - "pid": 3080, - "ppid": 372, - "process_name": "conhost.exe", - "process_path": "C:\\Windows\\System32\\conhost.exe", - "serial_event_id": 49, - "subtype": "create", - "@timestamp": "1970-01-02T12:31:49.020Z", - "unique_pid": 49, - "unique_ppid": 4, - "user_domain": "vagrant", - "user_name": "vagrant" - }, - { - "command_line": "C:\\Python27\\python.exe myappserver.py --log-file C:\\workspace\\dev\\myapp.out --update-server-port 8446 --sout C:\\workspace\\Libraries\\myapp\\myapp\\python\\myapp\\hunt_out.json", - "event_subtype_full": "already_running", - "event_type": "process", - "event_type_full": "process_event", - "md5": "21f73cd55626f0ec9fbce53eafbef128", - "opcode": 3, - "parent_process_name": "python.exe", - "parent_process_path": "C:\\Python27\\python.exe", - "pid": 1688, - "ppid": 420, - "process_name": "python.exe", - "process_path": "C:\\Python27\\python.exe", - "serial_event_id": 50, - "subtype": "create", - "@timestamp": "1970-01-02T12:31:49.100Z", - "unique_pid": 50, - "unique_ppid": 48, - "user_domain": "vagrant", - "user_name": "vagrant" - }, - { - "command_line": "C:\\Python27\\python.exe C:\\workspace\\dev\\Simple_Https_Server\\simple_https_server.py", - "event_subtype_full": "already_running", - "event_type": "process", - "event_type_full": "process_event", - "md5": "21f73cd55626f0ec9fbce53eafbef128", - "opcode": 3, - "parent_process_name": "python.exe", - "parent_process_path": "C:\\Python27\\python.exe", - "pid": 1720, - "ppid": 420, - "process_name": "python.exe", - "process_path": "C:\\Python27\\python.exe", - "serial_event_id": 51, - "subtype": "create", - "@timestamp": "1970-01-02T12:31:49.100Z", - "unique_pid": 51, - "unique_ppid": 48, - "user_domain": "vagrant", - "user_name": "vagrant" - }, - { - "command_line": "C:\\Windows\\System32\\LauncherProcess.exe", - "event_subtype_full": "already_running", - "event_type": "process", - "event_type_full": "process_event", - "md5": "6a8649f3205b311e208ac35a04e99700", - "opcode": 3, - "parent_process_name": "svchost.exe", - "parent_process_path": "C:\\Windows\\System32\\svchost.exe", - "pid": 2164, - "ppid": 648, - "process_name": "LauncherProcess.exe", - "process_path": "C:\\Windows\\System32\\LauncherProcess.exe", - "serial_event_id": 52, - "subtype": "create", - "@timestamp": "1970-01-02T12:31:49.150Z", - "unique_pid": 52, - "unique_ppid": 11, - "user_domain": "NT AUTHORITY", - "user_name": "SYSTEM" - }, - { - "command_line": "C:\\Windows\\system32\\cmd.exe /c 
\"c:\\workspace\\red_ttp\\process_name_masquerade.py\"", - "event_subtype_full": "creation_event", - "event_type": "process", - "event_type_full": "process_event", - "md5": "5746bd7e255dd6a8afa06f7c42c1ba41", - "opcode": 1, - "parent_process_name": "python.exe", - "parent_process_path": "C:\\Python27\\python.exe", - "pid": 1788, - "ppid": 420, - "process_name": "cmd.exe", - "process_path": "C:\\Windows\\System32\\cmd.exe", - "serial_event_id": 53, - "subtype": "create", - "@timestamp": "1970-01-02T12:31:49.29420914Z", - "unique_pid": 53, - "unique_ppid": 48, - "user_domain": "vagrant", - "user_name": "vagrant" - }, - { - "command_line": "\"C:\\Python27\\python.exe\" \"C:\\workspace\\red_ttp\\process_name_masquerade.py\" ", - "event_subtype_full": "creation_event", - "event_type": "process", - "event_type_full": "process_event", - "md5": "21f73cd55626f0ec9fbce53eafbef128", - "opcode": 1, - "parent_process_name": "cmd.exe", - "parent_process_path": "C:\\Windows\\System32\\cmd.exe", - "pid": 2256, - "ppid": 1788, - "process_name": "python.exe", - "process_path": "C:\\Python27\\python.exe", - "serial_event_id": 54, - "subtype": "create", - "@timestamp": "1970-01-02T12:31:49.29436514Z", - "unique_pid": 54, - "unique_ppid": 53, - "user_domain": "vagrant", - "user_name": "vagrant" - }, - { - "event_subtype_full": "file_create_event", - "event_type": "file", - "event_type_full": "file_event", - "file_name": "svchost.exe", - "file_path": "C:\\workspace\\red_ttp\\svchost.exe", - "opcode": 0, - "pid": 2256, - "process_name": "python.exe", - "process_path": "C:\\Python27\\python.exe", - "serial_event_id": 55, - "subtype": "create", - "@timestamp": "1970-01-02T12:31:49.29545714Z", - "unique_pid": 54, - "user_domain": "vagrant", - "user_name": "vagrant" - }, - { - "command_line": "svchost.exe", - "event_subtype_full": "creation_event", - "event_type": "process", - "event_type_full": "process_event", - "md5": "f49c54c4997a0401db0f6640a6111c52", - "opcode": 1, - "parent_process_name": "python.exe", - "parent_process_path": "C:\\Python27\\python.exe", - "pid": 2760, - "ppid": 2256, - "process_name": "svchost.exe", - "process_path": "C:\\workspace\\red_ttp\\svchost.exe", - "serial_event_id": 56, - "subtype": "create", - "@timestamp": "1970-01-02T12:31:49.29561314Z", - "unique_pid": 56, - "unique_ppid": 54, - "user_domain": "vagrant", - "user_name": "vagrant" - }, - { - "bytes_written_count": 20, - "bytes_written_string_list": [ - "en-US", - "en" - ], - "event_subtype_full": "registry_modify_event", - "event_type": "registry", - "event_type_full": "registry_event", - "key_path": "\\REGISTRY\\USER\\S-1-5-21-3942132181-2402070379-3970972291-1001_CLASSES\\Local Settings\\MuiCache\\1B\\52C64B7E\\LanguageList", - "key_type": "multiSz", - "opcode": 1, - "pid": 2460, - "process_name": "explorer.exe", - "process_path": "C:\\Windows\\explorer.exe", - "registry_key": "\\REGISTRY\\USER\\S-1-5-21-3942132181-2402070379-3970972291-1001_CLASSES\\Local Settings\\MuiCache\\1B\\52C64B7E", - "registry_path": "\\REGISTRY\\USER\\S-1-5-21-3942132181-2402070379-3970972291-1001_CLASSES\\Local Settings\\MuiCache\\1B\\52C64B7E\\LanguageList", - "registry_type": "multi_string", - "registry_value": "LanguageList", - "serial_event_id": 57, - "@timestamp": "1970-01-02T12:31:49.3060652Z", - "unique_pid": 34, - "user_name": "vagrant" - }, - { - "event_subtype_full": "termination_event", - "event_type": "process", - "event_type_full": "process_event", - "exit_code": 0, - "md5": "f49c54c4997a0401db0f6640a6111c52", - "opcode": 2, - 
"parent_process_name": "python.exe", - "parent_process_path": "C:\\Python27\\python.exe", - "pid": 2760, - "ppid": 2256, - "process_name": "svchost.exe", - "process_path": "C:\\workspace\\red_ttp\\svchost.exe", - "serial_event_id": 58, - "subtype": "terminate", - "@timestamp": "1970-01-02T12:31:49.34568946Z", - "unique_pid": 56, - "unique_ppid": 54, - "user_domain": "vagrant", - "user_name": "vagrant" - }, - { - "event_subtype_full": "file_delete_event", - "event_type": "file", - "event_type_full": "file_event", - "file_name": "svchost.exe", - "file_path": "C:\\workspace\\red_ttp\\svchost.exe", - "opcode": 2, - "pid": 2256, - "process_name": "python.exe", - "process_path": "C:\\Python27\\python.exe", - "serial_event_id": 59, - "subtype": "modify", - "@timestamp": "1970-01-02T12:31:49.34568946Z", - "unique_pid": 54, - "user_domain": "vagrant", - "user_name": "vagrant" - }, - { - "event_subtype_full": "file_create_event", - "event_type": "file", - "event_type_full": "file_event", - "file_name": "SVCHOST.EXE-CB1B3AA2.pf", - "file_path": "C:\\Windows\\Prefetch\\SVCHOST.EXE-CB1B3AA2.pf", - "opcode": 0, - "pid": 896, - "process_name": "svchost.exe", - "process_path": "C:\\Windows\\System32\\svchost.exe", - "serial_event_id": 60, - "subtype": "create", - "@timestamp": "1970-01-02T12:31:49.34568946Z", - "unique_pid": 16, - "user_domain": "NT AUTHORITY", - "user_name": "SYSTEM" - }, - { - "event_subtype_full": "file_create_event", - "event_type": "file", - "event_type_full": "file_event", - "file_name": "lsass.exe", - "file_path": "C:\\workspace\\red_ttp\\lsass.exe", - "opcode": 0, - "pid": 2256, - "process_name": "python.exe", - "process_path": "C:\\Python27\\python.exe", - "serial_event_id": 61, - "subtype": "create", - "@timestamp": "1970-01-02T12:31:49.34568946Z", - "unique_pid": 54, - "user_domain": "vagrant", - "user_name": "vagrant" - }, - { - "command_line": "lsass.exe", - "event_subtype_full": "creation_event", - "event_type": "process", - "event_type_full": "process_event", - "md5": "f49c54c4997a0401db0f6640a6111c52", - "opcode": 1, - "parent_process_name": "python.exe", - "parent_process_path": "C:\\Python27\\python.exe", - "pid": 3696, - "ppid": 2256, - "process_name": "lsass.exe", - "process_path": "C:\\workspace\\red_ttp\\lsass.exe", - "serial_event_id": 62, - "subtype": "create", - "@timestamp": "1970-01-02T12:31:49.34568946Z", - "unique_pid": 62, - "unique_ppid": 54, - "user_domain": "vagrant", - "user_name": "vagrant" - }, - { - "event_subtype_full": "request_event", - "event_type": "dns", - "event_type_full": "dns_event", - "opcode": 3008, - "pid": 924, - "process_name": "svchost.exe", - "process_path": "C:\\Windows\\System32\\svchost.exe", - "query_name": "teredo.ipv6.microsoft.com.", - "serial_event_id": 63, - "@timestamp": "1970-01-02T12:31:49.35036949Z", - "unique_pid": 17, - "user_domain": "NT AUTHORITY", - "user_name": "SYSTEM" - }, - { - "event_subtype_full": "termination_event", - "event_type": "process", - "event_type_full": "process_event", - "exit_code": 0, - "md5": "f49c54c4997a0401db0f6640a6111c52", - "opcode": 2, - "parent_process_name": "python.exe", - "parent_process_path": "C:\\Python27\\python.exe", - "pid": 3696, - "ppid": 2256, - "process_name": "lsass.exe", - "process_path": "C:\\workspace\\red_ttp\\lsass.exe", - "serial_event_id": 64, - "subtype": "terminate", - "@timestamp": "1970-01-02T12:31:49.39592178Z", - "unique_pid": 62, - "unique_ppid": 54, - "user_domain": "vagrant", - "user_name": "vagrant" - }, - { - "event_subtype_full": "file_delete_event", - 
"event_type": "file", - "event_type_full": "file_event", - "file_name": "lsass.exe", - "file_path": "C:\\workspace\\red_ttp\\lsass.exe", - "opcode": 2, - "pid": 2256, - "process_name": "python.exe", - "process_path": "C:\\Python27\\python.exe", - "serial_event_id": 65, - "subtype": "modify", - "@timestamp": "1970-01-02T12:31:49.39592178Z", - "unique_pid": 54, - "user_domain": "vagrant", - "user_name": "vagrant" - }, - { - "event_subtype_full": "file_create_event", - "event_type": "file", - "event_type_full": "file_event", - "file_name": "LSASS.EXE-02265BD5.pf", - "file_path": "C:\\Windows\\Prefetch\\LSASS.EXE-02265BD5.pf", - "opcode": 0, - "pid": 896, - "process_name": "svchost.exe", - "process_path": "C:\\Windows\\System32\\svchost.exe", - "serial_event_id": 66, - "subtype": "create", - "@timestamp": "1970-01-02T12:31:49.39592178Z", - "unique_pid": 16, - "user_domain": "NT AUTHORITY", - "user_name": "SYSTEM" - }, - { - "event_subtype_full": "file_create_event", - "event_type": "file", - "event_type_full": "file_event", - "file_name": "services.exe", - "file_path": "C:\\workspace\\red_ttp\\services.exe", - "opcode": 0, - "pid": 2256, - "process_name": "python.exe", - "process_path": "C:\\Python27\\python.exe", - "serial_event_id": 67, - "subtype": "create", - "@timestamp": "1970-01-02T12:31:49.39592178Z", - "unique_pid": 54, - "user_domain": "vagrant", - "user_name": "vagrant" - }, - { - "command_line": "services.exe", - "event_subtype_full": "creation_event", - "event_type": "process", - "event_type_full": "process_event", - "md5": "f49c54c4997a0401db0f6640a6111c52", - "opcode": 1, - "parent_process_name": "python.exe", - "parent_process_path": "C:\\Python27\\python.exe", - "pid": 1832, - "ppid": 2256, - "process_name": "services.exe", - "process_path": "C:\\workspace\\red_ttp\\services.exe", - "serial_event_id": 68, - "subtype": "create", - "@timestamp": "1970-01-02T12:31:49.39592178Z", - "unique_pid": 68, - "unique_ppid": 54, - "user_domain": "vagrant", - "user_name": "vagrant" - }, - { - "event_subtype_full": "termination_event", - "event_type": "process", - "event_type_full": "process_event", - "exit_code": 0, - "md5": "f49c54c4997a0401db0f6640a6111c52", - "opcode": 2, - "parent_process_name": "python.exe", - "parent_process_path": "C:\\Python27\\python.exe", - "pid": 1832, - "ppid": 2256, - "process_name": "services.exe", - "process_path": "C:\\workspace\\red_ttp\\services.exe", - "serial_event_id": 69, - "subtype": "terminate", - "@timestamp": "1970-01-02T12:31:49.44677811Z", - "unique_pid": 68, - "unique_ppid": 54, - "user_domain": "vagrant", - "user_name": "vagrant" - }, - { - "event_subtype_full": "file_delete_event", - "event_type": "file", - "event_type_full": "file_event", - "file_name": "services.exe", - "file_path": "C:\\workspace\\red_ttp\\services.exe", - "opcode": 2, - "pid": 2256, - "process_name": "python.exe", - "process_path": "C:\\Python27\\python.exe", - "serial_event_id": 70, - "subtype": "modify", - "@timestamp": "1970-01-02T12:31:49.44677811Z", - "unique_pid": 54, - "user_domain": "vagrant", - "user_name": "vagrant" - }, - { - "event_subtype_full": "file_create_event", - "event_type": "file", - "event_type_full": "file_event", - "file_name": "SERVICES.EXE-01D9177B.pf", - "file_path": "C:\\Windows\\Prefetch\\SERVICES.EXE-01D9177B.pf", - "opcode": 0, - "pid": 896, - "process_name": "svchost.exe", - "process_path": "C:\\Windows\\System32\\svchost.exe", - "serial_event_id": 71, - "subtype": "create", - "@timestamp": "1970-01-02T12:31:49.44677811Z", - "unique_pid": 
16, - "user_domain": "NT AUTHORITY", - "user_name": "SYSTEM" - }, - { - "event_subtype_full": "file_create_event", - "event_type": "file", - "event_type_full": "file_event", - "file_name": "csrss.exe", - "file_path": "C:\\workspace\\red_ttp\\csrss.exe", - "opcode": 0, - "pid": 2256, - "process_name": "python.exe", - "process_path": "C:\\Python27\\python.exe", - "serial_event_id": 72, - "subtype": "create", - "@timestamp": "1970-01-02T12:31:49.44677811Z", - "unique_pid": 54, - "user_domain": "vagrant", - "user_name": "vagrant" - }, - { - "command_line": "csrss.exe", - "event_subtype_full": "creation_event", - "event_type": "process", - "event_type_full": "process_event", - "md5": "f49c54c4997a0401db0f6640a6111c52", - "opcode": 1, - "parent_process_name": "python.exe", - "parent_process_path": "C:\\Python27\\python.exe", - "pid": 3948, - "ppid": 2256, - "process_name": "csrss.exe", - "process_path": "C:\\workspace\\red_ttp\\csrss.exe", - "serial_event_id": 73, - "subtype": "create", - "@timestamp": "1970-01-02T12:31:49.44677811Z", - "unique_pid": 73, - "unique_ppid": 54, - "user_domain": "vagrant", - "user_name": "vagrant" - }, - { - "event_subtype_full": "termination_event", - "event_type": "process", - "event_type_full": "process_event", - "exit_code": 0, - "md5": "f49c54c4997a0401db0f6640a6111c52", - "opcode": 2, - "parent_process_name": "python.exe", - "parent_process_path": "C:\\Python27\\python.exe", - "pid": 3948, - "ppid": 2256, - "process_name": "csrss.exe", - "process_path": "C:\\workspace\\red_ttp\\csrss.exe", - "serial_event_id": 74, - "subtype": "terminate", - "@timestamp": "1970-01-02T12:31:49.49701043Z", - "unique_pid": 73, - "unique_ppid": 54, - "user_domain": "vagrant", - "user_name": "vagrant" - }, - { - "event_subtype_full": "file_delete_event", - "event_type": "file", - "event_type_full": "file_event", - "file_name": "csrss.exe", - "file_path": "C:\\workspace\\red_ttp\\csrss.exe", - "opcode": 2, - "pid": 2256, - "process_name": "python.exe", - "process_path": "C:\\Python27\\python.exe", - "serial_event_id": 75, - "subtype": "modify", - "@timestamp": "1970-01-02T12:31:49.49701043Z", - "unique_pid": 54, - "user_domain": "vagrant", - "user_name": "vagrant" - }, - { - "event_subtype_full": "file_create_event", - "event_type": "file", - "event_type_full": "file_event", - "file_name": "smss.exe", - "file_path": "C:\\workspace\\red_ttp\\smss.exe", - "opcode": 0, - "pid": 2256, - "process_name": "python.exe", - "process_path": "C:\\Python27\\python.exe", - "serial_event_id": 76, - "subtype": "create", - "@timestamp": "1970-01-02T12:31:49.49701043Z", - "unique_pid": 54, - "user_domain": "vagrant", - "user_name": "vagrant" - }, - { - "event_subtype_full": "file_create_event", - "event_type": "file", - "event_type_full": "file_event", - "file_name": "CSRSS.EXE-006B4E4D.pf", - "file_path": "C:\\Windows\\Prefetch\\CSRSS.EXE-006B4E4D.pf", - "opcode": 0, - "pid": 896, - "process_name": "svchost.exe", - "process_path": "C:\\Windows\\System32\\svchost.exe", - "serial_event_id": 77, - "subtype": "create", - "@timestamp": "1970-01-02T12:31:49.49701043Z", - "unique_pid": 16, - "user_domain": "NT AUTHORITY", - "user_name": "SYSTEM" - }, - { - "command_line": "smss.exe", - "event_subtype_full": "creation_event", - "event_type": "process", - "event_type_full": "process_event", - "md5": "f49c54c4997a0401db0f6640a6111c52", - "opcode": 1, - "parent_process_name": "python.exe", - "parent_process_path": "C:\\Python27\\python.exe", - "pid": 3720, - "ppid": 2256, - "process_name": "smss.exe", - 
"process_path": "C:\\workspace\\red_ttp\\smss.exe", - "serial_event_id": 78, - "subtype": "create", - "@timestamp": "1970-01-02T12:31:49.49701043Z", - "unique_pid": 78, - "unique_ppid": 54, - "user_domain": "vagrant", - "user_name": "vagrant" - }, - { - "bytes_written_count": 80, - "event_subtype_full": "registry_modify_event", - "event_type": "registry", - "event_type_full": "registry_event", - "key_path": "\\REGISTRY\\MACHINE\\SAM\\SAM\\DOMAINS\\Account\\Users\\000003E9\\F", - "key_type": "binary", - "opcode": 1, - "pid": 536, - "process_name": "lsass.exe", - "process_path": "C:\\Windows\\System32\\lsass.exe", - "registry_key": "\\REGISTRY\\MACHINE\\SAM\\SAM\\DOMAINS\\Account\\Users\\000003E9", - "registry_path": "\\REGISTRY\\MACHINE\\SAM\\SAM\\DOMAINS\\Account\\Users\\000003E9\\F", - "registry_type": "binary", - "registry_value": "F", - "serial_event_id": 79, - "@timestamp": "1970-01-02T12:31:49.52056658Z", - "unique_pid": 9, - "user_name": "SYSTEM" - }, - { - "event_subtype_full": "termination_event", - "event_type": "process", - "event_type_full": "process_event", - "exit_code": 0, - "md5": "f49c54c4997a0401db0f6640a6111c52", - "opcode": 2, - "parent_process_name": "python.exe", - "parent_process_path": "C:\\Python27\\python.exe", - "pid": 3720, - "ppid": 2256, - "process_name": "smss.exe", - "process_path": "C:\\workspace\\red_ttp\\smss.exe", - "serial_event_id": 80, - "subtype": "terminate", - "@timestamp": "1970-01-02T12:31:49.54708675Z", - "unique_pid": 78, - "unique_ppid": 54, - "user_domain": "vagrant", - "user_name": "vagrant" - }, - { - "event_subtype_full": "file_delete_event", - "event_type": "file", - "event_type_full": "file_event", - "file_name": "smss.exe", - "file_path": "C:\\workspace\\red_ttp\\smss.exe", - "opcode": 2, - "pid": 2256, - "process_name": "python.exe", - "process_path": "C:\\Python27\\python.exe", - "serial_event_id": 81, - "subtype": "modify", - "@timestamp": "1970-01-02T12:31:49.54708675Z", - "unique_pid": 54, - "user_domain": "vagrant", - "user_name": "vagrant" - }, - { - "event_subtype_full": "file_create_event", - "event_type": "file", - "event_type_full": "file_event", - "file_name": "SMSS.EXE-8C66D82D.pf", - "file_path": "C:\\Windows\\Prefetch\\SMSS.EXE-8C66D82D.pf", - "opcode": 0, - "pid": 896, - "process_name": "svchost.exe", - "process_path": "C:\\Windows\\System32\\svchost.exe", - "serial_event_id": 82, - "subtype": "create", - "@timestamp": "1970-01-02T12:31:49.54708675Z", - "unique_pid": 16, - "user_domain": "NT AUTHORITY", - "user_name": "SYSTEM" - }, - { - "event_subtype_full": "file_create_event", - "event_type": "file", - "event_type_full": "file_event", - "file_name": "wininit.exe", - "file_path": "C:\\workspace\\red_ttp\\wininit.exe", - "opcode": 0, - "pid": 2256, - "process_name": "python.exe", - "process_path": "C:\\Python27\\python.exe", - "serial_event_id": 83, - "subtype": "create", - "@timestamp": "1970-01-02T12:31:49.54708675Z", - "unique_pid": 54, - "user_domain": "vagrant", - "user_name": "vagrant" - }, - { - "command_line": "wininit.exe", - "event_subtype_full": "creation_event", - "event_type": "process", - "event_type_full": "process_event", - "md5": "f49c54c4997a0401db0f6640a6111c52", - "opcode": 1, - "parent_process_name": "python.exe", - "parent_process_path": "C:\\Python27\\python.exe", - "pid": 1680, - "ppid": 2256, - "process_name": "wininit.exe", - "process_path": "C:\\workspace\\red_ttp\\wininit.exe", - "serial_event_id": 84, - "subtype": "create", - "@timestamp": "1970-01-02T12:31:49.54708675Z", - "unique_pid": 84, 
- "unique_ppid": 54, - "user_domain": "vagrant", - "user_name": "vagrant" - }, - { - "event_subtype_full": "termination_event", - "event_type": "process", - "event_type_full": "process_event", - "exit_code": 0, - "md5": "f49c54c4997a0401db0f6640a6111c52", - "opcode": 2, - "parent_process_name": "python.exe", - "parent_process_path": "C:\\Python27\\python.exe", - "pid": 1680, - "ppid": 2256, - "process_name": "wininit.exe", - "process_path": "C:\\workspace\\red_ttp\\wininit.exe", - "serial_event_id": 85, - "subtype": "terminate", - "@timestamp": "1970-01-02T12:31:49.59716307Z", - "unique_pid": 84, - "unique_ppid": 54, - "user_domain": "vagrant", - "user_name": "vagrant" - }, - { - "event_subtype_full": "file_delete_event", - "event_type": "file", - "event_type_full": "file_event", - "file_name": "wininit.exe", - "file_path": "C:\\workspace\\red_ttp\\wininit.exe", - "opcode": 2, - "pid": 2256, - "process_name": "python.exe", - "process_path": "C:\\Python27\\python.exe", - "serial_event_id": 86, - "subtype": "modify", - "@timestamp": "1970-01-02T12:31:49.59716307Z", - "unique_pid": 54, - "user_domain": "vagrant", - "user_name": "vagrant" - }, - { - "event_subtype_full": "file_create_event", - "event_type": "file", - "event_type_full": "file_event", - "file_name": "WININIT.EXE-F4D46129.pf", - "file_path": "C:\\Windows\\Prefetch\\WININIT.EXE-F4D46129.pf", - "opcode": 0, - "pid": 896, - "process_name": "svchost.exe", - "process_path": "C:\\Windows\\System32\\svchost.exe", - "serial_event_id": 87, - "subtype": "create", - "@timestamp": "1970-01-02T12:31:49.59716307Z", - "unique_pid": 16, - "user_domain": "NT AUTHORITY", - "user_name": "SYSTEM" - }, - { - "event_subtype_full": "file_create_event", - "event_type": "file", - "event_type_full": "file_event", - "file_name": "explorer.exe", - "file_path": "C:\\workspace\\red_ttp\\explorer.exe", - "opcode": 0, - "pid": 2256, - "process_name": "python.exe", - "process_path": "C:\\Python27\\python.exe", - "serial_event_id": 88, - "subtype": "create", - "@timestamp": "1970-01-02T12:31:49.59716307Z", - "unique_pid": 54, - "user_domain": "vagrant", - "user_name": "vagrant" - }, - { - "command_line": "explorer.exe", - "event_subtype_full": "creation_event", - "event_type": "process", - "event_type_full": "process_event", - "md5": "f49c54c4997a0401db0f6640a6111c52", - "opcode": 1, - "parent_process_name": "python.exe", - "parent_process_path": "C:\\Python27\\python.exe", - "pid": 4080, - "ppid": 2256, - "process_name": "explorer.exe", - "process_path": "C:\\workspace\\red_ttp\\explorer.exe", - "serial_event_id": 89, - "subtype": "create", - "@timestamp": "1970-01-02T12:31:49.59716307Z", - "unique_pid": 89, - "unique_ppid": 54, - "user_domain": "vagrant", - "user_name": "vagrant" - }, - { - "event_subtype_full": "termination_event", - "event_type": "process", - "event_type_full": "process_event", - "exit_code": 0, - "md5": "f49c54c4997a0401db0f6640a6111c52", - "opcode": 2, - "parent_process_name": "python.exe", - "parent_process_path": "C:\\Python27\\python.exe", - "pid": 4080, - "ppid": 2256, - "process_name": "explorer.exe", - "process_path": "C:\\workspace\\red_ttp\\explorer.exe", - "serial_event_id": 90, - "subtype": "terminate", - "@timestamp": "1970-01-02T12:31:49.6472394Z", - "unique_pid": 89, - "unique_ppid": 54, - "user_domain": "vagrant", - "user_name": "vagrant" - }, - { - "event_subtype_full": "file_delete_event", - "event_type": "file", - "event_type_full": "file_event", - "file_name": "explorer.exe", - "file_path": 
"C:\\workspace\\red_ttp\\explorer.exe", - "opcode": 2, - "pid": 2256, - "process_name": "python.exe", - "process_path": "C:\\Python27\\python.exe", - "serial_event_id": 91, - "subtype": "modify", - "@timestamp": "1970-01-02T12:31:49.6472394Z", - "unique_pid": 54, - "user_domain": "vagrant", - "user_name": "vagrant" - }, - { - "event_subtype_full": "file_create_event", - "event_type": "file", - "event_type_full": "file_event", - "file_name": "EXPLORER.EXE-854AF04C.pf", - "file_path": "C:\\Windows\\Prefetch\\EXPLORER.EXE-854AF04C.pf", - "opcode": 0, - "pid": 896, - "process_name": "svchost.exe", - "process_path": "C:\\Windows\\System32\\svchost.exe", - "serial_event_id": 92, - "subtype": "create", - "@timestamp": "1970-01-02T12:31:49.6472394Z", - "unique_pid": 16, - "user_domain": "NT AUTHORITY", - "user_name": "SYSTEM" - }, - { - "event_subtype_full": "termination_event", - "event_type": "process", - "event_type_full": "process_event", - "exit_code": 0, - "md5": "21f73cd55626f0ec9fbce53eafbef128", - "opcode": 2, - "parent_process_name": "cmd.exe", - "parent_process_path": "C:\\Windows\\System32\\cmd.exe", - "pid": 2256, - "ppid": 1788, - "process_name": "python.exe", - "process_path": "C:\\Python27\\python.exe", - "serial_event_id": 93, - "subtype": "terminate", - "@timestamp": "1970-01-02T12:31:49.6472394Z", - "unique_pid": 54, - "unique_ppid": 53, - "user_domain": "vagrant", - "user_name": "vagrant" - }, - { - "event_subtype_full": "termination_event", - "event_type": "process", - "event_type_full": "process_event", - "exit_code": 0, - "md5": "5746bd7e255dd6a8afa06f7c42c1ba41", - "opcode": 2, - "parent_process_name": "python.exe", - "parent_process_path": "C:\\Python27\\python.exe", - "pid": 1788, - "ppid": 420, - "process_name": "cmd.exe", - "process_path": "C:\\Windows\\System32\\cmd.exe", - "serial_event_id": 94, - "subtype": "terminate", - "@timestamp": "1970-01-02T12:31:49.6472394Z", - "unique_pid": 53, - "unique_ppid": 48, - "user_domain": "vagrant", - "user_name": "vagrant" - }, - { - "event_subtype_full": "file_create_event", - "event_type": "file", - "event_type_full": "file_event", - "file_name": "something.json", - "file_path": "C:\\workspace\\dev\\TestLogs\\something.json", - "opcode": 0, - "pid": 420, - "process_name": "python.exe", - "process_path": "C:\\Python27\\python.exe", - "serial_event_id": 95, - "subtype": "create", - "@timestamp": "1970-01-02T12:31:49.6472394Z", - "unique_pid": 48, - "user_domain": "vagrant", - "user_name": "vagrant" - }, - { - "event_subtype_full": "file_create_event", - "event_type": "file", - "event_type_full": "file_event", - "file_name": "something.json", - "file_path": "C:\\workspace\\Libraries\\myapp\\myapp\\python\\myapp\\something.json", - "opcode": 0, - "pid": 420, - "process_name": "python.exe", - "process_path": "C:\\Python27\\python.exe", - "serial_event_id": 96, - "subtype": "create", - "@timestamp": "1970-01-02T12:31:49.6472394Z", - "unique_pid": 48, - "user_domain": "vagrant", - "user_name": "vagrant" - }, - { - "authentication_id": 854482244, - "command_line": "net localgroup administrators findme2", - "event_subtype_full": "creation_event", - "event_type": "process", - "event_type_full": "process_event", - "md5": "63dd6fbaabf881385899fd39df13dce3", - "opcode": 1, - "original_file_name": "NET.exe", - "parent_process_name": "cmd.exe", - "parent_process_path": "C:\\Windows\\System32\\cmd.exe", - "pid": 3608, - "ppid": 392, - "process_name": "net.exe", - "process_path": "C:\\Windows\\System32\\net.exe", - "serial_event_id": 97, - 
"subtype": "create", - "@timestamp": "1970-01-02T12:33:25.08349437Z", - "unique_pid": 750058, - "unique_ppid": 707545, - "user_domain": "vagrant", - "user_name": "vagrant" - }, - { - "authentication_id": 854482244, - "command_line": "C:\\Windows\\system32\\net1 localgroup administrators findme2", - "event_subtype_full": "creation_event", - "event_type": "process", - "event_type_full": "process_event", - "md5": "3b6928bc39e5530cead1e99269e7b1ee", - "opcode": 1, - "original_file_name": "net1.exe", - "parent_process_name": "net.exe", - "parent_process_path": "C:\\Windows\\System32\\net.exe", - "pid": 1348, - "ppid": 3608, - "process_name": "net1.exe", - "process_path": "C:\\Windows\\System32\\net1.exe", - "serial_event_id": 98, - "subtype": "create", - "@timestamp": "1970-01-02T12:33:25.08380637Z", - "unique_pid": 750059, - "unique_ppid": 750058, - "user_domain": "vagrant", - "user_name": "vagrant" - }, - { - "authentication_id": 13728872, - "command_line": "C:\\Windows\\Microsoft.NET\\Framework\\v4.0.30319\\msbuild.exe tmp-file.csproj", - "event_subtype_full": "creation_event", - "event_type": "process", - "event_type_full": "process_event", - "md5": "4b736b85e5de65e572f28a91e31b99bf", - "opcode": 1, - "original_file_name": "MSBuild.exe", - "parent_process_name": "python.exe", - "parent_process_path": "C:\\Python27\\python.exe", - "pid": 860, - "ppid": 1196, - "process_name": "MSBuild.exe", - "process_path": "C:\\Windows\\Microsoft.NET\\Framework\\v4.0.30319\\MSBuild.exe", - "serial_event_id": 75273, - "subtype": "create", - "@timestamp": "1970-01-02T12:36:02.48450211Z", - "unique_pid": 75273, - "unique_ppid": 75248, - "user_domain": "vagrant", - "user_name": "vagrant" - }, - { - "event_subtype_full": "termination_event", - "event_type": "process", - "event_type_full": "process_event", - "exit_code": 0, - "md5": "4b736b85e5de65e572f28a91e31b99bf", - "opcode": 2, - "original_file_name": "MSBuild.exe", - "parent_process_name": "python.exe", - "parent_process_path": "C:\\Python27\\python.exe", - "pid": 860, - "ppid": 1196, - "process_name": "MSBuild.exe", - "process_path": "C:\\Windows\\Microsoft.NET\\Framework\\v4.0.30319\\MSBuild.exe", - "serial_event_id": 75303, - "subtype": "terminate", - "@timestamp": "1970-01-02T12:36:02.49348368Z", - "unique_pid": 75273, - "unique_ppid": 75248, - "user_domain": "vagrant", - "user_name": "vagrant" - }, - { - "destination_address": "10.6.48.157", - "destination_port": 8000, - "event_subtype_full": "ipv4_connection_attempt_event", - "event_type": "network", - "event_type_full": "network_event", - "opcode": 12, - "pid": 860, - "process_name": "MSBuild.exe", - "process_path": "C:\\Windows\\Microsoft.NET\\Framework\\v4.0.30319\\MSBuild.exe", - "protocol": "tcp", - "serial_event_id": 75304, - "source_address": "10.6.48.157", - "source_port": 52178, - "subtype": "outgoing", - "@timestamp": "1970-01-02T12:36:02.49303976Z", - "unique_pid": 75273, - "user_domain": "vagrant", - "user_name": "vagrant" - }, - { - "destination_address": "10.6.48.157", - "destination_port": 8000, - "event_subtype_full": "ipv4_connection_attempt_event", - "event_type": "network", - "event_type_full": "network_event", - "mysterious_field": { - "num": 100, - "outer_cross_match": "s3-c-x-y", - "subarray": [ - { - "a": "s0-a", - "b": [ - "s0-b" - ], - "c": [ - { - "x": { - "y": "s0-c-x-y" - }, - "z": "s0-c0-x-z" - }, - { - "x": { - "y": "s0-c-x-y" - }, - "z": "s0-c1-x-z" - } - ], - "cross_match": "s0-c1-x-z" - }, - { - "a": "s1-a", - "b": [ - "s1-b" - ], - "c": [] - }, - { - "a": "s2-a", - 
"b": [ - "s2-b" - ], - "c": [] - }, - { - "a": "s3-a", - "b": [ - "s3-b" - ], - "c": [ - { - "x": { - "y": "s3-c-x-y" - }, - "z": "s3-c-x-z" - } - ] - } - ], - "this_is_for_testing_nested_data": "true" - }, - "opcode": 12, - "pid": 10000, - "process_name": "MSBuild.exe", - "process_path": "C:\\Windows\\Microsoft.NET\\Framework\\v4.0.30319\\MSBuild.exe", - "protocol": "tcp", - "serial_event_id": 75305, - "source_address": "10.6.48.157", - "source_port": 52178, - "subtype": "outgoing", - "@timestamp": "1970-01-02T12:36:02.49303976Z", - "unique_pid": 99999, - "user_domain": "vagrant", - "user_name": "vagrant" - } -] diff --git a/x-pack/plugin/eql/qa/common/src/main/resources/test_queries.toml b/x-pack/plugin/eql/qa/common/src/main/resources/test_queries.toml index 3dcdb3da1452f..308fe5292f41e 100644 --- a/x-pack/plugin/eql/qa/common/src/main/resources/test_queries.toml +++ b/x-pack/plugin/eql/qa/common/src/main/resources/test_queries.toml @@ -860,8 +860,9 @@ expected_event_ids = [67, 68, 69, 70, [[queries]] name = "sequencesOnDifferentEventTypes7" +# query matches with a timespan of over 5000ms for millis and over 50ms for nano test query = ''' -sequence with maxspan=500ms +sequence with maxspan=50ms [file where event_subtype_full : "file_create_event"] by file_path [process where opcode == 1] by process_path [process where opcode == 2] by process_path diff --git a/x-pack/plugin/eql/qa/common/src/main/resources/test_queries_date_nanos.toml b/x-pack/plugin/eql/qa/common/src/main/resources/test_queries_date_nanos.toml index 8c394ba07e124..10bc2d8519d02 100644 --- a/x-pack/plugin/eql/qa/common/src/main/resources/test_queries_date_nanos.toml +++ b/x-pack/plugin/eql/qa/common/src/main/resources/test_queries_date_nanos.toml @@ -4,20 +4,20 @@ [[queries]] name = "filterOnDateNanosWithMillis" query = ''' - process where @timestamp == "1970-01-02T12:31:25.510Z" + process where @timestamp == "1974-03-02T19:53:16.510Z" ''' expected_event_ids = [2, 3, 4, 5, 6, 7] [[queries]] name = "filterOnDateNanosWithNanos" query = ''' - process where @timestamp == "1970-01-02T12:31:49.29436514Z" + process where @timestamp == "1974-03-03T02:22:54.294365140Z" ''' expected_event_ids = [54] [[queries]] name = "filterOnDateNanosWithNanosRange" query = ''' - process where @timestamp > "1970-01-02T12:31:49.29420914Z" and @timestamp < "1970-01-02T12:31:49.35036949Z" + process where @timestamp > "1974-03-03T02:22:54.294209140Z" and @timestamp < "1974-03-03T02:22:54.350369490Z" ''' expected_event_ids = [54, 56, 58, 62] diff --git a/x-pack/plugin/eql/qa/multi-cluster-with-security/src/test/java/org/elasticsearch/xpack/eql/EqlDateNanosIT.java b/x-pack/plugin/eql/qa/multi-cluster-with-security/src/test/java/org/elasticsearch/xpack/eql/EqlDateNanosIT.java index 61e453092dd1f..f468779ba59e0 100644 --- a/x-pack/plugin/eql/qa/multi-cluster-with-security/src/test/java/org/elasticsearch/xpack/eql/EqlDateNanosIT.java +++ b/x-pack/plugin/eql/qa/multi-cluster-with-security/src/test/java/org/elasticsearch/xpack/eql/EqlDateNanosIT.java @@ -9,12 +9,12 @@ import org.elasticsearch.test.eql.EqlDateNanosSpecTestCase; -import static org.elasticsearch.test.eql.DataLoader.DATE_NANOS_INDEX; +import static org.elasticsearch.test.eql.DataLoader.TEST_NANOS_INDEX; import static org.elasticsearch.xpack.eql.RemoteClusterTestUtils.remoteClusterIndex; public class EqlDateNanosIT extends EqlDateNanosSpecTestCase { public EqlDateNanosIT(String query, String name, long[] eventIds) { - super(remoteClusterIndex(DATE_NANOS_INDEX), query, name, eventIds); + 
super(remoteClusterIndex(TEST_NANOS_INDEX), query, name, eventIds); } } diff --git a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/assembler/BoxedQueryRequest.java b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/assembler/BoxedQueryRequest.java index 919dc959d6d3f..27a5ce473ea4d 100644 --- a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/assembler/BoxedQueryRequest.java +++ b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/assembler/BoxedQueryRequest.java @@ -78,7 +78,8 @@ public void nextAfter(Ordinal ordinal) { */ public BoxedQueryRequest from(Ordinal begin) { from = begin; - timestampRange.gte(begin != null ? begin.timestamp() : null); + // the range limits need to be serializable: convert to string for StreamOutput to be able to handle the value + timestampRange.gte(begin != null ? begin.timestamp().toString() : null); return this; } @@ -88,7 +89,7 @@ public BoxedQueryRequest from(Ordinal begin) { */ public BoxedQueryRequest to(Ordinal end) { to = end; - timestampRange.lte(end != null ? end.timestamp() : null); + timestampRange.lte(end != null ? end.timestamp().toString() : null); return this; } diff --git a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/assembler/Criterion.java b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/assembler/Criterion.java index 02b68f0a96d35..ffadccafc3755 100644 --- a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/assembler/Criterion.java +++ b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/assembler/Criterion.java @@ -11,6 +11,7 @@ import org.elasticsearch.xpack.eql.EqlIllegalArgumentException; import org.elasticsearch.xpack.eql.execution.search.Ordinal; import org.elasticsearch.xpack.eql.execution.search.QueryRequest; +import org.elasticsearch.xpack.eql.execution.search.Timestamp; import org.elasticsearch.xpack.ql.execution.search.extractor.HitExtractor; import java.util.List; @@ -77,13 +78,11 @@ public Object[] key(SearchHit hit) { @SuppressWarnings({ "unchecked" }) public Ordinal ordinal(SearchHit hit) { Object ts = timestamp.extract(hit); - if (ts instanceof Number == false) { - throw new EqlIllegalArgumentException("Expected timestamp as long but got {}", ts); + if (ts instanceof Timestamp == false) { + throw new EqlIllegalArgumentException("Expected timestamp as a Timestamp but got {}", ts.getClass()); } - long timestamp = ((Number) ts).longValue(); Comparable tbreaker = null; - if (tiebreaker != null) { Object tb = tiebreaker.extract(hit); if (tb != null && tb instanceof Comparable == false) { @@ -97,7 +96,7 @@ public Ordinal ordinal(SearchHit hit) { throw new EqlIllegalArgumentException("Expected _shard_doc/implicit tiebreaker as long but got [{}]", implicitTbreaker); } long implicitTiebreaker = ((Number) implicitTbreaker).longValue(); - return new Ordinal(timestamp, tbreaker, implicitTiebreaker); + return new Ordinal((Timestamp) ts, tbreaker, implicitTiebreaker); } @Override diff --git a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/search/MillisTimestamp.java b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/search/MillisTimestamp.java new file mode 100644 index 0000000000000..cc5bcb7982e31 --- /dev/null +++ b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/search/MillisTimestamp.java @@ -0,0 +1,42 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.eql.execution.search; + +import java.time.Instant; + +// Timestamp implementation able to hold a timestamp with millisecond accuracy. +public class MillisTimestamp extends Timestamp { + private final long timestamp; + private Instant instant = null; + + MillisTimestamp(long millis) { + timestamp = millis; + } + + @Override + public int compareTo(Timestamp other) { + return other instanceof MillisTimestamp ? Long.compare(timestamp, ((MillisTimestamp) other).timestamp) : super.compareTo(other); + } + + @Override + public long delta(Timestamp other) { + return other instanceof MillisTimestamp ? (timestamp - ((MillisTimestamp) other).timestamp) * NANOS_PER_MILLI : super.delta(other); + } + + @Override + public Instant instant() { + if (instant == null) { + instant = Instant.ofEpochMilli(timestamp); + } + return instant; + } + + public String asString() { + return String.valueOf(timestamp); + } +} diff --git a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/search/NanosTimestamp.java b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/search/NanosTimestamp.java new file mode 100644 index 0000000000000..10e0c07f15add --- /dev/null +++ b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/search/NanosTimestamp.java @@ -0,0 +1,31 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.eql.execution.search; + +import java.time.Instant; + +// Timestamp implementation able to hold a timestamp with nanosecond accuracy. +public class NanosTimestamp extends Timestamp { + // NB: doubles are not accurate enough to hold six digit micros with granularity for current dates. + private final Instant timestamp; + + NanosTimestamp(long millis, long micros) { + timestamp = Instant.ofEpochMilli(millis).plusNanos(micros); + } + + @Override + public Instant instant() { + return timestamp; + } + + public String asString() { + long nanos = timestamp.getNano(); + long millisOfSecond = nanos / NANOS_PER_MILLI; + return (timestamp.getEpochSecond() * MILLIS_PER_SECOND + millisOfSecond) + "." 
+ (nanos - millisOfSecond * NANOS_PER_MILLI); + } +} diff --git a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/search/Ordinal.java b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/search/Ordinal.java index 2b672b59d4739..d1e8e3f97c254 100644 --- a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/search/Ordinal.java +++ b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/search/Ordinal.java @@ -9,23 +9,24 @@ import org.apache.lucene.util.Accountable; import org.apache.lucene.util.RamUsageEstimator; + import java.util.Objects; public class Ordinal implements Comparable, Accountable { private static final long SHALLOW_SIZE = RamUsageEstimator.shallowSizeOfInstance(Ordinal.class); - private final long timestamp; + private final Timestamp timestamp; private final Comparable tiebreaker; private final long implicitTiebreaker; // _shard_doc tiebreaker automatically added by ES PIT - public Ordinal(long timestamp, Comparable tiebreaker, long implicitTiebreaker) { + public Ordinal(Timestamp timestamp, Comparable tiebreaker, long implicitTiebreaker) { this.timestamp = timestamp; this.tiebreaker = tiebreaker; this.implicitTiebreaker = implicitTiebreaker; } - public long timestamp() { + public Timestamp timestamp() { return timestamp; } @@ -70,16 +71,15 @@ public String toString() { @Override public int compareTo(Ordinal o) { - if (timestamp < o.timestamp) { + int timestampCompare = timestamp.compareTo(o.timestamp); + if (timestampCompare < 0) { return -1; } - if (timestamp == o.timestamp) { + if (timestampCompare == 0) { if (tiebreaker != null) { if (o.tiebreaker != null) { - if (tiebreaker.compareTo(o.tiebreaker) == 0) { - return Long.compare(implicitTiebreaker, o.implicitTiebreaker); - } - return tiebreaker.compareTo(o.tiebreaker); + int tiebreakerCompare = tiebreaker.compareTo(o.tiebreaker); + return tiebreakerCompare == 0 ? Long.compare(implicitTiebreaker, o.implicitTiebreaker) : tiebreakerCompare; } else { return -1; } @@ -120,7 +120,7 @@ public boolean afterOrAt(Ordinal other) { public Object[] toArray() { return tiebreaker != null ? - new Object[] { timestamp, tiebreaker, implicitTiebreaker } - : new Object[] { timestamp, implicitTiebreaker }; + new Object[] { timestamp.toString(), tiebreaker, implicitTiebreaker } + : new Object[] { timestamp.toString(), implicitTiebreaker }; } } diff --git a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/search/Timestamp.java b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/search/Timestamp.java new file mode 100644 index 0000000000000..3eae288724042 --- /dev/null +++ b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/search/Timestamp.java @@ -0,0 +1,57 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.eql.execution.search; + +import java.time.Instant; + +import static java.time.temporal.ChronoUnit.NANOS; + +// wrapper for Unix epoch timestamps with different resolutions. 
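+// For example (illustrative values, not taken from the test data): of("1617200000123") produces a millisecond-resolution
+// MillisTimestamp, while of("1617200000123.456789") produces a nanosecond-resolution NanosTimestamp.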
+public abstract class Timestamp { + static final long MILLIS_PER_SECOND = 1_000L; + static final long NANOS_PER_MILLI = 1_000_000L; + private static final long[] MICROS_MULTIPLIER = {0L, 100_000L, 10_000L, 1_000L, 1_00L, 10L}; + + private String source; + + abstract Instant instant(); + + int compareTo(Timestamp other) { + return instant().compareTo(other.instant()); + } + + public static Timestamp of(String milliseconds) { + Timestamp timestamp; + // ES will provide a . with nano-timestamps + int dotIndex = milliseconds.lastIndexOf('.'); + if (dotIndex > 0) { + long millis = Long.parseLong(milliseconds.substring(0, dotIndex)); + int digits = milliseconds.length() - dotIndex - 1; + long micros = (digits >= 6) ? Long.parseLong(milliseconds.substring(dotIndex + 1, dotIndex + 1 + 6)) : + Long.parseLong(milliseconds.substring(dotIndex + 1)) * MICROS_MULTIPLIER[digits]; + timestamp = new NanosTimestamp(millis, micros); + } else { + timestamp = new MillisTimestamp(Long.parseLong(milliseconds)); + } + + timestamp.source = milliseconds; + return timestamp; + } + + // time delta in nanos between this and other instance + public long delta(Timestamp other) { + return other.instant().until(instant(), NANOS); + } + + @Override + public String toString() { + return source != null ? source : asString(); + } + + abstract String asString(); +} diff --git a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/search/extractor/TimestampFieldHitExtractor.java b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/search/extractor/TimestampFieldHitExtractor.java index 41a5961f13e90..f6b1ffd01ded6 100644 --- a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/search/extractor/TimestampFieldHitExtractor.java +++ b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/search/extractor/TimestampFieldHitExtractor.java @@ -7,6 +7,8 @@ package org.elasticsearch.xpack.eql.execution.search.extractor; +import org.elasticsearch.xpack.eql.execution.search.Timestamp; + public class TimestampFieldHitExtractor extends FieldHitExtractor { public TimestampFieldHitExtractor(FieldHitExtractor target) { @@ -16,6 +18,6 @@ public TimestampFieldHitExtractor(FieldHitExtractor target) { @Override protected Object parseEpochMillisAsString(String str) { - return Long.parseLong(str); + return Timestamp.of(str); } } diff --git a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/sequence/SequenceMatcher.java b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/sequence/SequenceMatcher.java index 4228eaaea754c..0bced10cb822b 100644 --- a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/sequence/SequenceMatcher.java +++ b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/sequence/SequenceMatcher.java @@ -69,7 +69,7 @@ public void clear() { // Set of completed sequences - separate to avoid polluting the other stages // It is a set since matches are ordered at insertion time based on the ordinal of the first entry private final Set completed; - private final long maxSpanInMillis; + private final long maxSpanInNanos; private final boolean descending; @@ -91,7 +91,7 @@ public SequenceMatcher(int stages, boolean descending, TimeValue maxSpan, Limit this.keyToSequences = new KeyToSequences(completionStage); this.completed = new TreeSet<>(); - this.maxSpanInMillis = maxSpan.millis(); + this.maxSpanInNanos = maxSpan.nanos(); this.limit = limit; this.circuitBreaker = circuitBreaker; @@ -183,7 +183,7 
@@ private void match(int stage, SequenceKey key, Ordinal ordinal, HitReference hit // // maxspan - if (maxSpanInMillis > 0 && (ordinal.timestamp() - sequence.startOrdinal().timestamp() > maxSpanInMillis)) { + if (maxSpanInNanos > 0 && ordinal.timestamp().delta(sequence.startOrdinal().timestamp()) > maxSpanInNanos) { stats.rejectionMaxspan++; return; } diff --git a/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/execution/assembler/ImplicitTiebreakerTests.java b/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/execution/assembler/ImplicitTiebreakerTests.java index ec4f4eb131026..345ded89fd706 100644 --- a/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/execution/assembler/ImplicitTiebreakerTests.java +++ b/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/execution/assembler/ImplicitTiebreakerTests.java @@ -65,7 +65,7 @@ public void query(QueryRequest r, ActionListener l) { int previous = ordinal - 1; // except the first request, the rest should have the previous response's search_after _shard_doc value assertArrayEquals("Elements at stage " + ordinal + " do not match", - r.searchSource().searchAfter(), new Object[] { (long) previous, implicitTiebreakerValues.get(previous) }); + r.searchSource().searchAfter(), new Object[] { String.valueOf(previous), implicitTiebreakerValues.get(previous) }); } long sortValue = implicitTiebreakerValues.get(ordinal); diff --git a/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/execution/assembler/SequenceSpecTests.java b/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/execution/assembler/SequenceSpecTests.java index a51066e8e3a29..d73a3f14d1f17 100644 --- a/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/execution/assembler/SequenceSpecTests.java +++ b/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/execution/assembler/SequenceSpecTests.java @@ -40,6 +40,7 @@ import org.elasticsearch.xpack.eql.execution.search.HitReference; import org.elasticsearch.xpack.eql.execution.search.QueryClient; import org.elasticsearch.xpack.eql.execution.search.QueryRequest; +import org.elasticsearch.xpack.eql.execution.search.Timestamp; import org.elasticsearch.xpack.eql.execution.sequence.SequenceMatcher; import org.elasticsearch.xpack.eql.execution.sequence.TumblingWindow; import org.elasticsearch.xpack.eql.session.Payload; @@ -85,8 +86,8 @@ static class TimestampExtractor extends EmptyHitExtractor { static final TimestampExtractor INSTANCE = new TimestampExtractor(); @Override - public Long extract(SearchHit hit) { - return (long) hit.docId(); + public Timestamp extract(SearchHit hit) { + return Timestamp.of(String.valueOf(hit.docId())); } } diff --git a/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/execution/search/CriterionOrdinalExtractionTests.java b/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/execution/search/CriterionOrdinalExtractionTests.java index 8ee25b0471b2c..a6240cef8bb8b 100644 --- a/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/execution/search/CriterionOrdinalExtractionTests.java +++ b/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/execution/search/CriterionOrdinalExtractionTests.java @@ -30,6 +30,7 @@ import static java.util.Collections.singletonList; import static org.elasticsearch.xpack.eql.EqlTestUtils.randomSearchLongSortValues; import static org.elasticsearch.xpack.eql.EqlTestUtils.randomSearchSortValues; +import static org.elasticsearch.xpack.eql.execution.search.OrdinalTests.randomTimestamp; public class 
CriterionOrdinalExtractionTests extends ESTestCase { private String tsField = "timestamp"; @@ -40,7 +41,7 @@ public class CriterionOrdinalExtractionTests extends ESTestCase { private HitExtractor implicitTbExtractor = ImplicitTiebreakerHitExtractor.INSTANCE; public void testTimeOnly() throws Exception { - long time = randomLong(); + Object time = randomTimestamp(); long implicitTbValue = randomLong(); Ordinal ordinal = ordinal(searchHit(time, null, new Object[] { implicitTbValue }), false); assertEquals(time, ordinal.timestamp()); @@ -49,7 +50,7 @@ public void testTimeOnly() throws Exception { } public void testTimeAndTiebreaker() throws Exception { - long time = randomLong(); + Object time = randomTimestamp(); long tb = randomLong(); long implicitTbValue = randomLong(); Ordinal ordinal = ordinal(searchHit(time, tb, new Object[] { implicitTbValue }), true); @@ -59,7 +60,7 @@ public void testTimeAndTiebreaker() throws Exception { } public void testTimeAndTiebreakerNull() throws Exception { - long time = randomLong(); + Object time = randomTimestamp(); Ordinal ordinal = ordinal(searchHit(time, null), true); assertEquals(time, ordinal.timestamp()); assertNull(ordinal.tiebreaker()); @@ -74,7 +75,7 @@ public void testTimeNotComparable() throws Exception { } public void testImplicitTiebreakerMissing() throws Exception { - SearchHit hit = searchHit(randomLong(), null, new Object[0]); + SearchHit hit = searchHit(randomTimestamp(), null, new Object[0]); Criterion criterion = new Criterion(0, null, emptyList(), tsExtractor, null, implicitTbExtractor, randomBoolean()); EqlIllegalArgumentException exception = expectThrows(EqlIllegalArgumentException.class, () -> criterion.ordinal(hit)); @@ -82,7 +83,7 @@ public void testImplicitTiebreakerMissing() throws Exception { } public void testImplicitTiebreakerNotANumber() throws Exception { - SearchHit hit = searchHit(randomLong(), null, new Object[] { "test string" }); + SearchHit hit = searchHit(randomTimestamp(), null, new Object[] { "test string" }); Criterion criterion = new Criterion(0, null, emptyList(), tsExtractor, null, implicitTbExtractor, randomBoolean()); EqlIllegalArgumentException exception = expectThrows(EqlIllegalArgumentException.class, () -> criterion.ordinal(hit)); @@ -111,7 +112,7 @@ public String getWriteableName() { public void writeTo(StreamOutput out) throws IOException { } }; - SearchHit hit = searchHit(randomLong(), o); + SearchHit hit = searchHit(randomTimestamp(), o); Criterion criterion = new Criterion(0, null, emptyList(), tsExtractor, badExtractor, implicitTbExtractor, false); EqlIllegalArgumentException exception = expectThrows(EqlIllegalArgumentException.class, () -> criterion.ordinal(hit)); diff --git a/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/execution/search/OrdinalTests.java b/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/execution/search/OrdinalTests.java index 4fe9f70d4a3d0..b527af7beba48 100644 --- a/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/execution/search/OrdinalTests.java +++ b/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/execution/search/OrdinalTests.java @@ -13,16 +13,16 @@ public class OrdinalTests extends ESTestCase { public void testCompareToDifferentTs() { - long ts1 = randomLong(); - long ts2 = randomValueOtherThan(ts1, () -> randomLong()); + Timestamp ts1 = randomTimestamp(); + Timestamp ts2 = randomValueOtherThan(ts1, OrdinalTests::randomTimestamp); Ordinal one = new Ordinal(ts1, (Comparable) randomLong(), randomLong()); Ordinal two = new 
Ordinal(ts2, (Comparable) randomLong(), randomLong()); - assertEquals(Long.valueOf(one.timestamp()).compareTo(two.timestamp()), one.compareTo(two)); + assertEquals(ts1.compareTo(ts2), one.compareTo(two)); } public void testCompareToSameTsDifferentTie() { - long ts = randomLong(); + Timestamp ts = randomTimestamp(); Comparable tie1 = (Comparable) randomLong(); Comparable tie2 = randomValueOtherThan(tie1, () -> (Comparable) randomLong()); Ordinal one = new Ordinal(ts, tie1, randomLong()); @@ -32,7 +32,7 @@ public void testCompareToSameTsDifferentTie() { } public void testCompareToSameTsOneTieNull() { - long ts = randomLong(); + Timestamp ts = randomTimestamp(); Ordinal one = new Ordinal(ts, (Comparable) randomLong(), randomLong()); Ordinal two = new Ordinal(ts, null, randomLong()); @@ -40,7 +40,7 @@ public void testCompareToSameTsOneTieNull() { } public void testCompareToSameTsSameTieSameImplicitTb() { - long ts = randomLong(); + Timestamp ts = randomTimestamp(); Comparable c = randomLong(); long implicitTb = randomLong(); Ordinal one = new Ordinal(ts, c, implicitTb); @@ -52,7 +52,7 @@ public void testCompareToSameTsSameTieSameImplicitTb() { } public void testCompareToSameTsSameTieDifferentImplicitTb() { - long ts = randomLong(); + Timestamp ts = randomTimestamp(); Comparable c = randomLong(); long implicitTb = randomLong(); Ordinal one = new Ordinal(ts, c, implicitTb); @@ -62,7 +62,7 @@ public void testCompareToSameTsSameTieDifferentImplicitTb() { } public void testCompareToSameTsSameTieNullSameImplicitTb() { - long ts = randomLong(); + Timestamp ts = randomTimestamp(); long implicitTb = randomLong(); Ordinal one = new Ordinal(ts, null, implicitTb); Ordinal two = new Ordinal(ts, null, implicitTb); @@ -73,7 +73,7 @@ public void testCompareToSameTsSameTieNullSameImplicitTb() { } public void testCompareToSameTsSameTieNullDifferentImplicitTb() { - long ts = randomLong(); + Timestamp ts = randomTimestamp(); long implicitTb1 = randomLong(); long implicitTb2 = randomValueOtherThan(implicitTb1, () -> randomLong()); Ordinal one = new Ordinal(ts, null, implicitTb1); @@ -83,21 +83,21 @@ public void testCompareToSameTsSameTieNullDifferentImplicitTb() { } public void testTestBetween() { - Ordinal before = new Ordinal(randomLongBetween(1000, 2000), (Comparable) randomLong(), randomLong()); - Ordinal between = new Ordinal(randomLongBetween(3000, 4000), (Comparable) randomLong(), randomLong()); - Ordinal after = new Ordinal(randomLongBetween(5000, 6000), (Comparable) randomLong(), randomLong()); + Ordinal before = new Ordinal(randomTimestampBetween(1000, 2000), (Comparable) randomLong(), randomLong()); + Ordinal between = new Ordinal(randomTimestampBetween(3000, 4000), (Comparable) randomLong(), randomLong()); + Ordinal after = new Ordinal(randomTimestampBetween(5000, 6000), (Comparable) randomLong(), randomLong()); assertTrue(before.between(before, after)); assertTrue(after.between(before, after)); assertTrue(between.between(before, after)); - assertFalse(new Ordinal(randomLongBetween(0, 999), null, randomLong()).between(before, after)); - assertFalse(new Ordinal(randomLongBetween(7000, 8000), null, randomLong()).between(before, after)); + assertFalse(new Ordinal(randomTimestampBetween(0, 999), null, randomLong()).between(before, after)); + assertFalse(new Ordinal(randomTimestampBetween(7000, 8000), null, randomLong()).between(before, after)); } public void testTestBefore() { - Ordinal before = new Ordinal(randomLongBetween(1000, 2000), (Comparable) randomLong(), randomLong()); - Ordinal after = new 
Ordinal(randomLongBetween(5000, 6000), (Comparable) randomLong(), randomLong()); + Ordinal before = new Ordinal(randomTimestampBetween(1000, 2000), (Comparable) randomLong(), randomLong()); + Ordinal after = new Ordinal(randomTimestampBetween(5000, 6000), (Comparable) randomLong(), randomLong()); assertTrue(before.before(after)); assertFalse(before.before(before)); @@ -105,8 +105,8 @@ public void testTestBefore() { } public void testBeforeOrAt() { - Ordinal before = new Ordinal(randomLongBetween(1000, 2000), (Comparable) randomLong(), randomLong()); - Ordinal after = new Ordinal(randomLongBetween(5000, 6000), (Comparable) randomLong(), randomLong()); + Ordinal before = new Ordinal(randomTimestampBetween(1000, 2000), (Comparable) randomLong(), randomLong()); + Ordinal after = new Ordinal(randomTimestampBetween(5000, 6000), (Comparable) randomLong(), randomLong()); assertTrue(before.beforeOrAt(after)); assertTrue(before.beforeOrAt(before)); @@ -114,8 +114,8 @@ public void testBeforeOrAt() { } public void testTestAfter() { - Ordinal before = new Ordinal(randomLongBetween(1000, 2000), (Comparable) randomLong(), randomLong()); - Ordinal after = new Ordinal(randomLongBetween(5000, 6000), (Comparable) randomLong(), randomLong()); + Ordinal before = new Ordinal(randomTimestampBetween(1000, 2000), (Comparable) randomLong(), randomLong()); + Ordinal after = new Ordinal(randomTimestampBetween(5000, 6000), (Comparable) randomLong(), randomLong()); assertTrue(after.after(before)); assertFalse(after.after(after)); @@ -123,11 +123,24 @@ public void testTestAfter() { } public void testAfterOrAt() { - Ordinal before = new Ordinal(randomLongBetween(1000, 2000), (Comparable) randomLong(), randomLong()); - Ordinal after = new Ordinal(randomLongBetween(5000, 6000), (Comparable) randomLong(), randomLong()); + Ordinal before = new Ordinal(randomTimestampBetween(1000, 2000), (Comparable) randomLong(), randomLong()); + Ordinal after = new Ordinal(randomTimestampBetween(5000, 6000), (Comparable) randomLong(), randomLong()); assertTrue(after.afterOrAt(before)); assertTrue(after.afterOrAt(after)); assertFalse(before.afterOrAt(after)); } + + static Timestamp randomTimestamp() { + final long MAX_MICROS = 999_999L; + // range chosen so that the value is convertible to Instant + long millis = randomLongBetween(Long.MIN_VALUE + MAX_MICROS, Long.MAX_VALUE - MAX_MICROS); + String timestamp = millis + (randomBoolean() ? "" : "." + randomLongBetween(0, MAX_MICROS)); + return Timestamp.of(timestamp); + } + + static Timestamp randomTimestampBetween(long from, long to) { + long millis = randomLongBetween(from, to); + return Timestamp.of(millis + (randomBoolean() ? 
"" : ".0")); + } } diff --git a/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/execution/sequence/CircuitBreakerTests.java b/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/execution/sequence/CircuitBreakerTests.java index f5922921cc59a..2f3edabd6972b 100644 --- a/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/execution/sequence/CircuitBreakerTests.java +++ b/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/execution/sequence/CircuitBreakerTests.java @@ -32,6 +32,7 @@ import org.elasticsearch.xpack.eql.execution.search.Ordinal; import org.elasticsearch.xpack.eql.execution.search.QueryClient; import org.elasticsearch.xpack.eql.execution.search.QueryRequest; +import org.elasticsearch.xpack.eql.execution.search.Timestamp; import org.elasticsearch.xpack.eql.execution.search.extractor.ImplicitTiebreakerHitExtractor; import org.elasticsearch.xpack.ql.execution.search.extractor.HitExtractor; import java.io.IOException; @@ -120,7 +121,8 @@ public void testCircuitBreakerTumblingWindow() { public void testCircuitBreakerSequnceMatcher() { List> hits = new ArrayList<>(); for (int i = 0; i < 10; i++) { - hits.add(new Tuple<>(new KeyAndOrdinal(new SequenceKey(i), new Ordinal(i, o -> 1, 0)), new HitReference("index", i + ""))); + hits.add(new Tuple<>(new KeyAndOrdinal(new SequenceKey(i), new Ordinal(Timestamp.of(String.valueOf(i)), o -> 1, 0)), + new HitReference("index", i + ""))); } // Break on first iteration @@ -185,8 +187,8 @@ public String hitName() { } @Override - public Long extract(SearchHit hit) { - return (long) hit.docId(); + public Timestamp extract(SearchHit hit) { + return Timestamp.of(String.valueOf(hit.docId())); } } } From edcdd30b6213a72b8bd898776b6a1d71309db04d Mon Sep 17 00:00:00 2001 From: Jake Landis Date: Wed, 1 Sep 2021 15:51:14 -0500 Subject: [PATCH 076/128] Compatible REST API - minor build clean up (#77145) This commit allows the compatible REST API tests to execute on Windows. They were previously excluded from Windows due to a command line limit when defining a very large exclusion list. That exclusion list is much smaller now and they will now execute properly on Windows. Also, an empty exclusion list has been removed from the build config. 
--- modules/lang-painless/build.gradle | 5 ----- rest-api-spec/build.gradle | 4 ---- 2 files changed, 9 deletions(-) diff --git a/modules/lang-painless/build.gradle b/modules/lang-painless/build.gradle index e7251dd7ea0c3..395daf771dfc7 100644 --- a/modules/lang-painless/build.gradle +++ b/modules/lang-painless/build.gradle @@ -58,11 +58,6 @@ tasks.named("test").configure { jvmArgs '-XX:-OmitStackTraceInFastThrow', '-XX:-HeapDumpOnOutOfMemoryError' } -tasks.named("yamlRestCompatTest").configure { - systemProperty 'tests.rest.blacklist', [ - ].join(',') -} - /* Build Javadoc for the Java classes in Painless's public API that are in the * Painless plugin */ tasks.register("apiJavadoc", Javadoc) { diff --git a/rest-api-spec/build.gradle b/rest-api-spec/build.gradle index 7558b033b8137..7f9a2ebd09411 100644 --- a/rest-api-spec/build.gradle +++ b/rest-api-spec/build.gradle @@ -40,10 +40,6 @@ testClusters.all { tasks.named("test").configure { enabled = false } tasks.named("jarHell").configure { enabled = false } tasks.named("yamlRestCompatTest").configure { - onlyIf { - // Skip these tests on Windows since the blacklist exceeds Windows CLI limits - OS.current() != OS.WINDOWS - } systemProperty 'tests.rest.blacklist', [ // Cat API are meant to be consumed by humans, so will not be supported by Compatible REST API 'cat*/*/*', From 6c4df63a2d1ec48adbdabbbe83972fd94009c359 Mon Sep 17 00:00:00 2001 From: Yang Wang Date: Thu, 2 Sep 2021 14:29:49 +1000 Subject: [PATCH 077/128] Fix randomAuthentication by removing duplicate key (#77159) randomArray sometimes generates duplicate entries and the subsequent conversion to a Map hence fails due to the duplicate key. This PR fixes the failure by ensure array elements are distinct before converting to a Map. The alternative is to generate distinct array entries from the beginning. But given how the code is used in the context, a simple distinct call is easier and sufficient. Resolves: #77127 --- .../xpack/core/security/authc/AuthenticationTests.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authc/AuthenticationTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authc/AuthenticationTests.java index 1e102f8b49413..1ad0a8a5e238b 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authc/AuthenticationTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authc/AuthenticationTests.java @@ -170,7 +170,7 @@ public static Authentication randomAuthentication(User user, RealmRef realmRef) metadata = Map.of(randomAlphaOfLengthBetween(3, 8), randomAlphaOfLengthBetween(3, 8)); } else { metadata = Arrays.stream(randomArray(1, 5, String[]::new, () -> randomAlphaOfLengthBetween(3, 8))) - .collect(Collectors.toMap(s -> s, s -> randomAlphaOfLengthBetween(3, 8))); + .distinct().collect(Collectors.toMap(s -> s, s -> randomAlphaOfLengthBetween(3, 8))); } if (randomBoolean()) { // run-as return new Authentication(new User(user.principal(), user.roles(), randomUser()), From b24a0575775b2d8274897edf9560519c48362bbe Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Thu, 2 Sep 2021 08:26:02 +0200 Subject: [PATCH 078/128] Execute EnrichPolicyRunner on a non dedicated master node. (#76881) Introduce an internal action that the execute policy action delegates to. This to ensure that the actual policy execution is never executed on the elected master node or dedicated master nodes. 
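To make the delegation concrete, the following condensed Java sketch mirrors the node-selection rule that the new `InternalExecutePolicyAction` applies (see `selectNodeForPolicyExecution` in the diff below); the method name `pickExecutionNode` and the `counter` parameter are illustrative, not part of the patch:

    // Never run the policy on the elected master or on a dedicated master-only node;
    // rotate through the remaining candidates using a simple counter.
    static DiscoveryNode pickExecutionNode(DiscoveryNodes discoNodes, AtomicInteger counter) {
        DiscoveryNode[] candidates = discoNodes.getAllNodes().stream()
            .filter(n -> n.getId().equals(discoNodes.getMasterNodeId()) == false)             // not the elected master
            .filter(n -> n.getRoles().equals(Set.of(DiscoveryNodeRole.MASTER_ROLE)) == false) // not a dedicated master
            .toArray(DiscoveryNode[]::new);
        return candidates[Math.floorMod(counter.incrementAndGet(), candidates.length)];
    }

The actual implementation additionally falls back to the local node for single-node clusters, rejects clusters without ingest nodes, and skips nodes that are too old to know about the internal action.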
In case the cluster consists out of a single node then the internal action will attempt to execute on the current/local node. The actual enrich policy execution is encapsulated in the `EnrichPolicyRunner` class. This class manages the execution of several API calls, so this itself isn't doing anything heavy. However the coordination of these api calls (in particular the reindex api call) may involve some non-neglectable work/overhead and this shouldn't be performed on the elected master or any other dedicated master node. Closes #70436 --- .../xpack/enrich/EnrichMultiNodeIT.java | 62 ++++++ .../xpack/enrich/EnrichPlugin.java | 20 +- .../xpack/enrich/EnrichPolicyExecutor.java | 186 +++++------------- .../xpack/enrich/ExecuteEnrichPolicyTask.java | 13 +- .../action/InternalExecutePolicyAction.java | 176 +++++++++++++++++ .../TransportExecuteEnrichPolicyAction.java | 44 +---- .../enrich/EnrichPolicyExecutorTests.java | 179 +++++------------ .../InternalExecutePolicyActionTests.java | 140 +++++++++++++ .../xpack/security/operator/Constants.java | 1 + 9 files changed, 515 insertions(+), 306 deletions(-) create mode 100644 x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/action/InternalExecutePolicyAction.java create mode 100644 x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/action/InternalExecutePolicyActionTests.java diff --git a/x-pack/plugin/enrich/src/internalClusterTest/java/org/elasticsearch/xpack/enrich/EnrichMultiNodeIT.java b/x-pack/plugin/enrich/src/internalClusterTest/java/org/elasticsearch/xpack/enrich/EnrichMultiNodeIT.java index 9e15cf5fb360c..cf19952a701f2 100644 --- a/x-pack/plugin/enrich/src/internalClusterTest/java/org/elasticsearch/xpack/enrich/EnrichMultiNodeIT.java +++ b/x-pack/plugin/enrich/src/internalClusterTest/java/org/elasticsearch/xpack/enrich/EnrichMultiNodeIT.java @@ -9,7 +9,9 @@ import org.apache.lucene.search.TotalHits; import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.ActionFuture; +import org.elasticsearch.action.admin.cluster.node.tasks.get.GetTaskRequest; import org.elasticsearch.action.admin.cluster.node.tasks.get.GetTaskResponse; +import org.elasticsearch.action.admin.cluster.state.ClusterStateRequest; import org.elasticsearch.action.admin.indices.refresh.RefreshRequest; import org.elasticsearch.action.bulk.BulkItemResponse; import org.elasticsearch.action.bulk.BulkRequest; @@ -45,11 +47,16 @@ import java.util.Set; import static org.elasticsearch.test.NodeRoles.ingestOnlyNode; +import static org.elasticsearch.test.NodeRoles.masterOnlyNode; import static org.elasticsearch.test.NodeRoles.nonIngestNode; +import static org.elasticsearch.test.NodeRoles.nonMasterNode; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.notNullValue; +import static org.hamcrest.Matchers.nullValue; @ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST, numDataNodes = 0, numClientNodes = 0) public class EnrichMultiNodeIT extends ESIntegTestCase { @@ -146,6 +153,61 @@ public void testEnrichNoIngestNodes() { assertThat(e.getMessage(), equalTo("no ingest nodes in this cluster")); } + public void testExecutePolicyWithDedicatedMasterNodes() throws Exception { + var masterNodes = internalCluster().startNodes(3, masterOnlyNode()); + var regularNodes = 
internalCluster().startNodes(2, nonMasterNode()); + ensureStableCluster(5, (String) null); + + assertAcked(prepareCreate(SOURCE_INDEX_NAME).setMapping(MATCH_FIELD, "type=keyword")); + var enrichPolicy = new EnrichPolicy( + EnrichPolicy.MATCH_TYPE, + null, + List.of(SOURCE_INDEX_NAME), + MATCH_FIELD, + List.of(DECORATE_FIELDS) + ); + var putPolicyRequest = new PutEnrichPolicyAction.Request(POLICY_NAME, enrichPolicy); + assertAcked(client().execute(PutEnrichPolicyAction.INSTANCE, putPolicyRequest).actionGet()); + var executePolicyRequest = new ExecuteEnrichPolicyAction.Request(POLICY_NAME); + executePolicyRequest.setWaitForCompletion(false); // From tne returned taks id the node that executes the policy can be determined + var executePolicyResponse = client().execute(ExecuteEnrichPolicyAction.INSTANCE, executePolicyRequest).actionGet(); + assertThat(executePolicyResponse.getStatus(), nullValue()); + assertThat(executePolicyResponse.getTaskId(), notNullValue()); + + var getTaskRequest = new GetTaskRequest().setTaskId(executePolicyResponse.getTaskId()).setWaitForCompletion(true); + client().admin().cluster().getTask(getTaskRequest).actionGet(); + + var discoNodes = client().admin().cluster().state(new ClusterStateRequest()).actionGet().getState().nodes(); + assertThat(discoNodes.get(executePolicyResponse.getTaskId().getNodeId()).isMasterNode(), is(false)); + } + + public void testExecutePolicyNeverOnElectedMaster() throws Exception { + internalCluster().startNodes(3); + ensureStableCluster(3, (String) null); + + assertAcked(prepareCreate(SOURCE_INDEX_NAME).setMapping(MATCH_FIELD, "type=keyword")); + var enrichPolicy = new EnrichPolicy( + EnrichPolicy.MATCH_TYPE, + null, + List.of(SOURCE_INDEX_NAME), + MATCH_FIELD, + List.of(DECORATE_FIELDS) + ); + var putPolicyRequest = new PutEnrichPolicyAction.Request(POLICY_NAME, enrichPolicy); + assertAcked(client().execute(PutEnrichPolicyAction.INSTANCE, putPolicyRequest).actionGet()); + var executePolicyRequest = new ExecuteEnrichPolicyAction.Request(POLICY_NAME); + executePolicyRequest.setWaitForCompletion(false); // From tne returned taks id the node that executes the policy can be determined + var executePolicyResponse = client().execute(ExecuteEnrichPolicyAction.INSTANCE, executePolicyRequest).actionGet(); + assertThat(executePolicyResponse.getStatus(), nullValue()); + assertThat(executePolicyResponse.getTaskId(), notNullValue()); + + var getTaskRequest = new GetTaskRequest().setTaskId(executePolicyResponse.getTaskId()).setWaitForCompletion(true); + client().admin().cluster().getTask(getTaskRequest).actionGet(); + + var discoNodes = client().admin().cluster().state(new ClusterStateRequest()).actionGet().getState().nodes(); + assertThat(executePolicyResponse.getTaskId().getNodeId(), not(equalTo(discoNodes.getMasterNodeId()))); + } + private static void enrich(List keys, String coordinatingNode) { int numDocs = 256; BulkRequest bulkRequest = new BulkRequest("my-index"); diff --git a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichPlugin.java b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichPlugin.java index 7b7717d19fc19..71693ec631b4c 100644 --- a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichPlugin.java +++ b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichPlugin.java @@ -51,6 +51,7 @@ import org.elasticsearch.xpack.enrich.action.EnrichReindexAction; import org.elasticsearch.xpack.enrich.action.EnrichShardMultiSearchAction; import 
org.elasticsearch.xpack.enrich.action.EnrichUsageTransportAction; +import org.elasticsearch.xpack.enrich.action.InternalExecutePolicyAction; import org.elasticsearch.xpack.enrich.action.TransportDeleteEnrichPolicyAction; import org.elasticsearch.xpack.enrich.action.TransportEnrichReindexAction; import org.elasticsearch.xpack.enrich.action.TransportEnrichStatsAction; @@ -155,7 +156,8 @@ protected XPackLicenseState getLicenseState() { new ActionHandler<>(EnrichCoordinatorProxyAction.INSTANCE, EnrichCoordinatorProxyAction.TransportAction.class), new ActionHandler<>(EnrichShardMultiSearchAction.INSTANCE, EnrichShardMultiSearchAction.TransportAction.class), new ActionHandler<>(EnrichCoordinatorStatsAction.INSTANCE, EnrichCoordinatorStatsAction.TransportAction.class), - new ActionHandler<>(EnrichReindexAction.INSTANCE, TransportEnrichReindexAction.class) + new ActionHandler<>(EnrichReindexAction.INSTANCE, TransportEnrichReindexAction.class), + new ActionHandler<>(InternalExecutePolicyAction.INSTANCE, InternalExecutePolicyAction.Transport.class) ); } @@ -192,6 +194,15 @@ public Collection createComponents( Supplier repositoriesServiceSupplier ) { EnrichPolicyLocks enrichPolicyLocks = new EnrichPolicyLocks(); + EnrichPolicyExecutor enrichPolicyExecutor = new EnrichPolicyExecutor( + settings, + clusterService, + client, + threadPool, + expressionResolver, + enrichPolicyLocks, + System::currentTimeMillis + ); EnrichPolicyMaintenanceService enrichPolicyMaintenanceService = new EnrichPolicyMaintenanceService( settings, client, @@ -200,7 +211,12 @@ public Collection createComponents( enrichPolicyLocks ); enrichPolicyMaintenanceService.initialize(); - return List.of(enrichPolicyLocks, new EnrichCoordinatorProxyAction.Coordinator(client, settings), enrichPolicyMaintenanceService); + return List.of( + enrichPolicyLocks, + new EnrichCoordinatorProxyAction.Coordinator(client, settings), + enrichPolicyMaintenanceService, + enrichPolicyExecutor + ); } @Override diff --git a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichPolicyExecutor.java b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichPolicyExecutor.java index 8a7fd0f861651..583603af5339c 100644 --- a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichPolicyExecutor.java +++ b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichPolicyExecutor.java @@ -8,24 +8,19 @@ package org.elasticsearch.xpack.enrich; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.admin.cluster.node.tasks.get.GetTaskRequest; import org.elasticsearch.client.Client; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; -import org.elasticsearch.tasks.Task; -import org.elasticsearch.tasks.TaskAwareRequest; -import org.elasticsearch.tasks.TaskId; -import org.elasticsearch.tasks.TaskListener; -import org.elasticsearch.tasks.TaskManager; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.core.enrich.EnrichPolicy; import org.elasticsearch.xpack.core.enrich.action.ExecuteEnrichPolicyAction; import org.elasticsearch.xpack.core.enrich.action.ExecuteEnrichPolicyStatus; +import org.elasticsearch.xpack.enrich.action.InternalExecutePolicyAction; -import java.util.Map; import java.util.concurrent.Semaphore; -import java.util.function.BiConsumer; import 
java.util.function.LongSupplier; public class EnrichPolicyExecutor { @@ -34,7 +29,6 @@ public class EnrichPolicyExecutor { private final ClusterService clusterService; private final Client client; - private final TaskManager taskManager; private final ThreadPool threadPool; private final IndexNameExpressionResolver indexNameExpressionResolver; private final LongSupplier nowSupplier; @@ -48,7 +42,6 @@ public EnrichPolicyExecutor( Settings settings, ClusterService clusterService, Client client, - TaskManager taskManager, ThreadPool threadPool, IndexNameExpressionResolver indexNameExpressionResolver, EnrichPolicyLocks policyLocks, @@ -56,7 +49,6 @@ public EnrichPolicyExecutor( ) { this.clusterService = clusterService; this.client = client; - this.taskManager = taskManager; this.threadPool = threadPool; this.indexNameExpressionResolver = indexNameExpressionResolver; this.nowSupplier = nowSupplier; @@ -67,6 +59,43 @@ public EnrichPolicyExecutor( this.policyExecutionPermits = new Semaphore(maximumConcurrentPolicyExecutions); } + public void coordinatePolicyExecution( + ExecuteEnrichPolicyAction.Request request, + ActionListener listener + ) { + tryLockingPolicy(request.getName()); + try { + client.execute(InternalExecutePolicyAction.INSTANCE, request, ActionListener.wrap(response -> { + if (response.getStatus() != null) { + releasePolicy(request.getName()); + listener.onResponse(response); + } else { + waitAndThenRelease(request.getName(), response); + listener.onResponse(response); + } + }, e -> { + releasePolicy(request.getName()); + listener.onFailure(e); + })); + } catch (Exception e) { + // Be sure to unlock if submission failed. + releasePolicy(request.getName()); + throw e; + } + } + + public void runPolicyLocally(ExecuteEnrichPolicyTask task, String policyName, ActionListener listener) { + try { + EnrichPolicy policy = EnrichStore.getPolicy(policyName, clusterService.state()); + task.setStatus(new ExecuteEnrichPolicyStatus(ExecuteEnrichPolicyStatus.PolicyPhases.SCHEDULED)); + Runnable runnable = createPolicyRunner(policyName, policy, task, listener); + threadPool.executor(ThreadPool.Names.GENERIC).execute(runnable); + } catch (Exception e) { + task.setStatus(new ExecuteEnrichPolicyStatus(ExecuteEnrichPolicyStatus.PolicyPhases.FAILED)); + throw e; + } + } + private void tryLockingPolicy(String policyName) { policyLocks.lockPolicy(policyName); if (policyExecutionPermits.tryAcquire() == false) { @@ -91,49 +120,14 @@ private void releasePolicy(String policyName) { } } - private class PolicyCompletionListener implements ActionListener { - private final String policyName; - private final ExecuteEnrichPolicyTask task; - private final BiConsumer onResponse; - private final BiConsumer onFailure; - - PolicyCompletionListener( - String policyName, - ExecuteEnrichPolicyTask task, - BiConsumer onResponse, - BiConsumer onFailure - ) { - this.policyName = policyName; - this.task = task; - this.onResponse = onResponse; - this.onFailure = onFailure; - } - - @Override - public void onResponse(ExecuteEnrichPolicyStatus status) { - assert ExecuteEnrichPolicyStatus.PolicyPhases.COMPLETE.equals(status.getPhase()) : "incomplete task returned"; - releasePolicy(policyName); - try { - taskManager.unregister(task); - } finally { - onResponse.accept(task, status); - } - } - - @Override - public void onFailure(Exception e) { - // Set task status to failed to avoid having to catch and rethrow exceptions everywhere - task.setStatus(new ExecuteEnrichPolicyStatus(ExecuteEnrichPolicyStatus.PolicyPhases.FAILED)); - 
releasePolicy(policyName); - try { - taskManager.unregister(task); - } finally { - onFailure.accept(task, e); - } - } + private void waitAndThenRelease(String policyName, ExecuteEnrichPolicyAction.Response response) { + GetTaskRequest getTaskRequest = new GetTaskRequest(); + getTaskRequest.setTaskId(response.getTaskId()); + getTaskRequest.setWaitForCompletion(true); + client.admin().cluster().getTask(getTaskRequest, ActionListener.wrap(() -> releasePolicy(policyName))); } - protected Runnable createPolicyRunner( + private Runnable createPolicyRunner( String policyName, EnrichPolicy policy, ExecuteEnrichPolicyTask task, @@ -153,94 +147,4 @@ protected Runnable createPolicyRunner( ); } - private EnrichPolicy getPolicy(ExecuteEnrichPolicyAction.Request request) { - // Look up policy in policy store and execute it - EnrichPolicy policy = EnrichStore.getPolicy(request.getName(), clusterService.state()); - if (policy == null) { - throw new IllegalArgumentException("Policy execution failed. Could not locate policy with id [" + request.getName() + "]"); - } - return policy; - } - - public Task runPolicy(ExecuteEnrichPolicyAction.Request request, ActionListener listener) { - return runPolicy(request, getPolicy(request), listener); - } - - public Task runPolicy(ExecuteEnrichPolicyAction.Request request, TaskListener listener) { - return runPolicy(request, getPolicy(request), listener); - } - - public Task runPolicy( - ExecuteEnrichPolicyAction.Request request, - EnrichPolicy policy, - ActionListener listener - ) { - return runPolicy(request, policy, (t, r) -> listener.onResponse(r), (t, e) -> listener.onFailure(e)); - } - - public Task runPolicy( - ExecuteEnrichPolicyAction.Request request, - EnrichPolicy policy, - TaskListener listener - ) { - return runPolicy(request, policy, listener::onResponse, listener::onFailure); - } - - private Task runPolicy( - ExecuteEnrichPolicyAction.Request request, - EnrichPolicy policy, - BiConsumer onResponse, - BiConsumer onFailure - ) { - tryLockingPolicy(request.getName()); - try { - return runPolicyTask(request, policy, onResponse, onFailure); - } catch (Exception e) { - // Be sure to unlock if submission failed. 
- releasePolicy(request.getName()); - throw e; - } - } - - private Task runPolicyTask( - final ExecuteEnrichPolicyAction.Request request, - EnrichPolicy policy, - BiConsumer onResponse, - BiConsumer onFailure - ) { - Task asyncTask = taskManager.register("enrich", TASK_ACTION, new TaskAwareRequest() { - @Override - public void setParentTask(TaskId taskId) { - request.setParentTask(taskId); - } - - @Override - public TaskId getParentTask() { - return request.getParentTask(); - } - - @Override - public Task createTask(long id, String type, String action, TaskId parentTaskId, Map headers) { - return new ExecuteEnrichPolicyTask(id, type, action, getDescription(), parentTaskId, headers); - } - - @Override - public String getDescription() { - return request.getName(); - } - }); - ExecuteEnrichPolicyTask task = (ExecuteEnrichPolicyTask) asyncTask; - try { - task.setStatus(new ExecuteEnrichPolicyStatus(ExecuteEnrichPolicyStatus.PolicyPhases.SCHEDULED)); - PolicyCompletionListener completionListener = new PolicyCompletionListener(request.getName(), task, onResponse, onFailure); - Runnable runnable = createPolicyRunner(request.getName(), policy, task, completionListener); - threadPool.executor(ThreadPool.Names.GENERIC).execute(runnable); - return asyncTask; - } catch (Exception e) { - // Unregister task in case of exception - task.setStatus(new ExecuteEnrichPolicyStatus(ExecuteEnrichPolicyStatus.PolicyPhases.FAILED)); - taskManager.unregister(asyncTask); - throw e; - } - } } diff --git a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/ExecuteEnrichPolicyTask.java b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/ExecuteEnrichPolicyTask.java index 7a525e884c978..7ab567839562a 100644 --- a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/ExecuteEnrichPolicyTask.java +++ b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/ExecuteEnrichPolicyTask.java @@ -12,11 +12,18 @@ import java.util.Map; -class ExecuteEnrichPolicyTask extends Task { +public class ExecuteEnrichPolicyTask extends Task { private volatile ExecuteEnrichPolicyStatus status; - ExecuteEnrichPolicyTask(long id, String type, String action, String description, TaskId parentTask, Map headers) { + public ExecuteEnrichPolicyTask( + long id, + String type, + String action, + String description, + TaskId parentTask, + Map headers + ) { super(id, type, action, description, parentTask, headers); } @@ -25,7 +32,7 @@ public Status getStatus() { return status; } - void setStatus(ExecuteEnrichPolicyStatus status) { + public void setStatus(ExecuteEnrichPolicyStatus status) { this.status = status; } } diff --git a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/action/InternalExecutePolicyAction.java b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/action/InternalExecutePolicyAction.java new file mode 100644 index 0000000000000..cc7de92cbb8c1 --- /dev/null +++ b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/action/InternalExecutePolicyAction.java @@ -0,0 +1,176 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ +package org.elasticsearch.xpack.enrich.action; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.Version; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionListenerResponseHandler; +import org.elasticsearch.action.ActionType; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.node.DiscoveryNodeRole; +import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.Randomness; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.tasks.TaskAwareRequest; +import org.elasticsearch.tasks.TaskId; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.core.enrich.action.ExecuteEnrichPolicyStatus; +import org.elasticsearch.xpack.enrich.EnrichPolicyExecutor; +import org.elasticsearch.xpack.enrich.ExecuteEnrichPolicyTask; + +import java.util.Map; +import java.util.Set; +import java.util.concurrent.atomic.AtomicInteger; + +import static org.elasticsearch.xpack.core.enrich.action.ExecuteEnrichPolicyAction.Request; +import static org.elasticsearch.xpack.core.enrich.action.ExecuteEnrichPolicyAction.Response; +import static org.elasticsearch.xpack.enrich.EnrichPolicyExecutor.TASK_ACTION; + +/** + * An internal action runs {@link org.elasticsearch.xpack.enrich.EnrichPolicyRunner} and ensures that: + *
+ * <ul>
+ *     <li>In case the cluster has more than one node, the policy runner isn't executed on the elected master</li>
+ *     <li>Additionally, if the cluster has master only nodes then the policy runner isn't executed on these nodes.</li>
+ * </ul>
+ * + * The {@link TransportExecuteEnrichPolicyAction} is a transport action that runs on the elected master node and + * the actual policy execution may be heavy for the elected master node. + * Although {@link org.elasticsearch.xpack.enrich.EnrichPolicyRunner} doesn't do heavy operations, the coordination + * of certain operations may have a non-negligible overhead (for example the coordination of the reindex step). + */ +public class InternalExecutePolicyAction extends ActionType { + + private static final Logger LOGGER = LogManager.getLogger(InternalExecutePolicyAction.class); + public static final InternalExecutePolicyAction INSTANCE = new InternalExecutePolicyAction(); + public static final String NAME = "cluster:admin/xpack/enrich/internal_execute"; + + private InternalExecutePolicyAction() { + super(NAME, Response::new); + } + + public static class Transport extends HandledTransportAction { + + private final ClusterService clusterService; + private final TransportService transportService; + private final EnrichPolicyExecutor policyExecutor; + private final AtomicInteger nodeGenerator = new AtomicInteger(Randomness.get().nextInt()); + + @Inject + public Transport( + TransportService transportService, + ActionFilters actionFilters, + ClusterService clusterService, + EnrichPolicyExecutor policyExecutor + ) { + super(NAME, transportService, actionFilters, Request::new); + this.clusterService = clusterService; + this.transportService = transportService; + this.policyExecutor = policyExecutor; + } + + @Override + protected void doExecute(Task transportTask, Request request, ActionListener actionListener) { + var clusterState = clusterService.state(); + var targetNode = selectNodeForPolicyExecution(clusterState.nodes()); + if (clusterState.nodes().getLocalNode().equals(targetNode) == false) { + var handler = new ActionListenerResponseHandler<>(actionListener, Response::new); + transportService.sendRequest(targetNode, NAME, request, handler); + return; + } + + // Can't use provided task, because in the case wait_for_completion=false then + // as soon as actionListener#onResponse is invoked then the provided task get unregistered and + // then there no way to see the policy execution in the list tasks or get task APIs. 
+ var task = (ExecuteEnrichPolicyTask) taskManager.register("enrich", TASK_ACTION, new TaskAwareRequest() { + + @Override + public void setParentTask(TaskId taskId) { + request.setParentTask(taskId); + } + + @Override + public TaskId getParentTask() { + return request.getParentTask(); + } + + @Override + public Task createTask(long id, String type, String action, TaskId parentTaskId, Map headers) { + String description = "executing enrich policy [" + request.getName() + "]"; + return new ExecuteEnrichPolicyTask(id, type, action, description, parentTaskId, headers); + } + }); + + try { + ActionListener listener; + if (request.isWaitForCompletion()) { + listener = ActionListener.wrap(result -> actionListener.onResponse(new Response(result)), actionListener::onFailure); + } else { + listener = ActionListener.wrap( + result -> LOGGER.debug("successfully executed policy [{}]", request.getName()), + e -> LOGGER.error("failed to execute policy [" + request.getName() + "]", e) + ); + } + policyExecutor.runPolicyLocally(task, request.getName(), ActionListener.wrap(result -> { + taskManager.unregister(task); + listener.onResponse(result); + }, e -> { + taskManager.unregister(task); + listener.onFailure(e); + })); + + if (request.isWaitForCompletion() == false) { + TaskId taskId = new TaskId(clusterState.nodes().getLocalNodeId(), task.getId()); + actionListener.onResponse(new Response(taskId)); + } + } catch (Exception e) { + taskManager.unregister(task); + throw e; + } + } + + DiscoveryNode selectNodeForPolicyExecution(DiscoveryNodes discoNodes) { + if (discoNodes.getIngestNodes().isEmpty()) { + // if we don't fail here then reindex will fail with a more complicated error. + // (EnrichPolicyRunner uses a pipeline with reindex) + throw new IllegalStateException("no ingest nodes in this cluster"); + } + // In case of a single node cluster: + if (discoNodes.getSize() == 1) { + return discoNodes.getLocalNode(); + } + // This check exists to avoid redirecting potentially many times: + if (discoNodes.isLocalNodeElectedMaster() == false) { + // This method is first executed on the elected master node (via execute enrich policy action) + // a node is picked and the request is redirected to that node. + // Whatever node has been picked in the previous execution of the filters below should execute and + // attempt not pick another node. 
+ return discoNodes.getLocalNode(); + } + + final var nodes = discoNodes.getAllNodes() + .stream() + // filter out elected master node (which is the local node) + .filter(discoNode -> discoNode.getId().equals(discoNodes.getMasterNodeId()) == false) + // filter out dedicated master nodes + .filter(discoNode -> discoNode.getRoles().equals(Set.of(DiscoveryNodeRole.MASTER_ROLE)) == false) + // Filter out nodes that don't have this action yet + .filter(discoNode -> discoNode.getVersion().onOrAfter(Version.V_8_0_0)) + .toArray(DiscoveryNode[]::new); + if (nodes.length == 0) { + throw new IllegalStateException("no suitable node was found to perform enrich policy execution"); + } + return nodes[Math.floorMod(nodeGenerator.incrementAndGet(), nodes.length)]; + } + } + +} diff --git a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/action/TransportExecuteEnrichPolicyAction.java b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/action/TransportExecuteEnrichPolicyAction.java index 1bc592ccb212f..637ba472b0192 100644 --- a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/action/TransportExecuteEnrichPolicyAction.java +++ b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/action/TransportExecuteEnrichPolicyAction.java @@ -9,23 +9,25 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.master.TransportMasterNodeAction; -import org.elasticsearch.client.Client; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.tasks.LoggingTaskListener; import org.elasticsearch.tasks.Task; -import org.elasticsearch.tasks.TaskId; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.enrich.action.ExecuteEnrichPolicyAction; import org.elasticsearch.xpack.enrich.EnrichPolicyExecutor; -import org.elasticsearch.xpack.enrich.EnrichPolicyLocks; +/** + * Coordinates enrich policy executions. This is a master node action, + * so that policy executions can be accounted. For example that no more + * than X policy executions occur or only a single policy execution occurs + * for each policy. The actual execution of the enrich policy is performed + * via {@link InternalExecutePolicyAction}. 
+ */ public class TransportExecuteEnrichPolicyAction extends TransportMasterNodeAction< ExecuteEnrichPolicyAction.Request, ExecuteEnrichPolicyAction.Response> { @@ -34,14 +36,12 @@ public class TransportExecuteEnrichPolicyAction extends TransportMasterNodeActio @Inject public TransportExecuteEnrichPolicyAction( - Settings settings, - Client client, TransportService transportService, ClusterService clusterService, ThreadPool threadPool, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, - EnrichPolicyLocks enrichPolicyLocks + EnrichPolicyExecutor enrichPolicyExecutor ) { super( ExecuteEnrichPolicyAction.NAME, @@ -54,16 +54,7 @@ public TransportExecuteEnrichPolicyAction( ExecuteEnrichPolicyAction.Response::new, ThreadPool.Names.SAME ); - this.executor = new EnrichPolicyExecutor( - settings, - clusterService, - client, - transportService.getTaskManager(), - threadPool, - indexNameExpressionResolver, - enrichPolicyLocks, - System::currentTimeMillis - ); + this.executor = enrichPolicyExecutor; } @Override @@ -73,22 +64,7 @@ protected void masterOperation( ClusterState state, ActionListener listener ) { - if (state.getNodes().getIngestNodes().isEmpty()) { - // if we don't fail here then reindex will fail with a more complicated error. - // (EnrichPolicyRunner uses a pipeline with reindex) - throw new IllegalStateException("no ingest nodes in this cluster"); - } - - if (request.isWaitForCompletion()) { - executor.runPolicy( - request, - listener.delegateFailure((l, executionStatus) -> l.onResponse(new ExecuteEnrichPolicyAction.Response(executionStatus))) - ); - } else { - Task executeTask = executor.runPolicy(request, LoggingTaskListener.instance()); - TaskId taskId = new TaskId(clusterService.localNode().getId(), executeTask.getId()); - listener.onResponse(new ExecuteEnrichPolicyAction.Response(taskId)); - } + executor.coordinatePolicyExecution(request, listener); } @Override diff --git a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichPolicyExecutorTests.java b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichPolicyExecutorTests.java index eb049943e86b4..ccb4b78c3d05b 100644 --- a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichPolicyExecutorTests.java +++ b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichPolicyExecutorTests.java @@ -8,38 +8,33 @@ package org.elasticsearch.xpack.enrich; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.action.ActionType; import org.elasticsearch.action.LatchedActionListener; import org.elasticsearch.client.Client; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; -import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; import org.elasticsearch.indices.TestIndexNameExpressionResolver; -import org.elasticsearch.tasks.TaskManager; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.client.NoOpClient; import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.xpack.core.enrich.EnrichPolicy; import org.elasticsearch.xpack.core.enrich.action.ExecuteEnrichPolicyAction; -import org.elasticsearch.xpack.core.enrich.action.ExecuteEnrichPolicyStatus; import org.junit.AfterClass; import 
org.junit.BeforeClass; -import java.util.Collections; -import java.util.List; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; -import java.util.function.LongSupplier; import static org.hamcrest.CoreMatchers.containsString; public class EnrichPolicyExecutorTests extends ESTestCase { private static ThreadPool testThreadPool; - private static TaskManager testTaskManager; - private static final ActionListener noOpListener = new ActionListener<>() { + private static final ActionListener noOpListener = new ActionListener<>() { @Override - public void onResponse(ExecuteEnrichPolicyStatus ignored) {} + public void onResponse(ExecuteEnrichPolicyAction.Response ignored) {} @Override public void onFailure(Exception e) {} @@ -48,7 +43,6 @@ public void onFailure(Exception e) {} @BeforeClass public static void beforeCLass() { testThreadPool = new TestThreadPool("EnrichPolicyExecutorTests"); - testTaskManager = new TaskManager(Settings.EMPTY, testThreadPool, Collections.emptySet()); } @AfterClass @@ -56,102 +50,24 @@ public static void afterClass() { ThreadPool.terminate(testThreadPool, 30, TimeUnit.SECONDS); } - /** - * A policy runner drop-in replacement that just waits on a given countdown latch, and reports success after the latch is counted down. - */ - private static class BlockingTestPolicyRunner implements Runnable { - private final CountDownLatch latch; - private final ExecuteEnrichPolicyTask task; - private final ActionListener listener; - - BlockingTestPolicyRunner(CountDownLatch latch, ExecuteEnrichPolicyTask task, ActionListener listener) { - this.latch = latch; - this.task = task; - this.listener = listener; - } - - @Override - public void run() { - try { - task.setStatus(new ExecuteEnrichPolicyStatus(ExecuteEnrichPolicyStatus.PolicyPhases.RUNNING)); - latch.await(); - ExecuteEnrichPolicyStatus newStatus = new ExecuteEnrichPolicyStatus(ExecuteEnrichPolicyStatus.PolicyPhases.COMPLETE); - task.setStatus(newStatus); - listener.onResponse(newStatus); - } catch (InterruptedException e) { - throw new RuntimeException("Interrupted waiting for test framework to continue the test", e); - } - } - } - - /** - * A mocked policy executor that accepts policy execution requests which block until the returned latch is decremented. Allows for - * controlling the timing for "in flight" policy executions to test for correct locking logic. 
- */ - private static class EnrichPolicyTestExecutor extends EnrichPolicyExecutor { - - EnrichPolicyTestExecutor( - Settings settings, - ClusterService clusterService, - Client client, - TaskManager taskManager, - ThreadPool threadPool, - IndexNameExpressionResolver indexNameExpressionResolver, - LongSupplier nowSupplier - ) { - super( - settings, - clusterService, - client, - taskManager, - threadPool, - indexNameExpressionResolver, - new EnrichPolicyLocks(), - nowSupplier - ); - } - - private CountDownLatch currentLatch; - - CountDownLatch testRunPolicy(String policyName, EnrichPolicy policy, ActionListener listener) { - currentLatch = new CountDownLatch(1); - ExecuteEnrichPolicyAction.Request request = new ExecuteEnrichPolicyAction.Request(policyName); - runPolicy(request, policy, listener); - return currentLatch; - } - - @Override - protected Runnable createPolicyRunner( - String policyName, - EnrichPolicy policy, - ExecuteEnrichPolicyTask task, - ActionListener listener - ) { - if (currentLatch == null) { - throw new IllegalStateException("Use the testRunPolicy method on this test instance"); - } - return new BlockingTestPolicyRunner(currentLatch, task, listener); - } - } - - public void testNonConcurrentPolicyExecution() throws InterruptedException { + public void testNonConcurrentPolicyCoordination() throws InterruptedException { String testPolicyName = "test_policy"; - EnrichPolicy testPolicy = new EnrichPolicy(EnrichPolicy.MATCH_TYPE, null, List.of("some_index"), "keyfield", List.of("valuefield")); - final EnrichPolicyTestExecutor testExecutor = new EnrichPolicyTestExecutor( + CountDownLatch latch = new CountDownLatch(1); + Client client = getClient(latch); + final EnrichPolicyExecutor testExecutor = new EnrichPolicyExecutor( Settings.EMPTY, null, - null, - testTaskManager, + client, testThreadPool, TestIndexNameExpressionResolver.newInstance(testThreadPool.getThreadContext()), + new EnrichPolicyLocks(), ESTestCase::randomNonNegativeLong ); // Launch a fake policy run that will block until firstTaskBlock is counted down. 
final CountDownLatch firstTaskComplete = new CountDownLatch(1); - final CountDownLatch firstTaskBlock = testExecutor.testRunPolicy( - testPolicyName, - testPolicy, + testExecutor.coordinatePolicyExecution( + new ExecuteEnrichPolicyAction.Request(testPolicyName), new LatchedActionListener<>(noOpListener, firstTaskComplete) ); @@ -160,17 +76,16 @@ public void testNonConcurrentPolicyExecution() throws InterruptedException { EsRejectedExecutionException.class, "Expected exception but nothing was thrown", () -> { - CountDownLatch countDownLatch = testExecutor.testRunPolicy(testPolicyName, testPolicy, noOpListener); + testExecutor.coordinatePolicyExecution(new ExecuteEnrichPolicyAction.Request(testPolicyName), noOpListener); // Should throw exception on the previous statement, but if it doesn't, be a // good citizen and conclude the fake runs to keep the logs clean from interrupted exceptions - countDownLatch.countDown(); - firstTaskBlock.countDown(); + latch.countDown(); firstTaskComplete.await(); } ); // Conclude the first mock run - firstTaskBlock.countDown(); + latch.countDown(); firstTaskComplete.await(); // Validate exception from second run @@ -181,41 +96,38 @@ public void testNonConcurrentPolicyExecution() throws InterruptedException { // Ensure that the lock from the previous run has been cleared CountDownLatch secondTaskComplete = new CountDownLatch(1); - CountDownLatch secondTaskBlock = testExecutor.testRunPolicy( - testPolicyName, - testPolicy, + testExecutor.coordinatePolicyExecution( + new ExecuteEnrichPolicyAction.Request(testPolicyName), new LatchedActionListener<>(noOpListener, secondTaskComplete) ); - secondTaskBlock.countDown(); secondTaskComplete.await(); } public void testMaximumPolicyExecutionLimit() throws InterruptedException { String testPolicyBaseName = "test_policy_"; Settings testSettings = Settings.builder().put(EnrichPlugin.ENRICH_MAX_CONCURRENT_POLICY_EXECUTIONS.getKey(), 2).build(); - EnrichPolicy testPolicy = new EnrichPolicy(EnrichPolicy.MATCH_TYPE, null, List.of("some_index"), "keyfield", List.of("valuefield")); - final EnrichPolicyTestExecutor testExecutor = new EnrichPolicyTestExecutor( + CountDownLatch latch = new CountDownLatch(1); + Client client = getClient(latch); + final EnrichPolicyExecutor testExecutor = new EnrichPolicyExecutor( testSettings, null, - null, - testTaskManager, + client, testThreadPool, TestIndexNameExpressionResolver.newInstance(testThreadPool.getThreadContext()), + new EnrichPolicyLocks(), ESTestCase::randomNonNegativeLong ); // Launch a two fake policy runs that will block until counted down to use up the maximum concurrent final CountDownLatch firstTaskComplete = new CountDownLatch(1); - final CountDownLatch firstTaskBlock = testExecutor.testRunPolicy( - testPolicyBaseName + "1", - testPolicy, + testExecutor.coordinatePolicyExecution( + new ExecuteEnrichPolicyAction.Request(testPolicyBaseName + "1"), new LatchedActionListener<>(noOpListener, firstTaskComplete) ); final CountDownLatch secondTaskComplete = new CountDownLatch(1); - final CountDownLatch secondTaskBlock = testExecutor.testRunPolicy( - testPolicyBaseName + "2", - testPolicy, + testExecutor.coordinatePolicyExecution( + new ExecuteEnrichPolicyAction.Request(testPolicyBaseName + "2"), new LatchedActionListener<>(noOpListener, secondTaskComplete) ); @@ -224,20 +136,17 @@ public void testMaximumPolicyExecutionLimit() throws InterruptedException { EsRejectedExecutionException.class, "Expected exception but nothing was thrown", () -> { - CountDownLatch countDownLatch = 
testExecutor.testRunPolicy(testPolicyBaseName + "3", testPolicy, noOpListener); + testExecutor.coordinatePolicyExecution(new ExecuteEnrichPolicyAction.Request(testPolicyBaseName + "3"), noOpListener); // Should throw exception on the previous statement, but if it doesn't, be a // good citizen and conclude the fake runs to keep the logs clean from interrupted exceptions - countDownLatch.countDown(); - firstTaskBlock.countDown(); - secondTaskBlock.countDown(); + latch.countDown(); firstTaskComplete.await(); secondTaskComplete.await(); } ); // Conclude the first mock run - firstTaskBlock.countDown(); - secondTaskBlock.countDown(); + latch.countDown(); firstTaskComplete.await(); secondTaskComplete.await(); @@ -251,12 +160,30 @@ public void testMaximumPolicyExecutionLimit() throws InterruptedException { // Ensure that the lock from the previous run has been cleared CountDownLatch finalTaskComplete = new CountDownLatch(1); - CountDownLatch finalTaskBlock = testExecutor.testRunPolicy( - testPolicyBaseName + "1", - testPolicy, + testExecutor.coordinatePolicyExecution( + new ExecuteEnrichPolicyAction.Request(testPolicyBaseName + "1"), new LatchedActionListener<>(noOpListener, finalTaskComplete) ); - finalTaskBlock.countDown(); finalTaskComplete.await(); } + + private Client getClient(CountDownLatch latch) { + return new NoOpClient(testThreadPool) { + @Override + protected void doExecute( + ActionType action, + Request request, + ActionListener listener + ) { + testThreadPool.generic().execute(() -> { + try { + latch.await(); + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + } + super.doExecute(action, request, listener); + }); + } + }; + } } diff --git a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/action/InternalExecutePolicyActionTests.java b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/action/InternalExecutePolicyActionTests.java new file mode 100644 index 0000000000000..5ea04b5141f7e --- /dev/null +++ b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/action/InternalExecutePolicyActionTests.java @@ -0,0 +1,140 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ +package org.elasticsearch.xpack.enrich.action; + +import org.elasticsearch.Version; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.node.DiscoveryNodeRole; +import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.transport.TransportService; +import org.junit.Before; + +import java.util.Set; + +import static java.util.Collections.emptyMap; +import static org.hamcrest.Matchers.either; +import static org.hamcrest.Matchers.equalTo; +import static org.mockito.Mockito.mock; + +public class InternalExecutePolicyActionTests extends ESTestCase { + + private InternalExecutePolicyAction.Transport transportAction; + + @Before + public void instantiateTransportAction() { + transportAction = new InternalExecutePolicyAction.Transport(mock(TransportService.class), mock(ActionFilters.class), null, null); + } + + public void testSelectNodeForPolicyExecution() { + var node1 = newNode(randomAlphaOfLength(4)); + var node2 = newNode(randomAlphaOfLength(4)); + var node3 = newNode(randomAlphaOfLength(4)); + var discoNodes = DiscoveryNodes.builder() + .add(node1) + .add(node2) + .add(node3) + .masterNodeId(node1.getId()) + .localNodeId(node1.getId()) + .build(); + var result = transportAction.selectNodeForPolicyExecution(discoNodes); + assertThat(result, either(equalTo(node2)).or(equalTo(node3))); + } + + public void testSelectNodeForPolicyExecutionSingleNode() { + var node1 = newNode(randomAlphaOfLength(4)); + var discoNodes = DiscoveryNodes.builder().add(node1).masterNodeId(node1.getId()).localNodeId(node1.getId()).build(); + var result = transportAction.selectNodeForPolicyExecution(discoNodes); + assertThat(result, equalTo(node1)); + } + + public void testSelectNodeForPolicyExecutionDedicatedMasters() { + var roles = Set.of(DiscoveryNodeRole.MASTER_ROLE); + var node1 = newNode(randomAlphaOfLength(4), roles); + var node2 = newNode(randomAlphaOfLength(4), roles); + var node3 = newNode(randomAlphaOfLength(4), roles); + var node4 = newNode(randomAlphaOfLength(4)); + var node5 = newNode(randomAlphaOfLength(4)); + var node6 = newNode(randomAlphaOfLength(4)); + var discoNodes = DiscoveryNodes.builder() + .add(node1) + .add(node2) + .add(node3) + .add(node4) + .add(node5) + .add(node6) + .masterNodeId(node2.getId()) + .localNodeId(node2.getId()) + .build(); + var result = transportAction.selectNodeForPolicyExecution(discoNodes); + assertThat(result, either(equalTo(node4)).or(equalTo(node5)).or(equalTo(node6))); + } + + public void testSelectNodeForPolicyExecutionNoNodeWithIngestRole() { + var roles = Set.of(DiscoveryNodeRole.MASTER_ROLE, DiscoveryNodeRole.DATA_ROLE); + var node1 = newNode(randomAlphaOfLength(4), roles); + var node2 = newNode(randomAlphaOfLength(4), roles); + var node3 = newNode(randomAlphaOfLength(4), roles); + var discoNodes = DiscoveryNodes.builder() + .add(node1) + .add(node2) + .add(node3) + .masterNodeId(node1.getId()) + .localNodeId(node1.getId()) + .build(); + var e = expectThrows(IllegalStateException.class, () -> transportAction.selectNodeForPolicyExecution(discoNodes)); + assertThat(e.getMessage(), equalTo("no ingest nodes in this cluster")); + } + + public void testSelectNodeForPolicyExecutionMixedVersions() { + var node1 = newNode(randomAlphaOfLength(4), Version.V_7_16_0); + var node2 = newNode(randomAlphaOfLength(4), Version.V_7_16_0); + var node3 = newNode(randomAlphaOfLength(4)); + var discoNodes = 
DiscoveryNodes.builder() + .add(node1) + .add(node2) + .add(node3) + .masterNodeId(node3.getId()) + .localNodeId(node3.getId()) + .build(); + var e = expectThrows(IllegalStateException.class, () -> transportAction.selectNodeForPolicyExecution(discoNodes)); + assertThat(e.getMessage(), equalTo("no suitable node was found to perform enrich policy execution")); + } + + public void testSelectNodeForPolicyExecutionPickLocalNodeIfNotElectedMaster() { + var node1 = newNode(randomAlphaOfLength(4)); + var node2 = newNode(randomAlphaOfLength(4)); + var node3 = newNode(randomAlphaOfLength(4)); + var discoNodes = DiscoveryNodes.builder() + .add(node1) + .add(node2) + .add(node3) + .masterNodeId(node1.getId()) + .localNodeId(node2.getId()) + .build(); + var result = transportAction.selectNodeForPolicyExecution(discoNodes); + assertThat(result, equalTo(node2)); + } + + private static DiscoveryNode newNode(String nodeId) { + return newNode(nodeId, Version.V_8_0_0); + } + + private static DiscoveryNode newNode(String nodeId, Version version) { + var roles = Set.of(DiscoveryNodeRole.MASTER_ROLE, DiscoveryNodeRole.DATA_ROLE, DiscoveryNodeRole.INGEST_ROLE); + return newNode(nodeId, roles, version); + } + + private static DiscoveryNode newNode(String nodeId, Set roles) { + return newNode(nodeId, roles, Version.V_8_0_0); + } + + private static DiscoveryNode newNode(String nodeId, Set roles, Version version) { + return new DiscoveryNode(nodeId, buildNewFakeTransportAddress(), emptyMap(), roles, version); + } +} diff --git a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java index 8f53a2e17e3a4..c59decd4d96f4 100644 --- a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java +++ b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java @@ -104,6 +104,7 @@ public class Constants { "cluster:admin/xpack/deprecation/nodes/info", "cluster:admin/xpack/enrich/delete", "cluster:admin/xpack/enrich/execute", + "cluster:admin/xpack/enrich/internal_execute", "cluster:admin/xpack/enrich/get", "cluster:admin/xpack/enrich/put", "cluster:admin/xpack/enrich/reindex", From 27062e5039de4239f9f7bab067648832fd512336 Mon Sep 17 00:00:00 2001 From: Ioannis Kakavas Date: Thu, 2 Sep 2021 09:28:29 +0300 Subject: [PATCH 079/128] Prioritize IPv4 addresses in the enrollment token (#76898) The enrollment token contains a list of addresses where the node is listening for requests in the HTTP layer. This change makes it so we order that list before serializing it in the token prioritizing IPv4 addresses vs IPv6 addresses. 
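For illustration only (not part of the patch itself), the ordering that getFilteredAddresses applies in the diff further down can be condensed into a small standalone sketch; the class name, helper name, and sample addresses here are made up:

    import java.net.Inet4Address;
    import java.net.Inet6Address;
    import java.net.InetAddress;
    import java.net.URI;
    import java.util.ArrayList;
    import java.util.List;

    public class AddressOrderingSketch {
        // Same comparator idea as the patch: IPv4 entries sort ahead of IPv6 entries,
        // while unparsable entries and same-family pairs keep their relative order
        // (List.sort is stable).
        static void sortIpv4First(List<String> boundAddresses) {
            boundAddresses.sort((a, b) -> {
                try {
                    InetAddress addressA = InetAddress.getByName(new URI("http://" + a).getHost());
                    InetAddress addressB = InetAddress.getByName(new URI("http://" + b).getHost());
                    if (addressA instanceof Inet4Address && addressB instanceof Inet6Address) {
                        return -1;
                    } else if (addressA instanceof Inet6Address && addressB instanceof Inet4Address) {
                        return 1;
                    }
                } catch (Exception e) {
                    // treat the pair as equal if an address cannot be parsed
                }
                return 0;
            });
        }

        public static void main(String[] args) {
            List<String> addresses = new ArrayList<>(List.of("[2001:db8::1]:9203", "192.168.0.1:9201", "10.0.0.5:9202"));
            sortIpv4First(addresses);
            System.out.println(addresses); // [192.168.0.1:9201, 10.0.0.5:9202, [2001:db8::1]:9203]
        }
    }
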
The reasoning is that - it is usually more probable for IPv4 addresses to be reachable so in the case where the consumer of the token needs to iterate through available addresses, this might lead to getting a response with fewer attempts - When persisted to configuration files and/or shown in UIs, IPv4 addresses look more familiar (less surprising) to users --- .../enrollment/EnrollmentTokenGenerator.java | 44 +++++++++++++++---- .../EnrollmentTokenGeneratorTests.java | 20 +++++---- 2 files changed, 47 insertions(+), 17 deletions(-) diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/enrollment/EnrollmentTokenGenerator.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/enrollment/EnrollmentTokenGenerator.java index 69107411bb5ee..979a512db2891 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/enrollment/EnrollmentTokenGenerator.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/enrollment/EnrollmentTokenGenerator.java @@ -31,6 +31,8 @@ import java.io.IOException; import java.io.InputStream; import java.net.HttpURLConnection; +import java.net.Inet4Address; +import java.net.Inet6Address; import java.net.InetAddress; import java.net.MalformedURLException; import java.net.URI; @@ -169,13 +171,13 @@ protected Tuple, String> getNodeInfo(String user, SecureString pass httpResponseHttp.getResponseBody()); throw new IllegalStateException("No bound addresses found in response from calling GET " + httpInfoUrl); } - final List filtered_addresses = getFilteredAddresses(addresses); + final List filteredAddresses = getFilteredAddresses(addresses); final String stackVersion = getVersion(httpResponseHttp.getResponseBody()); if (stackVersion == null || stackVersion.isEmpty()) { throw new IllegalStateException("Could not retrieve the version."); } - return new Tuple<>(filtered_addresses, stackVersion); + return new Tuple<>(filteredAddresses, stackVersion); } protected String getCaFingerprint() throws Exception { @@ -199,14 +201,38 @@ protected String getCaFingerprint() throws Exception { } static List getFilteredAddresses(List addresses) throws Exception { - List filtered_addresses = new ArrayList<>(); - for (String bound_address : addresses){ - URI uri = new URI("http://" + bound_address); - InetAddress inet_address = InetAddress.getByName(uri.getHost()); - if (inet_address.isLoopbackAddress() != true) { - filtered_addresses.add(bound_address); + List filteredAddresses = new ArrayList<>(); + for (String boundAddress : addresses){ + InetAddress inetAddress = getInetAddressFromString(boundAddress); + if (inetAddress.isLoopbackAddress() != true) { + filteredAddresses.add(boundAddress); } } - return filtered_addresses.isEmpty() ? 
addresses : filtered_addresses; + if (filteredAddresses.isEmpty()) { + filteredAddresses = addresses; + } + // Sort the list prioritizing IPv4 addresses when possible, as it is more probable to be reachable when token consumer iterates + // addresses for the initial node and it is less surprising for users to see in the UI or config + filteredAddresses.sort((String a, String b) -> { + try { + final InetAddress addressA = getInetAddressFromString(a); + final InetAddress addressB = getInetAddressFromString(b); + if (addressA instanceof Inet4Address && addressB instanceof Inet6Address) { + return -1; + } else if (addressA instanceof Inet6Address && addressB instanceof Inet4Address) { + return 1; + } else { + return 0; + } + } catch (Exception e) { + return 0; + } + }); + return filteredAddresses; + } + + private static InetAddress getInetAddressFromString(String address) throws Exception { + URI uri = new URI("http://" + address); + return InetAddress.getByName(uri.getHost()); } } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/enrollment/EnrollmentTokenGeneratorTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/enrollment/EnrollmentTokenGeneratorTests.java index df79e06e1aa9d..7a40c57b82726 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/enrollment/EnrollmentTokenGeneratorTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/enrollment/EnrollmentTokenGeneratorTests.java @@ -42,6 +42,7 @@ import static org.elasticsearch.test.CheckedFunctionUtils.anyCheckedFunction; import static org.elasticsearch.test.CheckedFunctionUtils.anyCheckedSupplier; import static org.elasticsearch.xpack.security.enrollment.EnrollmentTokenGenerator.getFilteredAddresses; +import static org.hamcrest.Matchers.equalTo; import static org.mockito.Matchers.any; import static org.mockito.Matchers.anyString; import static org.mockito.Matchers.eq; @@ -248,7 +249,7 @@ public void testFailedNoCaInKeystore() throws Exception { enrollmentTokenGenerator.createNodeEnrollmentToken("elastic", new SecureString("elastic".toCharArray())).getEncoded()); assertThat( ex.getMessage(), - Matchers.equalTo( + equalTo( "Unable to create an enrollment token. Elasticsearch node HTTP layer SSL configuration Keystore doesn't " + "contain any PrivateKey entries where the associated certificate is a CA certificate" ) @@ -304,7 +305,7 @@ public void testFailedManyCaInKeystore() throws Exception { IllegalStateException ex = expectThrows(IllegalStateException.class, () -> enrollmentTokenGenerator.createNodeEnrollmentToken("elastic", new SecureString("elastic".toCharArray())).getEncoded()); - assertThat(ex.getMessage(), Matchers.equalTo("Unable to create an enrollment token. Elasticsearch node HTTP layer SSL " + + assertThat(ex.getMessage(), equalTo("Unable to create an enrollment token. 
Elasticsearch node HTTP layer SSL " + "configuration Keystore contains multiple PrivateKey entries where the associated certificate is a CA certificate")); } @@ -350,7 +351,7 @@ public void testEnrollmentNotEnabled() throws Exception { IllegalStateException ex = expectThrows(IllegalStateException.class, () -> enrollmentTokenGenerator.createNodeEnrollmentToken("elastic", new SecureString("elastic".toCharArray())).getEncoded()); - assertThat(ex.getMessage(), Matchers.equalTo("[xpack.security.enrollment.enabled] must be set to `true` to " + + assertThat(ex.getMessage(), equalTo("[xpack.security.enrollment.enabled] must be set to `true` to " + "create an enrollment token")); } @@ -358,25 +359,28 @@ public void testGetFilteredAddresses() throws Exception { List addresses = Arrays.asList("[::1]:9200", "127.0.0.1:9200", "192.168.0.1:9201", "172.16.254.1:9202", "[2001:db8:0:1234:0:567:8:1]:9203"); List filteredAddresses = getFilteredAddresses(addresses); - assertThat(filteredAddresses.size(), Matchers.equalTo(3)); + assertThat(filteredAddresses.size(), equalTo(3)); assertThat(filteredAddresses, Matchers.containsInAnyOrder("192.168.0.1:9201", "172.16.254.1:9202", "[2001:db8:0:1234:0:567:8:1]:9203")); + assertThat(filteredAddresses.get(2), equalTo("[2001:db8:0:1234:0:567:8:1]:9203")); addresses = Arrays.asList("[::1]:9200", "127.0.0.1:9200"); filteredAddresses = getFilteredAddresses(addresses); - assertThat(filteredAddresses.size(), Matchers.equalTo(2)); - assertThat(filteredAddresses, Matchers.containsInAnyOrder("[::1]:9200", "127.0.0.1:9200")); + assertThat(filteredAddresses.size(), equalTo(2)); + assertThat(filteredAddresses.get(0), equalTo("127.0.0.1:9200")); + assertThat(filteredAddresses.get(1), equalTo("[::1]:9200")); addresses = Arrays.asList("128.255.255.255", "[::1]:9200", "127.0.0.1:9200"); filteredAddresses = getFilteredAddresses(addresses); - assertThat(filteredAddresses.size(), Matchers.equalTo(1)); + assertThat(filteredAddresses.size(), equalTo(1)); assertThat(filteredAddresses, Matchers.containsInAnyOrder("128.255.255.255")); addresses = Arrays.asList("8.8.8.8:9200", "192.168.0.1:9201", "172.16.254.1:9202", "[2001:db8:0:1234:0:567:8:1]:9203"); filteredAddresses = getFilteredAddresses(addresses); - assertThat(filteredAddresses.size(), Matchers.equalTo(4)); + assertThat(filteredAddresses.size(), equalTo(4)); assertThat(filteredAddresses, Matchers.containsInAnyOrder("8.8.8.8:9200", "192.168.0.1:9201", "172.16.254.1:9202", "[2001:db8:0:1234:0:567:8:1]:9203")); + assertThat(filteredAddresses.get(3), equalTo("[2001:db8:0:1234:0:567:8:1]:9203")); final List invalid_addresses = Arrays.asList("nldfnbndflbnl"); UnknownHostException ex = expectThrows(UnknownHostException.class, () -> getFilteredAddresses(invalid_addresses)); From c082c2578a2ba85933e54501cf2969275c5fcdf5 Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Thu, 2 Sep 2021 10:26:41 +0200 Subject: [PATCH 080/128] Fix NumberFieldMapper Referencing its Own Builder (#77131) Investigating the heap use of mapper instances I found this. It seems quite a bit of overhead for these instances goes into the builder field. In other mappers we retain the script service and the script outright, so I did the same thing here to make these instances a little smaller. 
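As an aside, the pattern the commit message describes (retain only the values a mapper needs at runtime instead of the whole builder) can be sketched with hypothetical stand-in types; these classes are illustrative only and are not the actual NumberFieldMapper or FieldMapper.Builder code:

    // Illustrative only: hypothetical stand-ins, not the real Elasticsearch classes.
    final class ScriptService {}
    final class Script {}

    final class Builder {
        // only these two values are needed once the mapper has been built
        final ScriptService scriptService = new ScriptService();
        final Script script = new Script();
        // ... plus many other parameters that matter only while building ...
    }

    // Before: holding the whole Builder keeps every builder parameter reachable
    // for the lifetime of the mapper instance.
    final class MapperHoldingBuilder {
        private final Builder builder;
        MapperHoldingBuilder(Builder builder) { this.builder = builder; }
    }

    // After: copy out just the script service and script, so the Builder and the
    // rest of what it references can be garbage collected after construction.
    final class MapperHoldingFields {
        private final ScriptService scriptService;
        private final Script script;
        MapperHoldingFields(Builder builder) {
            this.scriptService = builder.scriptService;
            this.script = builder.script;
        }
    }
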
--- .../elasticsearch/index/mapper/NumberFieldMapper.java | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java index ced335a4f68c1..0ded3c7bf98fd 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java @@ -81,7 +81,7 @@ public static class Builder extends FieldMapper.Builder { private final Parameter nullValue; - private final Parameter